@rocicorp/zero 0.26.0-canary.0 → 0.26.0-canary.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/out/replicache/src/persist/collect-idb-databases.d.ts +4 -4
- package/out/replicache/src/persist/collect-idb-databases.d.ts.map +1 -1
- package/out/replicache/src/persist/collect-idb-databases.js +22 -19
- package/out/replicache/src/persist/collect-idb-databases.js.map +1 -1
- package/out/replicache/src/persist/refresh.d.ts.map +1 -1
- package/out/replicache/src/persist/refresh.js +0 -8
- package/out/replicache/src/persist/refresh.js.map +1 -1
- package/out/replicache/src/process-scheduler.d.ts +23 -0
- package/out/replicache/src/process-scheduler.d.ts.map +1 -1
- package/out/replicache/src/process-scheduler.js +50 -1
- package/out/replicache/src/process-scheduler.js.map +1 -1
- package/out/replicache/src/replicache-impl.d.ts +8 -0
- package/out/replicache/src/replicache-impl.d.ts.map +1 -1
- package/out/replicache/src/replicache-impl.js +11 -2
- package/out/replicache/src/replicache-impl.js.map +1 -1
- package/out/shared/src/custom-key-map.d.ts +4 -4
- package/out/shared/src/custom-key-map.d.ts.map +1 -1
- package/out/shared/src/custom-key-map.js.map +1 -1
- package/out/shared/src/falsy.d.ts +3 -0
- package/out/shared/src/falsy.d.ts.map +1 -0
- package/out/shared/src/iterables.d.ts +6 -8
- package/out/shared/src/iterables.d.ts.map +1 -1
- package/out/shared/src/iterables.js +13 -7
- package/out/shared/src/iterables.js.map +1 -1
- package/out/shared/src/options.d.ts +1 -0
- package/out/shared/src/options.d.ts.map +1 -1
- package/out/shared/src/options.js +5 -1
- package/out/shared/src/options.js.map +1 -1
- package/out/zero/package.json.js +1 -1
- package/out/zero/src/adapters/drizzle.js +1 -2
- package/out/zero/src/adapters/prisma.d.ts +2 -0
- package/out/zero/src/adapters/prisma.d.ts.map +1 -0
- package/out/zero/src/adapters/prisma.js +6 -0
- package/out/zero/src/adapters/prisma.js.map +1 -0
- package/out/zero/src/pg.js +4 -7
- package/out/zero/src/react.js +3 -1
- package/out/zero/src/react.js.map +1 -1
- package/out/zero/src/server.js +5 -8
- package/out/zero/src/zero-cache-dev.js +7 -3
- package/out/zero/src/zero-cache-dev.js.map +1 -1
- package/out/zero-cache/src/auth/load-permissions.d.ts +3 -2
- package/out/zero-cache/src/auth/load-permissions.d.ts.map +1 -1
- package/out/zero-cache/src/auth/load-permissions.js +14 -8
- package/out/zero-cache/src/auth/load-permissions.js.map +1 -1
- package/out/zero-cache/src/auth/write-authorizer.d.ts +6 -0
- package/out/zero-cache/src/auth/write-authorizer.d.ts.map +1 -1
- package/out/zero-cache/src/auth/write-authorizer.js +16 -3
- package/out/zero-cache/src/auth/write-authorizer.js.map +1 -1
- package/out/zero-cache/src/config/zero-config.d.ts +54 -9
- package/out/zero-cache/src/config/zero-config.d.ts.map +1 -1
- package/out/zero-cache/src/config/zero-config.js +80 -20
- package/out/zero-cache/src/config/zero-config.js.map +1 -1
- package/out/zero-cache/src/custom/fetch.d.ts +3 -0
- package/out/zero-cache/src/custom/fetch.d.ts.map +1 -1
- package/out/zero-cache/src/custom/fetch.js +26 -0
- package/out/zero-cache/src/custom/fetch.js.map +1 -1
- package/out/zero-cache/src/db/lite-tables.js +1 -1
- package/out/zero-cache/src/db/lite-tables.js.map +1 -1
- package/out/zero-cache/src/db/migration-lite.d.ts.map +1 -1
- package/out/zero-cache/src/db/migration-lite.js +9 -3
- package/out/zero-cache/src/db/migration-lite.js.map +1 -1
- package/out/zero-cache/src/db/migration.d.ts.map +1 -1
- package/out/zero-cache/src/db/migration.js +9 -3
- package/out/zero-cache/src/db/migration.js.map +1 -1
- package/out/zero-cache/src/db/specs.d.ts +4 -3
- package/out/zero-cache/src/db/specs.d.ts.map +1 -1
- package/out/zero-cache/src/db/specs.js +4 -1
- package/out/zero-cache/src/db/specs.js.map +1 -1
- package/out/zero-cache/src/db/transaction-pool.d.ts.map +1 -1
- package/out/zero-cache/src/db/transaction-pool.js +9 -3
- package/out/zero-cache/src/db/transaction-pool.js.map +1 -1
- package/out/zero-cache/src/observability/events.d.ts.map +1 -1
- package/out/zero-cache/src/observability/events.js +15 -5
- package/out/zero-cache/src/observability/events.js.map +1 -1
- package/out/zero-cache/src/server/change-streamer.d.ts.map +1 -1
- package/out/zero-cache/src/server/change-streamer.js +10 -2
- package/out/zero-cache/src/server/change-streamer.js.map +1 -1
- package/out/zero-cache/src/server/inspector-delegate.d.ts +1 -1
- package/out/zero-cache/src/server/inspector-delegate.d.ts.map +1 -1
- package/out/zero-cache/src/server/inspector-delegate.js +11 -30
- package/out/zero-cache/src/server/inspector-delegate.js.map +1 -1
- package/out/zero-cache/src/server/main.js +1 -1
- package/out/zero-cache/src/server/main.js.map +1 -1
- package/out/zero-cache/src/server/priority-op.d.ts +8 -0
- package/out/zero-cache/src/server/priority-op.d.ts.map +1 -0
- package/out/zero-cache/src/server/priority-op.js +29 -0
- package/out/zero-cache/src/server/priority-op.js.map +1 -0
- package/out/zero-cache/src/server/syncer.d.ts.map +1 -1
- package/out/zero-cache/src/server/syncer.js +10 -10
- package/out/zero-cache/src/server/syncer.js.map +1 -1
- package/out/zero-cache/src/services/analyze.js +1 -1
- package/out/zero-cache/src/services/analyze.js.map +1 -1
- package/out/zero-cache/src/services/change-source/custom/change-source.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/custom/change-source.js +4 -7
- package/out/zero-cache/src/services/change-source/custom/change-source.js.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/change-source.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/change-source.js +68 -13
- package/out/zero-cache/src/services/change-source/pg/change-source.js.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/initial-sync.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/initial-sync.js +7 -2
- package/out/zero-cache/src/services/change-source/pg/initial-sync.js.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/logical-replication/stream.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/logical-replication/stream.js +7 -4
- package/out/zero-cache/src/services/change-source/pg/logical-replication/stream.js.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/schema/ddl.d.ts +125 -180
- package/out/zero-cache/src/services/change-source/pg/schema/ddl.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/schema/ddl.js +1 -10
- package/out/zero-cache/src/services/change-source/pg/schema/ddl.js.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/schema/init.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/schema/init.js +26 -12
- package/out/zero-cache/src/services/change-source/pg/schema/init.js.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/schema/published.d.ts +36 -90
- package/out/zero-cache/src/services/change-source/pg/schema/published.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/schema/published.js +51 -14
- package/out/zero-cache/src/services/change-source/pg/schema/published.js.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/schema/shard.d.ts +31 -36
- package/out/zero-cache/src/services/change-source/pg/schema/shard.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/schema/shard.js +25 -17
- package/out/zero-cache/src/services/change-source/pg/schema/shard.js.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/schema/validation.d.ts +2 -2
- package/out/zero-cache/src/services/change-source/pg/schema/validation.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/pg/schema/validation.js +2 -4
- package/out/zero-cache/src/services/change-source/pg/schema/validation.js.map +1 -1
- package/out/zero-cache/src/services/change-source/protocol/current/data.d.ts +158 -53
- package/out/zero-cache/src/services/change-source/protocol/current/data.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/protocol/current/data.js +55 -10
- package/out/zero-cache/src/services/change-source/protocol/current/data.js.map +1 -1
- package/out/zero-cache/src/services/change-source/protocol/current/downstream.d.ts +210 -72
- package/out/zero-cache/src/services/change-source/protocol/current/downstream.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/protocol/current.js +4 -2
- package/out/zero-cache/src/services/change-source/replica-schema.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/replica-schema.js +20 -4
- package/out/zero-cache/src/services/change-source/replica-schema.js.map +1 -1
- package/out/zero-cache/src/services/change-streamer/change-streamer-service.d.ts +1 -1
- package/out/zero-cache/src/services/change-streamer/change-streamer-service.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-streamer/change-streamer-service.js +6 -4
- package/out/zero-cache/src/services/change-streamer/change-streamer-service.js.map +1 -1
- package/out/zero-cache/src/services/change-streamer/change-streamer.d.ts +71 -25
- package/out/zero-cache/src/services/change-streamer/change-streamer.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-streamer/change-streamer.js +1 -1
- package/out/zero-cache/src/services/change-streamer/change-streamer.js.map +1 -1
- package/out/zero-cache/src/services/change-streamer/schema/tables.d.ts +1 -0
- package/out/zero-cache/src/services/change-streamer/schema/tables.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-streamer/schema/tables.js +6 -5
- package/out/zero-cache/src/services/change-streamer/schema/tables.js.map +1 -1
- package/out/zero-cache/src/services/change-streamer/storer.d.ts +1 -1
- package/out/zero-cache/src/services/change-streamer/storer.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-streamer/storer.js +17 -6
- package/out/zero-cache/src/services/change-streamer/storer.js.map +1 -1
- package/out/zero-cache/src/services/change-streamer/subscriber.d.ts +2 -0
- package/out/zero-cache/src/services/change-streamer/subscriber.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-streamer/subscriber.js +14 -1
- package/out/zero-cache/src/services/change-streamer/subscriber.js.map +1 -1
- package/out/zero-cache/src/services/heapz.d.ts.map +1 -1
- package/out/zero-cache/src/services/heapz.js +1 -0
- package/out/zero-cache/src/services/heapz.js.map +1 -1
- package/out/zero-cache/src/services/life-cycle.d.ts +1 -1
- package/out/zero-cache/src/services/life-cycle.d.ts.map +1 -1
- package/out/zero-cache/src/services/life-cycle.js.map +1 -1
- package/out/zero-cache/src/services/litestream/commands.d.ts.map +1 -1
- package/out/zero-cache/src/services/litestream/commands.js +3 -1
- package/out/zero-cache/src/services/litestream/commands.js.map +1 -1
- package/out/zero-cache/src/services/litestream/config.yml +1 -0
- package/out/zero-cache/src/services/mutagen/error.d.ts.map +1 -1
- package/out/zero-cache/src/services/mutagen/error.js +4 -1
- package/out/zero-cache/src/services/mutagen/error.js.map +1 -1
- package/out/zero-cache/src/services/mutagen/mutagen.d.ts +4 -4
- package/out/zero-cache/src/services/mutagen/mutagen.d.ts.map +1 -1
- package/out/zero-cache/src/services/mutagen/mutagen.js +10 -24
- package/out/zero-cache/src/services/mutagen/mutagen.js.map +1 -1
- package/out/zero-cache/src/services/mutagen/pusher.d.ts +8 -6
- package/out/zero-cache/src/services/mutagen/pusher.d.ts.map +1 -1
- package/out/zero-cache/src/services/mutagen/pusher.js +130 -19
- package/out/zero-cache/src/services/mutagen/pusher.js.map +1 -1
- package/out/zero-cache/src/services/replicator/change-processor.d.ts.map +1 -1
- package/out/zero-cache/src/services/replicator/change-processor.js +24 -31
- package/out/zero-cache/src/services/replicator/change-processor.js.map +1 -1
- package/out/zero-cache/src/services/replicator/schema/change-log.d.ts +4 -4
- package/out/zero-cache/src/services/replicator/schema/change-log.d.ts.map +1 -1
- package/out/zero-cache/src/services/replicator/schema/change-log.js +38 -36
- package/out/zero-cache/src/services/replicator/schema/change-log.js.map +1 -1
- package/out/zero-cache/src/services/{change-source → replicator/schema}/column-metadata.d.ts +3 -3
- package/out/zero-cache/src/services/replicator/schema/column-metadata.d.ts.map +1 -0
- package/out/zero-cache/src/services/{change-source → replicator/schema}/column-metadata.js +3 -3
- package/out/zero-cache/src/services/replicator/schema/column-metadata.js.map +1 -0
- package/out/zero-cache/src/services/replicator/schema/replication-state.d.ts.map +1 -1
- package/out/zero-cache/src/services/replicator/schema/replication-state.js +3 -1
- package/out/zero-cache/src/services/replicator/schema/replication-state.js.map +1 -1
- package/out/zero-cache/src/services/run-ast.js +1 -1
- package/out/zero-cache/src/services/run-ast.js.map +1 -1
- package/out/zero-cache/src/services/statz.d.ts.map +1 -1
- package/out/zero-cache/src/services/statz.js +1 -0
- package/out/zero-cache/src/services/statz.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/client-handler.d.ts +5 -6
- package/out/zero-cache/src/services/view-syncer/client-handler.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/client-handler.js +5 -23
- package/out/zero-cache/src/services/view-syncer/client-handler.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/cvr-store.d.ts +1 -1
- package/out/zero-cache/src/services/view-syncer/cvr-store.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/cvr-store.js +65 -44
- package/out/zero-cache/src/services/view-syncer/cvr-store.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/cvr.d.ts +0 -1
- package/out/zero-cache/src/services/view-syncer/cvr.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/cvr.js +23 -6
- package/out/zero-cache/src/services/view-syncer/cvr.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts +14 -22
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.js +46 -67
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/row-record-cache.d.ts +1 -1
- package/out/zero-cache/src/services/view-syncer/row-record-cache.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/row-record-cache.js +22 -11
- package/out/zero-cache/src/services/view-syncer/row-record-cache.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/snapshotter.d.ts +0 -2
- package/out/zero-cache/src/services/view-syncer/snapshotter.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/snapshotter.js +3 -11
- package/out/zero-cache/src/services/view-syncer/snapshotter.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/view-syncer.d.ts +6 -4
- package/out/zero-cache/src/services/view-syncer/view-syncer.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/view-syncer.js +216 -243
- package/out/zero-cache/src/services/view-syncer/view-syncer.js.map +1 -1
- package/out/zero-cache/src/types/lexi-version.d.ts.map +1 -1
- package/out/zero-cache/src/types/lexi-version.js +4 -1
- package/out/zero-cache/src/types/lexi-version.js.map +1 -1
- package/out/zero-cache/src/types/lite.d.ts.map +1 -1
- package/out/zero-cache/src/types/lite.js +8 -2
- package/out/zero-cache/src/types/lite.js.map +1 -1
- package/out/zero-cache/src/types/shards.js +1 -1
- package/out/zero-cache/src/types/shards.js.map +1 -1
- package/out/zero-cache/src/types/sql.d.ts +5 -0
- package/out/zero-cache/src/types/sql.d.ts.map +1 -1
- package/out/zero-cache/src/types/sql.js +5 -1
- package/out/zero-cache/src/types/sql.js.map +1 -1
- package/out/zero-cache/src/types/subscription.js +1 -1
- package/out/zero-cache/src/types/subscription.js.map +1 -1
- package/out/zero-cache/src/workers/connect-params.d.ts +1 -1
- package/out/zero-cache/src/workers/connect-params.d.ts.map +1 -1
- package/out/zero-cache/src/workers/connect-params.js +2 -3
- package/out/zero-cache/src/workers/connect-params.js.map +1 -1
- package/out/zero-cache/src/workers/replicator.d.ts.map +1 -1
- package/out/zero-cache/src/workers/replicator.js +2 -5
- package/out/zero-cache/src/workers/replicator.js.map +1 -1
- package/out/zero-cache/src/workers/syncer-ws-message-handler.d.ts.map +1 -1
- package/out/zero-cache/src/workers/syncer-ws-message-handler.js +15 -10
- package/out/zero-cache/src/workers/syncer-ws-message-handler.js.map +1 -1
- package/out/zero-cache/src/workers/syncer.d.ts.map +1 -1
- package/out/zero-cache/src/workers/syncer.js +17 -10
- package/out/zero-cache/src/workers/syncer.js.map +1 -1
- package/out/zero-client/src/client/connection-manager.d.ts +8 -0
- package/out/zero-client/src/client/connection-manager.d.ts.map +1 -1
- package/out/zero-client/src/client/connection-manager.js +33 -0
- package/out/zero-client/src/client/connection-manager.js.map +1 -1
- package/out/zero-client/src/client/connection.d.ts.map +1 -1
- package/out/zero-client/src/client/connection.js +6 -3
- package/out/zero-client/src/client/connection.js.map +1 -1
- package/out/zero-client/src/client/context.js +1 -0
- package/out/zero-client/src/client/context.js.map +1 -1
- package/out/zero-client/src/client/error.js +1 -1
- package/out/zero-client/src/client/error.js.map +1 -1
- package/out/zero-client/src/client/mutator-proxy.d.ts.map +1 -1
- package/out/zero-client/src/client/mutator-proxy.js +15 -1
- package/out/zero-client/src/client/mutator-proxy.js.map +1 -1
- package/out/zero-client/src/client/options.d.ts +11 -1
- package/out/zero-client/src/client/options.d.ts.map +1 -1
- package/out/zero-client/src/client/options.js.map +1 -1
- package/out/zero-client/src/client/query-manager.d.ts +4 -0
- package/out/zero-client/src/client/query-manager.d.ts.map +1 -1
- package/out/zero-client/src/client/query-manager.js +7 -0
- package/out/zero-client/src/client/query-manager.js.map +1 -1
- package/out/zero-client/src/client/version.js +1 -1
- package/out/zero-client/src/client/zero.d.ts +5 -5
- package/out/zero-client/src/client/zero.d.ts.map +1 -1
- package/out/zero-client/src/client/zero.js +53 -8
- package/out/zero-client/src/client/zero.js.map +1 -1
- package/out/zero-client/src/mod.d.ts +1 -0
- package/out/zero-client/src/mod.d.ts.map +1 -1
- package/out/zero-protocol/src/connect.d.ts +4 -0
- package/out/zero-protocol/src/connect.d.ts.map +1 -1
- package/out/zero-protocol/src/connect.js +3 -1
- package/out/zero-protocol/src/connect.js.map +1 -1
- package/out/zero-protocol/src/protocol-version.d.ts +1 -1
- package/out/zero-protocol/src/protocol-version.d.ts.map +1 -1
- package/out/zero-protocol/src/protocol-version.js +1 -1
- package/out/zero-protocol/src/protocol-version.js.map +1 -1
- package/out/zero-protocol/src/push.d.ts +16 -0
- package/out/zero-protocol/src/push.d.ts.map +1 -1
- package/out/zero-protocol/src/push.js +25 -1
- package/out/zero-protocol/src/push.js.map +1 -1
- package/out/zero-protocol/src/up.d.ts +2 -0
- package/out/zero-protocol/src/up.d.ts.map +1 -1
- package/out/zero-react/src/mod.d.ts +3 -1
- package/out/zero-react/src/mod.d.ts.map +1 -1
- package/out/zero-react/src/paging-reducer.d.ts +61 -0
- package/out/zero-react/src/paging-reducer.d.ts.map +1 -0
- package/out/zero-react/src/paging-reducer.js +77 -0
- package/out/zero-react/src/paging-reducer.js.map +1 -0
- package/out/zero-react/src/use-query.d.ts +11 -1
- package/out/zero-react/src/use-query.d.ts.map +1 -1
- package/out/zero-react/src/use-query.js +13 -11
- package/out/zero-react/src/use-query.js.map +1 -1
- package/out/zero-react/src/use-rows.d.ts +39 -0
- package/out/zero-react/src/use-rows.d.ts.map +1 -0
- package/out/zero-react/src/use-rows.js +130 -0
- package/out/zero-react/src/use-rows.js.map +1 -0
- package/out/zero-react/src/use-zero-virtualizer.d.ts +122 -0
- package/out/zero-react/src/use-zero-virtualizer.d.ts.map +1 -0
- package/out/zero-react/src/use-zero-virtualizer.js +342 -0
- package/out/zero-react/src/use-zero-virtualizer.js.map +1 -0
- package/out/zero-react/src/zero-provider.js +1 -1
- package/out/zero-react/src/zero-provider.js.map +1 -1
- package/out/zero-server/src/adapters/drizzle.d.ts +18 -18
- package/out/zero-server/src/adapters/drizzle.d.ts.map +1 -1
- package/out/zero-server/src/adapters/drizzle.js +8 -22
- package/out/zero-server/src/adapters/drizzle.js.map +1 -1
- package/out/zero-server/src/adapters/pg.d.ts +19 -13
- package/out/zero-server/src/adapters/pg.d.ts.map +1 -1
- package/out/zero-server/src/adapters/pg.js.map +1 -1
- package/out/zero-server/src/adapters/postgresjs.d.ts +19 -13
- package/out/zero-server/src/adapters/postgresjs.d.ts.map +1 -1
- package/out/zero-server/src/adapters/postgresjs.js.map +1 -1
- package/out/zero-server/src/adapters/prisma.d.ts +66 -0
- package/out/zero-server/src/adapters/prisma.d.ts.map +1 -0
- package/out/zero-server/src/adapters/prisma.js +63 -0
- package/out/zero-server/src/adapters/prisma.js.map +1 -0
- package/out/zero-server/src/custom.js +1 -15
- package/out/zero-server/src/custom.js.map +1 -1
- package/out/zero-server/src/mod.d.ts +9 -8
- package/out/zero-server/src/mod.d.ts.map +1 -1
- package/out/zero-server/src/process-mutations.d.ts +2 -1
- package/out/zero-server/src/process-mutations.d.ts.map +1 -1
- package/out/zero-server/src/process-mutations.js +39 -4
- package/out/zero-server/src/process-mutations.js.map +1 -1
- package/out/zero-server/src/push-processor.js +1 -1
- package/out/zero-server/src/push-processor.js.map +1 -1
- package/out/zero-server/src/schema.d.ts.map +1 -1
- package/out/zero-server/src/schema.js +4 -1
- package/out/zero-server/src/schema.js.map +1 -1
- package/out/zero-server/src/zql-database.d.ts.map +1 -1
- package/out/zero-server/src/zql-database.js +18 -0
- package/out/zero-server/src/zql-database.js.map +1 -1
- package/out/zero-solid/src/mod.d.ts +1 -1
- package/out/zero-solid/src/mod.d.ts.map +1 -1
- package/out/zero-solid/src/solid-view.js +1 -0
- package/out/zero-solid/src/solid-view.js.map +1 -1
- package/out/zero-solid/src/use-query.d.ts +10 -1
- package/out/zero-solid/src/use-query.d.ts.map +1 -1
- package/out/zero-solid/src/use-query.js +22 -5
- package/out/zero-solid/src/use-query.js.map +1 -1
- package/out/zero-solid/src/use-zero.js +1 -1
- package/out/zero-solid/src/use-zero.js.map +1 -1
- package/out/zql/src/ivm/constraint.d.ts.map +1 -1
- package/out/zql/src/ivm/constraint.js +4 -1
- package/out/zql/src/ivm/constraint.js.map +1 -1
- package/out/zql/src/ivm/exists.d.ts.map +1 -1
- package/out/zql/src/ivm/exists.js +4 -1
- package/out/zql/src/ivm/exists.js.map +1 -1
- package/out/zql/src/ivm/join-utils.d.ts.map +1 -1
- package/out/zql/src/ivm/join-utils.js +8 -2
- package/out/zql/src/ivm/join-utils.js.map +1 -1
- package/out/zql/src/ivm/memory-source.d.ts.map +1 -1
- package/out/zql/src/ivm/memory-source.js +12 -3
- package/out/zql/src/ivm/memory-source.js.map +1 -1
- package/out/zql/src/ivm/push-accumulated.d.ts.map +1 -1
- package/out/zql/src/ivm/push-accumulated.js +25 -2
- package/out/zql/src/ivm/push-accumulated.js.map +1 -1
- package/out/zql/src/ivm/stream.d.ts.map +1 -1
- package/out/zql/src/ivm/stream.js +1 -1
- package/out/zql/src/ivm/stream.js.map +1 -1
- package/out/zql/src/ivm/take.d.ts.map +1 -1
- package/out/zql/src/ivm/take.js +24 -6
- package/out/zql/src/ivm/take.js.map +1 -1
- package/out/zql/src/ivm/union-fan-in.d.ts.map +1 -1
- package/out/zql/src/ivm/union-fan-in.js +12 -3
- package/out/zql/src/ivm/union-fan-in.js.map +1 -1
- package/out/zql/src/mutate/mutator.js +4 -4
- package/out/zql/src/mutate/mutator.js.map +1 -1
- package/out/zql/src/query/create-builder.js +3 -5
- package/out/zql/src/query/create-builder.js.map +1 -1
- package/out/zql/src/query/query-registry.js +4 -4
- package/out/zql/src/query/query-registry.js.map +1 -1
- package/out/zqlite/src/table-source.d.ts.map +1 -1
- package/out/zqlite/src/table-source.js +1 -2
- package/out/zqlite/src/table-source.js.map +1 -1
- package/package.json +8 -4
- package/out/zero-cache/src/services/change-source/column-metadata.d.ts.map +0 -1
- package/out/zero-cache/src/services/change-source/column-metadata.js.map +0 -1
- package/out/zero-cache/src/types/schema-versions.d.ts +0 -12
- package/out/zero-cache/src/types/schema-versions.d.ts.map +0 -1
- package/out/zero-cache/src/types/schema-versions.js +0 -28
- package/out/zero-cache/src/types/schema-versions.js.map +0 -1

package/out/zero-cache/src/server/inspector-delegate.js
@@ -9,24 +9,20 @@ const authenticatedClientGroupIDs = /* @__PURE__ */ new Set();
 class InspectorDelegate {
   #globalMetrics = newMetrics();
   #perQueryServerMetrics = /* @__PURE__ */ new Map();
-  #
-  #queryIDToTransformationHash = /* @__PURE__ */ new Map();
-  #transformationASTs = /* @__PURE__ */ new Map();
+  #queryIDToAST = /* @__PURE__ */ new Map();
   #customQueryTransformer;
   constructor(customQueryTransformer) {
     this.#customQueryTransformer = customQueryTransformer;
   }
   addMetric(metric, value, ...args) {
     assert(isServerMetric(metric), `Invalid server metric: ${metric}`);
-    const
-
-
-
-
-    this.#perQueryServerMetrics.set(queryID, serverMetrics);
-    }
-    serverMetrics[metric].add(value);
+    const queryID = args[0];
+    let serverMetrics = this.#perQueryServerMetrics.get(queryID);
+    if (!serverMetrics) {
+      serverMetrics = newMetrics();
+      this.#perQueryServerMetrics.set(queryID, serverMetrics);
     }
+    serverMetrics[metric].add(value);
     this.#globalMetrics[metric].add(value);
   }
   getMetricsJSONForQuery(queryID) {
@@ -37,29 +33,14 @@ class InspectorDelegate {
     return mapValues(this.#globalMetrics, (v) => v.toJSON());
   }
   getASTForQuery(queryID) {
-
-    return transformationHash ? this.#transformationASTs.get(transformationHash) : void 0;
+    return this.#queryIDToAST.get(queryID);
   }
   removeQuery(queryID) {
     this.#perQueryServerMetrics.delete(queryID);
-    this.#
-    for (const [transformationHash, idSet] of this.#hashToIDs.entries()) {
-      idSet.delete(queryID);
-      if (idSet.size === 0) {
-        this.#hashToIDs.delete(transformationHash);
-        this.#transformationASTs.delete(transformationHash);
-      }
-    }
+    this.#queryIDToAST.delete(queryID);
   }
-  addQuery(
-
-    if (existing === void 0) {
-      this.#hashToIDs.set(transformationHash, /* @__PURE__ */ new Set([queryID]));
-    } else {
-      existing.add(queryID);
-    }
-    this.#queryIDToTransformationHash.set(queryID, transformationHash);
-    this.#transformationASTs.set(transformationHash, ast);
+  addQuery(queryID, ast) {
+    this.#queryIDToAST.set(queryID, ast);
   }
   /**
    * Check if the client is authenticated. We only require authentication once
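
The net effect of these hunks: InspectorDelegate drops the transformation-hash indirection (#hashToIDs, #queryIDToTransformationHash, #transformationASTs) and keys both the AST and the per-query server metrics directly by query ID, creating the metrics bucket lazily on first use. A minimal TypeScript sketch of that bookkeeping shape, with hypothetical names (QueryBookkeeping is illustrative, not part of the package):

```ts
// Illustrative only: the simplified per-query bookkeeping after this change.
class QueryBookkeeping<Metrics> {
  readonly #astByQueryID = new Map<string, unknown>();
  readonly #metricsByQueryID = new Map<string, Metrics>();

  constructor(readonly newMetrics: () => Metrics) {}

  addQuery(queryID: string, ast: unknown): void {
    this.#astByQueryID.set(queryID, ast);
  }

  getASTForQuery(queryID: string): unknown {
    return this.#astByQueryID.get(queryID);
  }

  // Created lazily on first use, as the new addMetric does above.
  metricsForQuery(queryID: string): Metrics {
    let m = this.#metricsByQueryID.get(queryID);
    if (!m) {
      m = this.newMetrics();
      this.#metricsByQueryID.set(queryID, m);
    }
    return m;
  }

  removeQuery(queryID: string): void {
    this.#astByQueryID.delete(queryID);
    this.#metricsByQueryID.delete(queryID);
  }
}
```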

package/out/zero-cache/src/server/inspector-delegate.js.map
@@ -1 +1 @@
-
{"version":3,"file":"inspector-delegate.js","sources":["../../../../../zero-cache/src/server/inspector-delegate.ts"],"sourcesContent":["import {assert} from '../../../shared/src/asserts.ts';\nimport type {ReadonlyJSONValue} from '../../../shared/src/json.ts';\nimport {mapValues} from '../../../shared/src/objects.ts';\nimport {TDigest} from '../../../shared/src/tdigest.ts';\nimport type {AST} from '../../../zero-protocol/src/ast.ts';\nimport {ProtocolError} from '../../../zero-protocol/src/error.ts';\nimport type {ServerMetrics as ServerMetricsJSON} from '../../../zero-protocol/src/inspect-down.ts';\nimport {hashOfNameAndArgs} from '../../../zero-protocol/src/query-hash.ts';\nimport {\n isServerMetric,\n type MetricMap,\n type MetricsDelegate,\n} from '../../../zql/src/query/metrics-delegate.ts';\nimport {isDevelopmentMode} from '../config/normalize.ts';\nimport type {CustomQueryTransformer} from '../custom-queries/transform-query.ts';\nimport type {HeaderOptions} from '../custom/fetch.ts';\nimport type {CustomQueryRecord} from '../services/view-syncer/schema/types.ts';\n\n/**\n * Server-side metrics collected for queries during materialization and update.\n * These metrics are reported via the inspector and complement client-side metrics.\n */\nexport type ServerMetrics = {\n 'query-materialization-server': TDigest;\n 'query-update-server': TDigest;\n};\n\ntype ClientGroupID = string;\n\n/**\n * Set of authenticated client group IDs. We keep this outside of the class to\n * share this state across all instances of the InspectorDelegate.\n */\nconst authenticatedClientGroupIDs = new Set<ClientGroupID>();\n\nexport class InspectorDelegate implements MetricsDelegate {\n readonly #globalMetrics: ServerMetrics = newMetrics();\n readonly #perQueryServerMetrics = new Map<string, ServerMetrics>();\n readonly #
+
{"version":3,"file":"inspector-delegate.js","sources":["../../../../../zero-cache/src/server/inspector-delegate.ts"],"sourcesContent":["import {assert} from '../../../shared/src/asserts.ts';\nimport type {ReadonlyJSONValue} from '../../../shared/src/json.ts';\nimport {mapValues} from '../../../shared/src/objects.ts';\nimport {TDigest} from '../../../shared/src/tdigest.ts';\nimport type {AST} from '../../../zero-protocol/src/ast.ts';\nimport {ProtocolError} from '../../../zero-protocol/src/error.ts';\nimport type {ServerMetrics as ServerMetricsJSON} from '../../../zero-protocol/src/inspect-down.ts';\nimport {hashOfNameAndArgs} from '../../../zero-protocol/src/query-hash.ts';\nimport {\n isServerMetric,\n type MetricMap,\n type MetricsDelegate,\n} from '../../../zql/src/query/metrics-delegate.ts';\nimport {isDevelopmentMode} from '../config/normalize.ts';\nimport type {CustomQueryTransformer} from '../custom-queries/transform-query.ts';\nimport type {HeaderOptions} from '../custom/fetch.ts';\nimport type {CustomQueryRecord} from '../services/view-syncer/schema/types.ts';\n\n/**\n * Server-side metrics collected for queries during materialization and update.\n * These metrics are reported via the inspector and complement client-side metrics.\n */\nexport type ServerMetrics = {\n 'query-materialization-server': TDigest;\n 'query-update-server': TDigest;\n};\n\ntype ClientGroupID = string;\n\n/**\n * Set of authenticated client group IDs. We keep this outside of the class to\n * share this state across all instances of the InspectorDelegate.\n */\nconst authenticatedClientGroupIDs = new Set<ClientGroupID>();\n\nexport class InspectorDelegate implements MetricsDelegate {\n readonly #globalMetrics: ServerMetrics = newMetrics();\n readonly #perQueryServerMetrics = new Map<string, ServerMetrics>();\n readonly #queryIDToAST: Map<string, AST> = new Map();\n readonly #customQueryTransformer: CustomQueryTransformer | undefined;\n\n constructor(customQueryTransformer: CustomQueryTransformer | undefined) {\n this.#customQueryTransformer = customQueryTransformer;\n }\n\n addMetric<K extends keyof MetricMap>(\n metric: K,\n value: number,\n ...args: MetricMap[K]\n ): void {\n assert(isServerMetric(metric), `Invalid server metric: ${metric}`);\n const queryID = args[0];\n let serverMetrics = this.#perQueryServerMetrics.get(queryID);\n if (!serverMetrics) {\n serverMetrics = newMetrics();\n this.#perQueryServerMetrics.set(queryID, serverMetrics);\n }\n serverMetrics[metric].add(value);\n this.#globalMetrics[metric].add(value);\n }\n\n getMetricsJSONForQuery(queryID: string): ServerMetricsJSON | null {\n const serverMetrics = this.#perQueryServerMetrics.get(queryID);\n return serverMetrics ? mapValues(serverMetrics, v => v.toJSON()) : null;\n }\n\n getMetricsJSON() {\n return mapValues(this.#globalMetrics, v => v.toJSON());\n }\n\n getASTForQuery(queryID: string): AST | undefined {\n return this.#queryIDToAST.get(queryID);\n }\n\n removeQuery(queryID: string): void {\n this.#perQueryServerMetrics.delete(queryID);\n this.#queryIDToAST.delete(queryID);\n }\n\n addQuery(queryID: string, ast: AST): void {\n this.#queryIDToAST.set(queryID, ast);\n }\n\n /**\n * Check if the client is authenticated. 
We only require authentication once\n * per \"worker\".\n */\n isAuthenticated(clientGroupID: ClientGroupID): boolean {\n return (\n isDevelopmentMode() || authenticatedClientGroupIDs.has(clientGroupID)\n );\n }\n\n setAuthenticated(clientGroupID: ClientGroupID): void {\n authenticatedClientGroupIDs.add(clientGroupID);\n }\n\n clearAuthenticated(clientGroupID: ClientGroupID) {\n authenticatedClientGroupIDs.delete(clientGroupID);\n }\n\n /**\n * Transforms a single custom query by name and args using the configured\n * CustomQueryTransformer. This is primarily used by the inspector to transform\n * queries for analysis.\n */\n async transformCustomQuery(\n name: string,\n args: readonly ReadonlyJSONValue[],\n headerOptions: HeaderOptions,\n userQueryURL: string | undefined,\n ): Promise<AST> {\n assert(\n this.#customQueryTransformer,\n 'Custom query transformation requested but no CustomQueryTransformer is configured',\n );\n\n // Create a fake CustomQueryRecord for the single query\n const queryID = hashOfNameAndArgs(name, args);\n const queries: CustomQueryRecord[] = [\n {\n id: queryID,\n type: 'custom',\n name,\n args,\n clientState: {},\n },\n ];\n\n const results = await this.#customQueryTransformer.transform(\n headerOptions,\n queries,\n userQueryURL,\n );\n\n if ('kind' in results) {\n throw new ProtocolError(results);\n }\n\n const result = results[0];\n if (!result) {\n throw new Error('No transformation result returned');\n }\n\n if ('error' in result) {\n const message =\n result.message ?? 'Unknown application error from custom query';\n throw new Error(\n `Error transforming custom query ${name} (${result.error}): ${message} ${JSON.stringify(result.details)}`,\n );\n }\n\n return result.transformedAst;\n }\n}\n\nfunction newMetrics(): ServerMetrics {\n return {\n 'query-materialization-server': new TDigest(),\n 'query-update-server': new TDigest(),\n 
};\n}\n"],"names":[],"mappings":";;;;;;;AAiCA,MAAM,kDAAkC,IAAA;AAEjC,MAAM,kBAA6C;AAAA,EAC/C,iBAAgC,WAAA;AAAA,EAChC,6CAA6B,IAAA;AAAA,EAC7B,oCAAsC,IAAA;AAAA,EACtC;AAAA,EAET,YAAY,wBAA4D;AACtE,SAAK,0BAA0B;AAAA,EACjC;AAAA,EAEA,UACE,QACA,UACG,MACG;AACN,WAAO,eAAe,MAAM,GAAG,0BAA0B,MAAM,EAAE;AACjE,UAAM,UAAU,KAAK,CAAC;AACtB,QAAI,gBAAgB,KAAK,uBAAuB,IAAI,OAAO;AAC3D,QAAI,CAAC,eAAe;AAClB,sBAAgB,WAAA;AAChB,WAAK,uBAAuB,IAAI,SAAS,aAAa;AAAA,IACxD;AACA,kBAAc,MAAM,EAAE,IAAI,KAAK;AAC/B,SAAK,eAAe,MAAM,EAAE,IAAI,KAAK;AAAA,EACvC;AAAA,EAEA,uBAAuB,SAA2C;AAChE,UAAM,gBAAgB,KAAK,uBAAuB,IAAI,OAAO;AAC7D,WAAO,gBAAgB,UAAU,eAAe,OAAK,EAAE,OAAA,CAAQ,IAAI;AAAA,EACrE;AAAA,EAEA,iBAAiB;AACf,WAAO,UAAU,KAAK,gBAAgB,CAAA,MAAK,EAAE,QAAQ;AAAA,EACvD;AAAA,EAEA,eAAe,SAAkC;AAC/C,WAAO,KAAK,cAAc,IAAI,OAAO;AAAA,EACvC;AAAA,EAEA,YAAY,SAAuB;AACjC,SAAK,uBAAuB,OAAO,OAAO;AAC1C,SAAK,cAAc,OAAO,OAAO;AAAA,EACnC;AAAA,EAEA,SAAS,SAAiB,KAAgB;AACxC,SAAK,cAAc,IAAI,SAAS,GAAG;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,gBAAgB,eAAuC;AACrD,WACE,kBAAA,KAAuB,4BAA4B,IAAI,aAAa;AAAA,EAExE;AAAA,EAEA,iBAAiB,eAAoC;AACnD,gCAA4B,IAAI,aAAa;AAAA,EAC/C;AAAA,EAEA,mBAAmB,eAA8B;AAC/C,gCAA4B,OAAO,aAAa;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,qBACJ,MACA,MACA,eACA,cACc;AACd;AAAA,MACE,KAAK;AAAA,MACL;AAAA,IAAA;AAIF,UAAM,UAAU,kBAAkB,MAAM,IAAI;AAC5C,UAAM,UAA+B;AAAA,MACnC;AAAA,QACE,IAAI;AAAA,QACJ,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,aAAa,CAAA;AAAA,MAAC;AAAA,IAChB;AAGF,UAAM,UAAU,MAAM,KAAK,wBAAwB;AAAA,MACjD;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAGF,QAAI,UAAU,SAAS;AACrB,YAAM,IAAI,cAAc,OAAO;AAAA,IACjC;AAEA,UAAM,SAAS,QAAQ,CAAC;AACxB,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,mCAAmC;AAAA,IACrD;AAEA,QAAI,WAAW,QAAQ;AACrB,YAAM,UACJ,OAAO,WAAW;AACpB,YAAM,IAAI;AAAA,QACR,mCAAmC,IAAI,KAAK,OAAO,KAAK,MAAM,OAAO,IAAI,KAAK,UAAU,OAAO,OAAO,CAAC;AAAA,MAAA;AAAA,IAE3G;AAEA,WAAO,OAAO;AAAA,EAChB;AACF;AAEA,SAAS,aAA4B;AACnC,SAAO;AAAA,IACL,gCAAgC,IAAI,QAAA;AAAA,IACpC,uBAAuB,IAAI,QAAA;AAAA,EAAQ;AAEvC;"}

package/out/zero-cache/src/server/main.js
@@ -52,7 +52,7 @@ async function runWorker(parent, env) {
       restoreStart = await restoreReplica(lc, config);
     } catch (e) {
       if (runChangeStreamer) {
-        lc.error?.("error restoring backup. resyncing the replica.");
+        lc.error?.("error restoring backup. resyncing the replica.", e);
       } else {
         throw e;
       }

package/out/zero-cache/src/server/main.js.map
@@ -1 +1 @@
-
{"version":3,"file":"main.js","sources":["../../../../../zero-cache/src/server/main.ts"],"sourcesContent":["import {resolver} from '@rocicorp/resolver';\nimport path from 'node:path';\nimport {must} from '../../../shared/src/must.ts';\nimport {getNormalizedZeroConfig} from '../config/zero-config.ts';\nimport {initEventSink} from '../observability/events.ts';\nimport {\n exitAfter,\n ProcessManager,\n runUntilKilled,\n type WorkerType,\n} from '../services/life-cycle.ts';\nimport {\n restoreReplica,\n startReplicaBackupProcess,\n} from '../services/litestream/commands.ts';\nimport {\n childWorker,\n parentWorker,\n singleProcessMode,\n type Worker,\n} from '../types/processes.ts';\nimport {\n createNotifierFrom,\n handleSubscriptionsFrom,\n type ReplicaFileMode,\n subscribeTo,\n} from '../workers/replicator.ts';\nimport {createLogContext} from './logging.ts';\nimport {startOtelAuto} from './otel-start.ts';\nimport {WorkerDispatcher} from './worker-dispatcher.ts';\nimport {\n CHANGE_STREAMER_URL,\n MUTATOR_URL,\n REAPER_URL,\n REPLICATOR_URL,\n SYNCER_URL,\n} from './worker-urls.ts';\n\nconst clientConnectionBifurcated = false;\n\nexport default async function runWorker(\n parent: Worker,\n env: NodeJS.ProcessEnv,\n): Promise<void> {\n const startMs = Date.now();\n const config = getNormalizedZeroConfig({env});\n\n startOtelAuto(createLogContext(config, {worker: 'dispatcher'}, false));\n const lc = createLogContext(config, {worker: 'dispatcher'}, true);\n initEventSink(lc, config);\n\n const processes = new ProcessManager(lc, parent);\n\n const {numSyncWorkers: numSyncers} = config;\n if (config.upstream.maxConns < numSyncers) {\n throw new Error(\n `Insufficient upstream connections (${config.upstream.maxConns}) for ${numSyncers} syncers.` +\n `Increase ZERO_UPSTREAM_MAX_CONNS or decrease ZERO_NUM_SYNC_WORKERS (which defaults to available cores).`,\n );\n }\n if (config.cvr.maxConns < numSyncers) {\n throw new Error(\n `Insufficient cvr connections (${config.cvr.maxConns}) for ${numSyncers} syncers.` +\n `Increase ZERO_CVR_MAX_CONNS or decrease ZERO_NUM_SYNC_WORKERS (which defaults to available cores).`,\n );\n }\n\n const internalFlags: string[] =\n numSyncers === 0\n ? []\n : [\n '--upstream-max-conns-per-worker',\n String(Math.floor(config.upstream.maxConns / numSyncers)),\n '--cvr-max-conns-per-worker',\n String(Math.floor(config.cvr.maxConns / numSyncers)),\n ];\n\n function loadWorker(\n moduleUrl: URL,\n type: WorkerType,\n id?: string | number,\n ...args: string[]\n ): Worker {\n const worker = childWorker(moduleUrl, env, ...args, ...internalFlags);\n const name = path.basename(moduleUrl.pathname) + (id ? ` (${id})` : '');\n return processes.addWorker(worker, type, name);\n }\n\n const {\n taskID,\n changeStreamer: {mode: changeStreamerMode, uri: changeStreamerURI},\n litestream,\n } = config;\n const runChangeStreamer =\n changeStreamerMode === 'dedicated' && changeStreamerURI === undefined;\n\n let restoreStart = new Date();\n if (litestream.backupURL || (litestream.executable && !runChangeStreamer)) {\n try {\n restoreStart = await restoreReplica(lc, config);\n } catch (e) {\n if (runChangeStreamer) {\n // If the restore failed, e.g. due to a corrupt backup, the\n // replication-manager recovers by re-syncing.\n lc.error?.('error restoring backup. resyncing the replica.');\n } else {\n // View-syncers, on the other hand, have no option other than to retry\n // until a valid backup has been published. 
This is achieved by\n // shutting down and letting the container runner retry with its\n // configured policy.\n throw e;\n }\n }\n }\n\n const {promise: changeStreamerReady, resolve: changeStreamerStarted} =\n resolver();\n const changeStreamer = runChangeStreamer\n ? loadWorker(\n CHANGE_STREAMER_URL,\n 'supporting',\n undefined,\n String(restoreStart.getTime()),\n ).once('message', changeStreamerStarted)\n : (changeStreamerStarted() ?? undefined);\n\n const {promise: reaperReady, resolve: reaperStarted} = resolver();\n if (numSyncers > 0) {\n loadWorker(REAPER_URL, 'supporting').once('message', reaperStarted);\n } else {\n reaperStarted();\n }\n\n // Wait for the change-streamer to be ready to guarantee that a replica\n // file is present.\n await changeStreamerReady;\n\n if (runChangeStreamer && litestream.backupURL) {\n // Start a backup replicator and corresponding litestream backup process.\n const {promise: backupReady, resolve} = resolver();\n const mode: ReplicaFileMode = 'backup';\n loadWorker(REPLICATOR_URL, 'supporting', mode, mode).once(\n // Wait for the Replicator's first message (i.e. \"ready\") before starting\n // litestream backup in order to avoid contending on the lock when the\n // replicator first prepares the db file.\n 'message',\n () => {\n processes.addSubprocess(\n startReplicaBackupProcess(config),\n 'supporting',\n 'litestream',\n );\n resolve();\n },\n );\n await backupReady;\n }\n\n // Before starting the view-syncers, ensure that the reaper has started\n // up, indicating that any CVR db migrations have been performed.\n await reaperReady;\n\n const syncers: Worker[] = [];\n if (numSyncers) {\n const mode: ReplicaFileMode =\n runChangeStreamer && litestream.backupURL ? 'serving-copy' : 'serving';\n const {promise: replicaReady, resolve} = resolver();\n const replicator = loadWorker(\n REPLICATOR_URL,\n 'supporting',\n mode,\n mode,\n ).once('message', () => {\n subscribeTo(lc, replicator);\n resolve();\n });\n await replicaReady;\n\n const notifier = createNotifierFrom(lc, replicator);\n for (let i = 0; i < numSyncers; i++) {\n syncers.push(loadWorker(SYNCER_URL, 'user-facing', i + 1, mode));\n }\n syncers.forEach(syncer => handleSubscriptionsFrom(lc, syncer, notifier));\n }\n let mutator: Worker | undefined;\n if (clientConnectionBifurcated) {\n mutator = loadWorker(MUTATOR_URL, 'supporting', 'mutator');\n }\n\n lc.info?.('waiting for workers to be ready ...');\n const logWaiting = setInterval(\n () => lc.info?.(`still waiting for ${processes.initializing().join(', ')}`),\n 10_000,\n );\n await processes.allWorkersReady();\n clearInterval(logWaiting);\n lc.info?.(`all workers ready (${Date.now() - startMs} ms)`);\n\n parent.send(['ready', {ready: true}]);\n\n try {\n await runUntilKilled(\n lc,\n parent,\n new WorkerDispatcher(\n lc,\n taskID,\n parent,\n syncers,\n mutator,\n changeStreamer,\n ),\n );\n } catch (err) {\n processes.logErrorAndExit(err, 'dispatcher');\n }\n\n await processes.done();\n}\n\nif (!singleProcessMode()) {\n void exitAfter(() => runWorker(must(parentWorker), 
process.env));\n}\n"],"names":[],"mappings":";;;;;;;;;;;;;AAwCA,eAA8B,UAC5B,QACA,KACe;AACf,QAAM,UAAU,KAAK,IAAA;AACrB,QAAM,SAAS,wBAAwB,EAAC,KAAI;AAE5C,gBAAc,iBAAiB,QAAQ,EAAC,QAAQ,aAAA,GAAe,KAAK,CAAC;AACrE,QAAM,KAAK,iBAAiB,QAAQ,EAAC,QAAQ,aAAA,GAAe,IAAI;AAChE,gBAAc,IAAI,MAAM;AAExB,QAAM,YAAY,IAAI,eAAe,IAAI,MAAM;AAE/C,QAAM,EAAC,gBAAgB,WAAA,IAAc;AACrC,MAAI,OAAO,SAAS,WAAW,YAAY;AACzC,UAAM,IAAI;AAAA,MACR,sCAAsC,OAAO,SAAS,QAAQ,SAAS,UAAU;AAAA,IAAA;AAAA,EAGrF;AACA,MAAI,OAAO,IAAI,WAAW,YAAY;AACpC,UAAM,IAAI;AAAA,MACR,iCAAiC,OAAO,IAAI,QAAQ,SAAS,UAAU;AAAA,IAAA;AAAA,EAG3E;AAEA,QAAM,gBACJ,eAAe,IACX,KACA;AAAA,IACE;AAAA,IACA,OAAO,KAAK,MAAM,OAAO,SAAS,WAAW,UAAU,CAAC;AAAA,IACxD;AAAA,IACA,OAAO,KAAK,MAAM,OAAO,IAAI,WAAW,UAAU,CAAC;AAAA,EAAA;AAG3D,WAAS,WACP,WACA,MACA,OACG,MACK;AACR,UAAM,SAAS,YAAY,WAAW,KAAK,GAAG,MAAM,GAAG,aAAa;AACpE,UAAM,OAAO,KAAK,SAAS,UAAU,QAAQ,KAAK,KAAK,KAAK,EAAE,MAAM;AACpE,WAAO,UAAU,UAAU,QAAQ,MAAM,IAAI;AAAA,EAC/C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA,gBAAgB,EAAC,MAAM,oBAAoB,KAAK,kBAAA;AAAA,IAChD;AAAA,EAAA,IACE;AACJ,QAAM,oBACJ,uBAAuB,eAAe,sBAAsB;AAE9D,MAAI,mCAAmB,KAAA;AACvB,MAAI,WAAW,aAAc,WAAW,cAAc,CAAC,mBAAoB;AACzE,QAAI;AACF,qBAAe,MAAM,eAAe,IAAI,MAAM;AAAA,IAChD,SAAS,GAAG;AACV,UAAI,mBAAmB;AAGrB,WAAG,QAAQ,gDAAgD;AAAA,MAC7D,OAAO;AAKL,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,EAAC,SAAS,qBAAqB,SAAS,sBAAA,IAC5C,SAAA;AACF,QAAM,iBAAiB,oBACnB;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,aAAa,QAAA,CAAS;AAAA,EAAA,EAC7B,KAAK,WAAW,qBAAqB,IACtC,2BAA2B;AAEhC,QAAM,EAAC,SAAS,aAAa,SAAS,cAAA,IAAiB,SAAA;AACvD,MAAI,aAAa,GAAG;AAClB,eAAW,YAAY,YAAY,EAAE,KAAK,WAAW,aAAa;AAAA,EACpE,OAAO;AACL,kBAAA;AAAA,EACF;AAIA,QAAM;AAEN,MAAI,qBAAqB,WAAW,WAAW;AAE7C,UAAM,EAAC,SAAS,aAAa,QAAA,IAAW,SAAA;AACxC,UAAM,OAAwB;AAC9B,eAAW,gBAAgB,cAAc,MAAM,IAAI,EAAE;AAAA;AAAA;AAAA;AAAA,MAInD;AAAA,MACA,MAAM;AACJ,kBAAU;AAAA,UACR,0BAA0B,MAAM;AAAA,UAChC;AAAA,UACA;AAAA,QAAA;AAEF,gBAAA;AAAA,MACF;AAAA,IAAA;AAEF,UAAM;AAAA,EACR;AAIA,QAAM;AAEN,QAAM,UAAoB,CAAA;AAC1B,MAAI,YAAY;AACd,UAAM,OACJ,qBAAqB,WAAW,YAAY,iBAAiB;AAC/D,UAAM,EAAC,SAAS,cAAc,QAAA,IAAW,SAAA;AACzC,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,EACA,KAAK,WAAW,MAAM;AACtB,kBAAY,IAAI,UAAU;AAC1B,cAAA;AAAA,IACF,CAAC;AACD,UAAM;AAEN,UAAM,WAAW,mBAAmB,IAAI,UAAU;AAClD,aAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,cAAQ,KAAK,WAAW,YAAY,eAAe,IAAI,GAAG,IAAI,CAAC;AAAA,IACjE;AACA,YAAQ,QAAQ,CAAA,WAAU,wBAAwB,IAAI,QAAQ,QAAQ,CAAC;AAAA,EACzE;AACA,MAAI;AAKJ,KAAG,OAAO,qCAAqC;AAC/C,QAAM,aAAa;AAAA,IACjB,MAAM,GAAG,OAAO,qBAAqB,UAAU,eAAe,KAAK,IAAI,CAAC,EAAE;AAAA,IAC1E;AAAA,EAAA;AAEF,QAAM,UAAU,gBAAA;AAChB,gBAAc,UAAU;AACxB,KAAG,OAAO,sBAAsB,KAAK,QAAQ,OAAO,MAAM;AAE1D,SAAO,KAAK,CAAC,SAAS,EAAC,OAAO,KAAA,CAAK,CAAC;AAEpC,MAAI;AACF,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA,IAAI;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAAA,IACF;AAAA,EAEJ,SAAS,KAAK;AACZ,cAAU,gBAAgB,KAAK,YAAY;AAAA,EAC7C;AAEA,QAAM,UAAU,KAAA;AAClB;AAEA,IAAI,CAAC,qBAAqB;AACxB,OAAK,UAAU,MAAM,UAAU,KAAK,YAAY,GAAG,QAAQ,GAAG,CAAC;AACjE;"}
+
{"version":3,"file":"main.js","sources":["../../../../../zero-cache/src/server/main.ts"],"sourcesContent":["import {resolver} from '@rocicorp/resolver';\nimport path from 'node:path';\nimport {must} from '../../../shared/src/must.ts';\nimport {getNormalizedZeroConfig} from '../config/zero-config.ts';\nimport {initEventSink} from '../observability/events.ts';\nimport {\n exitAfter,\n ProcessManager,\n runUntilKilled,\n type WorkerType,\n} from '../services/life-cycle.ts';\nimport {\n restoreReplica,\n startReplicaBackupProcess,\n} from '../services/litestream/commands.ts';\nimport {\n childWorker,\n parentWorker,\n singleProcessMode,\n type Worker,\n} from '../types/processes.ts';\nimport {\n createNotifierFrom,\n handleSubscriptionsFrom,\n type ReplicaFileMode,\n subscribeTo,\n} from '../workers/replicator.ts';\nimport {createLogContext} from './logging.ts';\nimport {startOtelAuto} from './otel-start.ts';\nimport {WorkerDispatcher} from './worker-dispatcher.ts';\nimport {\n CHANGE_STREAMER_URL,\n MUTATOR_URL,\n REAPER_URL,\n REPLICATOR_URL,\n SYNCER_URL,\n} from './worker-urls.ts';\n\nconst clientConnectionBifurcated = false;\n\nexport default async function runWorker(\n parent: Worker,\n env: NodeJS.ProcessEnv,\n): Promise<void> {\n const startMs = Date.now();\n const config = getNormalizedZeroConfig({env});\n\n startOtelAuto(createLogContext(config, {worker: 'dispatcher'}, false));\n const lc = createLogContext(config, {worker: 'dispatcher'}, true);\n initEventSink(lc, config);\n\n const processes = new ProcessManager(lc, parent);\n\n const {numSyncWorkers: numSyncers} = config;\n if (config.upstream.maxConns < numSyncers) {\n throw new Error(\n `Insufficient upstream connections (${config.upstream.maxConns}) for ${numSyncers} syncers.` +\n `Increase ZERO_UPSTREAM_MAX_CONNS or decrease ZERO_NUM_SYNC_WORKERS (which defaults to available cores).`,\n );\n }\n if (config.cvr.maxConns < numSyncers) {\n throw new Error(\n `Insufficient cvr connections (${config.cvr.maxConns}) for ${numSyncers} syncers.` +\n `Increase ZERO_CVR_MAX_CONNS or decrease ZERO_NUM_SYNC_WORKERS (which defaults to available cores).`,\n );\n }\n\n const internalFlags: string[] =\n numSyncers === 0\n ? []\n : [\n '--upstream-max-conns-per-worker',\n String(Math.floor(config.upstream.maxConns / numSyncers)),\n '--cvr-max-conns-per-worker',\n String(Math.floor(config.cvr.maxConns / numSyncers)),\n ];\n\n function loadWorker(\n moduleUrl: URL,\n type: WorkerType,\n id?: string | number,\n ...args: string[]\n ): Worker {\n const worker = childWorker(moduleUrl, env, ...args, ...internalFlags);\n const name = path.basename(moduleUrl.pathname) + (id ? ` (${id})` : '');\n return processes.addWorker(worker, type, name);\n }\n\n const {\n taskID,\n changeStreamer: {mode: changeStreamerMode, uri: changeStreamerURI},\n litestream,\n } = config;\n const runChangeStreamer =\n changeStreamerMode === 'dedicated' && changeStreamerURI === undefined;\n\n let restoreStart = new Date();\n if (litestream.backupURL || (litestream.executable && !runChangeStreamer)) {\n try {\n restoreStart = await restoreReplica(lc, config);\n } catch (e) {\n if (runChangeStreamer) {\n // If the restore failed, e.g. due to a corrupt backup, the\n // replication-manager recovers by re-syncing.\n lc.error?.('error restoring backup. resyncing the replica.', e);\n } else {\n // View-syncers, on the other hand, have no option other than to retry\n // until a valid backup has been published. 
This is achieved by\n // shutting down and letting the container runner retry with its\n // configured policy.\n throw e;\n }\n }\n }\n\n const {promise: changeStreamerReady, resolve: changeStreamerStarted} =\n resolver();\n const changeStreamer = runChangeStreamer\n ? loadWorker(\n CHANGE_STREAMER_URL,\n 'supporting',\n undefined,\n String(restoreStart.getTime()),\n ).once('message', changeStreamerStarted)\n : (changeStreamerStarted() ?? undefined);\n\n const {promise: reaperReady, resolve: reaperStarted} = resolver();\n if (numSyncers > 0) {\n loadWorker(REAPER_URL, 'supporting').once('message', reaperStarted);\n } else {\n reaperStarted();\n }\n\n // Wait for the change-streamer to be ready to guarantee that a replica\n // file is present.\n await changeStreamerReady;\n\n if (runChangeStreamer && litestream.backupURL) {\n // Start a backup replicator and corresponding litestream backup process.\n const {promise: backupReady, resolve} = resolver();\n const mode: ReplicaFileMode = 'backup';\n loadWorker(REPLICATOR_URL, 'supporting', mode, mode).once(\n // Wait for the Replicator's first message (i.e. \"ready\") before starting\n // litestream backup in order to avoid contending on the lock when the\n // replicator first prepares the db file.\n 'message',\n () => {\n processes.addSubprocess(\n startReplicaBackupProcess(config),\n 'supporting',\n 'litestream',\n );\n resolve();\n },\n );\n await backupReady;\n }\n\n // Before starting the view-syncers, ensure that the reaper has started\n // up, indicating that any CVR db migrations have been performed.\n await reaperReady;\n\n const syncers: Worker[] = [];\n if (numSyncers) {\n const mode: ReplicaFileMode =\n runChangeStreamer && litestream.backupURL ? 'serving-copy' : 'serving';\n const {promise: replicaReady, resolve} = resolver();\n const replicator = loadWorker(\n REPLICATOR_URL,\n 'supporting',\n mode,\n mode,\n ).once('message', () => {\n subscribeTo(lc, replicator);\n resolve();\n });\n await replicaReady;\n\n const notifier = createNotifierFrom(lc, replicator);\n for (let i = 0; i < numSyncers; i++) {\n syncers.push(loadWorker(SYNCER_URL, 'user-facing', i + 1, mode));\n }\n syncers.forEach(syncer => handleSubscriptionsFrom(lc, syncer, notifier));\n }\n let mutator: Worker | undefined;\n if (clientConnectionBifurcated) {\n mutator = loadWorker(MUTATOR_URL, 'supporting', 'mutator');\n }\n\n lc.info?.('waiting for workers to be ready ...');\n const logWaiting = setInterval(\n () => lc.info?.(`still waiting for ${processes.initializing().join(', ')}`),\n 10_000,\n );\n await processes.allWorkersReady();\n clearInterval(logWaiting);\n lc.info?.(`all workers ready (${Date.now() - startMs} ms)`);\n\n parent.send(['ready', {ready: true}]);\n\n try {\n await runUntilKilled(\n lc,\n parent,\n new WorkerDispatcher(\n lc,\n taskID,\n parent,\n syncers,\n mutator,\n changeStreamer,\n ),\n );\n } catch (err) {\n processes.logErrorAndExit(err, 'dispatcher');\n }\n\n await processes.done();\n}\n\nif (!singleProcessMode()) {\n void exitAfter(() => runWorker(must(parentWorker), 
process.env));\n}\n"],"names":[],"mappings":";;;;;;;;;;;;;AAwCA,eAA8B,UAC5B,QACA,KACe;AACf,QAAM,UAAU,KAAK,IAAA;AACrB,QAAM,SAAS,wBAAwB,EAAC,KAAI;AAE5C,gBAAc,iBAAiB,QAAQ,EAAC,QAAQ,aAAA,GAAe,KAAK,CAAC;AACrE,QAAM,KAAK,iBAAiB,QAAQ,EAAC,QAAQ,aAAA,GAAe,IAAI;AAChE,gBAAc,IAAI,MAAM;AAExB,QAAM,YAAY,IAAI,eAAe,IAAI,MAAM;AAE/C,QAAM,EAAC,gBAAgB,WAAA,IAAc;AACrC,MAAI,OAAO,SAAS,WAAW,YAAY;AACzC,UAAM,IAAI;AAAA,MACR,sCAAsC,OAAO,SAAS,QAAQ,SAAS,UAAU;AAAA,IAAA;AAAA,EAGrF;AACA,MAAI,OAAO,IAAI,WAAW,YAAY;AACpC,UAAM,IAAI;AAAA,MACR,iCAAiC,OAAO,IAAI,QAAQ,SAAS,UAAU;AAAA,IAAA;AAAA,EAG3E;AAEA,QAAM,gBACJ,eAAe,IACX,KACA;AAAA,IACE;AAAA,IACA,OAAO,KAAK,MAAM,OAAO,SAAS,WAAW,UAAU,CAAC;AAAA,IACxD;AAAA,IACA,OAAO,KAAK,MAAM,OAAO,IAAI,WAAW,UAAU,CAAC;AAAA,EAAA;AAG3D,WAAS,WACP,WACA,MACA,OACG,MACK;AACR,UAAM,SAAS,YAAY,WAAW,KAAK,GAAG,MAAM,GAAG,aAAa;AACpE,UAAM,OAAO,KAAK,SAAS,UAAU,QAAQ,KAAK,KAAK,KAAK,EAAE,MAAM;AACpE,WAAO,UAAU,UAAU,QAAQ,MAAM,IAAI;AAAA,EAC/C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA,gBAAgB,EAAC,MAAM,oBAAoB,KAAK,kBAAA;AAAA,IAChD;AAAA,EAAA,IACE;AACJ,QAAM,oBACJ,uBAAuB,eAAe,sBAAsB;AAE9D,MAAI,mCAAmB,KAAA;AACvB,MAAI,WAAW,aAAc,WAAW,cAAc,CAAC,mBAAoB;AACzE,QAAI;AACF,qBAAe,MAAM,eAAe,IAAI,MAAM;AAAA,IAChD,SAAS,GAAG;AACV,UAAI,mBAAmB;AAGrB,WAAG,QAAQ,kDAAkD,CAAC;AAAA,MAChE,OAAO;AAKL,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,EAAC,SAAS,qBAAqB,SAAS,sBAAA,IAC5C,SAAA;AACF,QAAM,iBAAiB,oBACnB;AAAA,IACE;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO,aAAa,QAAA,CAAS;AAAA,EAAA,EAC7B,KAAK,WAAW,qBAAqB,IACtC,2BAA2B;AAEhC,QAAM,EAAC,SAAS,aAAa,SAAS,cAAA,IAAiB,SAAA;AACvD,MAAI,aAAa,GAAG;AAClB,eAAW,YAAY,YAAY,EAAE,KAAK,WAAW,aAAa;AAAA,EACpE,OAAO;AACL,kBAAA;AAAA,EACF;AAIA,QAAM;AAEN,MAAI,qBAAqB,WAAW,WAAW;AAE7C,UAAM,EAAC,SAAS,aAAa,QAAA,IAAW,SAAA;AACxC,UAAM,OAAwB;AAC9B,eAAW,gBAAgB,cAAc,MAAM,IAAI,EAAE;AAAA;AAAA;AAAA;AAAA,MAInD;AAAA,MACA,MAAM;AACJ,kBAAU;AAAA,UACR,0BAA0B,MAAM;AAAA,UAChC;AAAA,UACA;AAAA,QAAA;AAEF,gBAAA;AAAA,MACF;AAAA,IAAA;AAEF,UAAM;AAAA,EACR;AAIA,QAAM;AAEN,QAAM,UAAoB,CAAA;AAC1B,MAAI,YAAY;AACd,UAAM,OACJ,qBAAqB,WAAW,YAAY,iBAAiB;AAC/D,UAAM,EAAC,SAAS,cAAc,QAAA,IAAW,SAAA;AACzC,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA,EACA,KAAK,WAAW,MAAM;AACtB,kBAAY,IAAI,UAAU;AAC1B,cAAA;AAAA,IACF,CAAC;AACD,UAAM;AAEN,UAAM,WAAW,mBAAmB,IAAI,UAAU;AAClD,aAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,cAAQ,KAAK,WAAW,YAAY,eAAe,IAAI,GAAG,IAAI,CAAC;AAAA,IACjE;AACA,YAAQ,QAAQ,CAAA,WAAU,wBAAwB,IAAI,QAAQ,QAAQ,CAAC;AAAA,EACzE;AACA,MAAI;AAKJ,KAAG,OAAO,qCAAqC;AAC/C,QAAM,aAAa;AAAA,IACjB,MAAM,GAAG,OAAO,qBAAqB,UAAU,eAAe,KAAK,IAAI,CAAC,EAAE;AAAA,IAC1E;AAAA,EAAA;AAEF,QAAM,UAAU,gBAAA;AAChB,gBAAc,UAAU;AACxB,KAAG,OAAO,sBAAsB,KAAK,QAAQ,OAAO,MAAM;AAE1D,SAAO,KAAK,CAAC,SAAS,EAAC,OAAO,KAAA,CAAK,CAAC;AAEpC,MAAI;AACF,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA,IAAI;AAAA,QACF;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MAAA;AAAA,IACF;AAAA,EAEJ,SAAS,KAAK;AACZ,cAAU,gBAAgB,KAAK,YAAY;AAAA,EAC7C;AAEA,QAAM,UAAU,KAAA;AAClB;AAEA,IAAI,CAAC,qBAAqB;AACxB,OAAK,UAAU,MAAM,UAAU,KAAK,YAAY,GAAG,QAAQ,GAAG,CAAC;AACjE;"}

package/out/zero-cache/src/server/priority-op.d.ts
@@ -0,0 +1,8 @@
+import type { LogContext } from '@rocicorp/logger';
+/**
+ * Run an operation with priority, indicating that IVM should use smaller time
+ * slices to allow this operation to proceed more quickly
+ */
+export declare function runPriorityOp<T>(lc: LogContext, description: string, op: () => Promise<T>): Promise<T>;
+export declare function isPriorityOpRunning(): boolean;
+//# sourceMappingURL=priority-op.d.ts.map
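
A minimal usage sketch based only on the declaration above. The helper is internal to zero-cache, so the relative import path and the loadInspectorData wrapper are hypothetical:

```ts
import type {LogContext} from '@rocicorp/logger';
import {isPriorityOpRunning, runPriorityOp} from './priority-op.js';

// Hypothetical caller: wrap a latency-sensitive operation so that, while it
// runs, isPriorityOpRunning() reports true and IVM work can shrink its
// time slices.
export async function loadInspectorData(lc: LogContext): Promise<number> {
  const result = await runPriorityOp(lc, 'load inspector data', async () => {
    // ...expensive async work...
    return 42;
  });
  // The running counter is decremented again once the op settles.
  console.log('priority op still running?', isPriorityOpRunning());
  return result;
}
```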

package/out/zero-cache/src/server/priority-op.d.ts.map
@@ -0,0 +1 @@
+
{"version":3,"file":"priority-op.d.ts","sourceRoot":"","sources":["../../../../../zero-cache/src/server/priority-op.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAKjD;;;GAGG;AACH,wBAAsB,aAAa,CAAC,CAAC,EACnC,EAAE,EAAE,UAAU,EACd,WAAW,EAAE,MAAM,EACnB,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,cAmBrB;AAED,wBAAgB,mBAAmB,YAElC"}

package/out/zero-cache/src/server/priority-op.js
@@ -0,0 +1,29 @@
+let priorityOpCounter = 0;
+let runningPriorityOpCounter = 0;
+async function runPriorityOp(lc, description, op) {
+  const id = priorityOpCounter++;
+  runningPriorityOpCounter++;
+  const start = Date.now();
+  lc = lc.withContext("priorityOpID", id);
+  try {
+    lc.debug?.(`running priority op ${description}`);
+    const result = await op();
+    lc.debug?.(
+      `finished priority op ${description} in ${Date.now() - start} ms`
+    );
+    return result;
+  } catch (e) {
+    lc.debug?.(`failed priority op ${description} in ${Date.now() - start} ms`);
+    throw e;
+  } finally {
+    runningPriorityOpCounter--;
+  }
+}
+function isPriorityOpRunning() {
+  return runningPriorityOpCounter > 0;
+}
+export {
+  isPriorityOpRunning,
+  runPriorityOp
+};
+//# sourceMappingURL=priority-op.js.map
|
|
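For orientation, a minimal usage sketch of the new helpers (illustrative only, not shipped in the package; `hydrateClientGroup` is a hypothetical operation):

```ts
import type {LogContext} from '@rocicorp/logger';
import {runPriorityOp, isPriorityOpRunning} from './priority-op.ts';

// Hypothetical long-running async operation, used only for illustration.
declare function hydrateClientGroup(): Promise<void>;

async function example(lc: LogContext): Promise<void> {
  // Increments the module-level counter for the duration of the operation and
  // logs its start, completion time, and failures; the result or error passes
  // through unchanged.
  await runPriorityOp(lc, 'hydrate client group', () => hydrateClientGroup());

  // Other code (e.g. IVM time-slicing) can poll this flag to decide whether a
  // priority op is currently in flight.
  if (isPriorityOpRunning()) {
    lc.debug?.('a priority op is still running');
  }
}
```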
@@ -0,0 +1 @@
+
{"version":3,"file":"priority-op.js","sources":["../../../../../zero-cache/src/server/priority-op.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\n\nlet priorityOpCounter = 0;\nlet runningPriorityOpCounter = 0;\n\n/**\n * Run an operation with priority, indicating that IVM should use smaller time\n * slices to allow this operation to proceed more quickly\n */\nexport async function runPriorityOp<T>(\n lc: LogContext,\n description: string,\n op: () => Promise<T>,\n) {\n const id = priorityOpCounter++;\n runningPriorityOpCounter++;\n const start = Date.now();\n lc = lc.withContext('priorityOpID', id);\n try {\n lc.debug?.(`running priority op ${description}`);\n const result = await op();\n lc.debug?.(\n `finished priority op ${description} in ${Date.now() - start} ms`,\n );\n return result;\n } catch (e) {\n lc.debug?.(`failed priority op ${description} in ${Date.now() - start} ms`);\n throw e;\n } finally {\n runningPriorityOpCounter--;\n }\n}\n\nexport function isPriorityOpRunning() {\n return runningPriorityOpCounter > 0;\n}\n"],"names":[],"mappings":"AAEA,IAAI,oBAAoB;AACxB,IAAI,2BAA2B;AAM/B,eAAsB,cACpB,IACA,aACA,IACA;AACA,QAAM,KAAK;AACX;AACA,QAAM,QAAQ,KAAK,IAAA;AACnB,OAAK,GAAG,YAAY,gBAAgB,EAAE;AACtC,MAAI;AACF,OAAG,QAAQ,uBAAuB,WAAW,EAAE;AAC/C,UAAM,SAAS,MAAM,GAAA;AACrB,OAAG;AAAA,MACD,wBAAwB,WAAW,OAAO,KAAK,IAAA,IAAQ,KAAK;AAAA,IAAA;AAE9D,WAAO;AAAA,EACT,SAAS,GAAG;AACV,OAAG,QAAQ,sBAAsB,WAAW,OAAO,KAAK,IAAA,IAAQ,KAAK,KAAK;AAC1E,UAAM;AAAA,EACR,UAAA;AACE;AAAA,EACF;AACF;AAEO,SAAS,sBAAsB;AACpC,SAAO,2BAA2B;AACpC;"}
@@ -1 +1 @@
-
{"version":3,"file":"syncer.d.ts","sourceRoot":"","sources":["../../../../../zero-cache/src/server/syncer.ts"],"names":[],"mappings":"AAuBA,OAAO,EAGL,KAAK,MAAM,EACZ,MAAM,uBAAuB,CAAC;
+
{"version":3,"file":"syncer.d.ts","sourceRoot":"","sources":["../../../../../zero-cache/src/server/syncer.ts"],"names":[],"mappings":"AAuBA,OAAO,EAGL,KAAK,MAAM,EACZ,MAAM,uBAAuB,CAAC;AA8B/B,MAAM,CAAC,OAAO,UAAU,SAAS,CAC/B,MAAM,EAAE,MAAM,EACd,GAAG,EAAE,MAAM,CAAC,UAAU,EACtB,GAAG,IAAI,EAAE,MAAM,EAAE,GAChB,OAAO,CAAC,IAAI,CAAC,CA8If"}
@@ -26,6 +26,7 @@ import { startAnonymousTelemetry } from "./anonymous-otel-start.js";
 import { InspectorDelegate } from "./inspector-delegate.js";
 import { createLogContext } from "./logging.js";
 import { startOtelAuto } from "./otel-start.js";
+import { runPriorityOp, isPriorityOpRunning } from "./priority-op.js";
 function randomID() {
   return randInt(1, Number.MAX_SAFE_INTEGER).toString(36);
 }
@@ -81,6 +82,11 @@ function runWorker(parent, env, ...args) {
     const customQueryConfig = getCustomQueryConfig(config);
     const customQueryTransformer = customQueryConfig && new CustomQueryTransformer(logger, customQueryConfig, shard);
     const inspectorDelegate = new InspectorDelegate(customQueryTransformer);
+    const priorityOpRunningYieldThresholdMs = Math.max(
+      config.yieldThresholdMs / 4,
+      2
+    );
+    const normalYieldThresholdMs = Math.max(config.yieldThresholdMs, 2);
     return new ViewSyncerService(
       config,
       logger,
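To make the arithmetic above concrete, a small sketch with an illustrative value for `config.yieldThresholdMs` (the computation mirrors the added lines; the numbers are examples, not package defaults):

```ts
// While a priority op runs, IVM yields roughly four times as often; both
// thresholds are clamped to a 2 ms floor.
function yieldThresholds(yieldThresholdMs: number) {
  const priorityOpRunning = Math.max(yieldThresholdMs / 4, 2);
  const normal = Math.max(yieldThresholdMs, 2);
  return {priorityOpRunning, normal};
}

// e.g. a configured threshold of 100 ms yields every ~25 ms during a priority
// op and every ~100 ms otherwise.
console.log(yieldThresholds(100)); // { priorityOpRunning: 25, normal: 100 }
console.log(yieldThresholds(4));   // { priorityOpRunning: 2, normal: 4 }
```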
@@ -88,28 +94,23 @@
       config.taskID,
       id,
       cvrDB,
-      config.upstream.type === "pg" ? upstreamDB : void 0,
       new PipelineDriver(
         logger,
         config.log,
-        new Snapshotter(
-          logger,
-          replicaFile,
-          shard,
-          config.replica.pageCacheSizeKib
-        ),
+        new Snapshotter(logger, replicaFile, shard),
         shard,
         operatorStorage.createClientGroupStorage(id),
         id,
         inspectorDelegate,
-
+        () => isPriorityOpRunning() ? priorityOpRunningYieldThresholdMs : normalYieldThresholdMs,
         config.enableQueryPlanner
       ),
       sub,
       drainCoordinator,
       config.log.slowHydrateThreshold,
       inspectorDelegate,
-      customQueryTransformer
+      customQueryTransformer,
+      runPriorityOp
     );
   };
   const mutagenFactory = (id) => new MutagenService(
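The yield-threshold callback passed to PipelineDriver above simply returns the smaller value while a priority op is in flight. A rough sketch of how such a callback could drive a time-sliced loop (illustrative only; `processWithYield` is not part of the package, and PipelineDriver's internals may differ):

```ts
// Runs units of work, yielding to the event loop whenever the current lap
// exceeds the threshold reported by the callback. Because the callback is
// re-read on every check, a priority op starting mid-loop immediately shrinks
// the time slice.
async function processWithYield(
  work: Array<() => void>,
  getYieldThresholdMs: () => number,
): Promise<void> {
  let lapStart = Date.now();
  for (const step of work) {
    step();
    if (Date.now() - lapStart >= getYieldThresholdMs()) {
      await new Promise(resolve => setTimeout(resolve, 0));
      lapStart = Date.now();
    }
  }
}
```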
@@ -121,7 +122,6 @@ function runWorker(parent, env, ...args) {
     writeAuthzStorage
   );
   const pusherFactory = config.push.url === void 0 && config.mutate.url === void 0 ? void 0 : (id) => new PusherService(
-    upstreamDB,
     config,
     {
       ...config.push,
@@ -1 +1 @@
-
{"version":3,"file":"syncer.js","sources":["../../../../../zero-cache/src/server/syncer.ts"],"sourcesContent":["import {randomUUID} from 'node:crypto';\nimport {tmpdir} from 'node:os';\nimport path from 'node:path';\nimport {pid} from 'node:process';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport {randInt} from '../../../shared/src/rand.ts';\nimport * as v from '../../../shared/src/valita.ts';\nimport {DatabaseStorage} from '../../../zqlite/src/database-storage.ts';\nimport type {NormalizedZeroConfig} from '../config/normalize.ts';\nimport {getNormalizedZeroConfig} from '../config/zero-config.ts';\nimport {CustomQueryTransformer} from '../custom-queries/transform-query.ts';\nimport {warmupConnections} from '../db/warmup.ts';\nimport {initEventSink} from '../observability/events.ts';\nimport {exitAfter, runUntilKilled} from '../services/life-cycle.ts';\nimport {MutagenService} from '../services/mutagen/mutagen.ts';\nimport {PusherService} from '../services/mutagen/pusher.ts';\nimport type {ReplicaState} from '../services/replicator/replicator.ts';\nimport type {DrainCoordinator} from '../services/view-syncer/drain-coordinator.ts';\nimport {PipelineDriver} from '../services/view-syncer/pipeline-driver.ts';\nimport {Snapshotter} from '../services/view-syncer/snapshotter.ts';\nimport {ViewSyncerService} from '../services/view-syncer/view-syncer.ts';\nimport {pgClient} from '../types/pg.ts';\nimport {\n parentWorker,\n singleProcessMode,\n type Worker,\n} from '../types/processes.ts';\nimport {getShardID} from '../types/shards.ts';\nimport type {Subscription} from '../types/subscription.ts';\nimport {replicaFileModeSchema, replicaFileName} from '../workers/replicator.ts';\nimport {Syncer} from '../workers/syncer.ts';\nimport {startAnonymousTelemetry} from './anonymous-otel-start.ts';\nimport {InspectorDelegate} from './inspector-delegate.ts';\nimport {createLogContext} from './logging.ts';\nimport {startOtelAuto} from './otel-start.ts';\n\nfunction randomID() {\n return randInt(1, Number.MAX_SAFE_INTEGER).toString(36);\n}\n\nfunction getCustomQueryConfig(\n config: Pick<NormalizedZeroConfig, 'query' | 'getQueries'>,\n) {\n const queryConfig = config.query?.url ? config.query : config.getQueries;\n\n if (!queryConfig?.url) {\n return undefined;\n }\n\n return {\n url: queryConfig.url,\n forwardCookies: queryConfig.forwardCookies ?? 
false,\n };\n}\n\nexport default function runWorker(\n parent: Worker,\n env: NodeJS.ProcessEnv,\n ...args: string[]\n): Promise<void> {\n const config = getNormalizedZeroConfig({env, argv: args.slice(1)});\n\n startOtelAuto(createLogContext(config, {worker: 'syncer'}, false));\n const lc = createLogContext(config, {worker: 'syncer'}, true);\n initEventSink(lc, config);\n\n assert(args.length > 0, `replicator mode not specified`);\n const fileMode = v.parse(args[0], replicaFileModeSchema);\n\n const {cvr, upstream} = config;\n assert(cvr.maxConnsPerWorker, 'cvr.maxConnsPerWorker must be set');\n assert(upstream.maxConnsPerWorker, 'upstream.maxConnsPerWorker must be set');\n\n const replicaFile = replicaFileName(config.replica.file, fileMode);\n lc.debug?.(`running view-syncer on ${replicaFile}`);\n\n const cvrDB = pgClient(lc, cvr.db, {\n max: cvr.maxConnsPerWorker,\n connection: {['application_name']: `zero-sync-worker-${pid}-cvr`},\n });\n\n const upstreamDB = pgClient(lc, upstream.db, {\n max: upstream.maxConnsPerWorker,\n connection: {['application_name']: `zero-sync-worker-${pid}-upstream`},\n });\n\n const dbWarmup = Promise.allSettled([\n warmupConnections(lc, cvrDB, 'cvr'),\n warmupConnections(lc, upstreamDB, 'upstream'),\n ]);\n\n const tmpDir = config.storageDBTmpDir ?? tmpdir();\n const operatorStorage = DatabaseStorage.create(\n lc,\n path.join(tmpDir, `sync-worker-${randomUUID()}`),\n );\n const writeAuthzStorage = DatabaseStorage.create(\n lc,\n path.join(tmpDir, `mutagen-${randomUUID()}`),\n );\n\n const shard = getShardID(config);\n\n const viewSyncerFactory = (\n id: string,\n sub: Subscription<ReplicaState>,\n drainCoordinator: DrainCoordinator,\n ) => {\n const logger = lc\n .withContext('component', 'view-syncer')\n .withContext('clientGroupID', id)\n .withContext('instance', randomID());\n lc.debug?.(\n `creating view syncer. Query Planner Enabled: ${config.enableQueryPlanner}`,\n );\n\n // Create the custom query transformer if configured\n const customQueryConfig = getCustomQueryConfig(config);\n const customQueryTransformer =\n customQueryConfig &&\n new CustomQueryTransformer(logger, customQueryConfig, shard);\n\n const inspectorDelegate = new InspectorDelegate(customQueryTransformer);\n\n return new ViewSyncerService(\n config,\n logger,\n shard,\n config.taskID,\n id,\n cvrDB,\n
+
{"version":3,"file":"syncer.js","sources":["../../../../../zero-cache/src/server/syncer.ts"],"sourcesContent":["import {randomUUID} from 'node:crypto';\nimport {tmpdir} from 'node:os';\nimport path from 'node:path';\nimport {pid} from 'node:process';\nimport {assert} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport {randInt} from '../../../shared/src/rand.ts';\nimport * as v from '../../../shared/src/valita.ts';\nimport {DatabaseStorage} from '../../../zqlite/src/database-storage.ts';\nimport type {NormalizedZeroConfig} from '../config/normalize.ts';\nimport {getNormalizedZeroConfig} from '../config/zero-config.ts';\nimport {CustomQueryTransformer} from '../custom-queries/transform-query.ts';\nimport {warmupConnections} from '../db/warmup.ts';\nimport {initEventSink} from '../observability/events.ts';\nimport {exitAfter, runUntilKilled} from '../services/life-cycle.ts';\nimport {MutagenService} from '../services/mutagen/mutagen.ts';\nimport {PusherService} from '../services/mutagen/pusher.ts';\nimport type {ReplicaState} from '../services/replicator/replicator.ts';\nimport type {DrainCoordinator} from '../services/view-syncer/drain-coordinator.ts';\nimport {PipelineDriver} from '../services/view-syncer/pipeline-driver.ts';\nimport {Snapshotter} from '../services/view-syncer/snapshotter.ts';\nimport {ViewSyncerService} from '../services/view-syncer/view-syncer.ts';\nimport {pgClient} from '../types/pg.ts';\nimport {\n parentWorker,\n singleProcessMode,\n type Worker,\n} from '../types/processes.ts';\nimport {getShardID} from '../types/shards.ts';\nimport type {Subscription} from '../types/subscription.ts';\nimport {replicaFileModeSchema, replicaFileName} from '../workers/replicator.ts';\nimport {Syncer} from '../workers/syncer.ts';\nimport {startAnonymousTelemetry} from './anonymous-otel-start.ts';\nimport {InspectorDelegate} from './inspector-delegate.ts';\nimport {createLogContext} from './logging.ts';\nimport {startOtelAuto} from './otel-start.ts';\nimport {isPriorityOpRunning, runPriorityOp} from './priority-op.ts';\n\nfunction randomID() {\n return randInt(1, Number.MAX_SAFE_INTEGER).toString(36);\n}\n\nfunction getCustomQueryConfig(\n config: Pick<NormalizedZeroConfig, 'query' | 'getQueries'>,\n) {\n const queryConfig = config.query?.url ? config.query : config.getQueries;\n\n if (!queryConfig?.url) {\n return undefined;\n }\n\n return {\n url: queryConfig.url,\n forwardCookies: queryConfig.forwardCookies ?? 
false,\n };\n}\n\nexport default function runWorker(\n parent: Worker,\n env: NodeJS.ProcessEnv,\n ...args: string[]\n): Promise<void> {\n const config = getNormalizedZeroConfig({env, argv: args.slice(1)});\n\n startOtelAuto(createLogContext(config, {worker: 'syncer'}, false));\n const lc = createLogContext(config, {worker: 'syncer'}, true);\n initEventSink(lc, config);\n\n assert(args.length > 0, `replicator mode not specified`);\n const fileMode = v.parse(args[0], replicaFileModeSchema);\n\n const {cvr, upstream} = config;\n assert(cvr.maxConnsPerWorker, 'cvr.maxConnsPerWorker must be set');\n assert(upstream.maxConnsPerWorker, 'upstream.maxConnsPerWorker must be set');\n\n const replicaFile = replicaFileName(config.replica.file, fileMode);\n lc.debug?.(`running view-syncer on ${replicaFile}`);\n\n const cvrDB = pgClient(lc, cvr.db, {\n max: cvr.maxConnsPerWorker,\n connection: {['application_name']: `zero-sync-worker-${pid}-cvr`},\n });\n\n const upstreamDB = pgClient(lc, upstream.db, {\n max: upstream.maxConnsPerWorker,\n connection: {['application_name']: `zero-sync-worker-${pid}-upstream`},\n });\n\n const dbWarmup = Promise.allSettled([\n warmupConnections(lc, cvrDB, 'cvr'),\n warmupConnections(lc, upstreamDB, 'upstream'),\n ]);\n\n const tmpDir = config.storageDBTmpDir ?? tmpdir();\n const operatorStorage = DatabaseStorage.create(\n lc,\n path.join(tmpDir, `sync-worker-${randomUUID()}`),\n );\n const writeAuthzStorage = DatabaseStorage.create(\n lc,\n path.join(tmpDir, `mutagen-${randomUUID()}`),\n );\n\n const shard = getShardID(config);\n\n const viewSyncerFactory = (\n id: string,\n sub: Subscription<ReplicaState>,\n drainCoordinator: DrainCoordinator,\n ) => {\n const logger = lc\n .withContext('component', 'view-syncer')\n .withContext('clientGroupID', id)\n .withContext('instance', randomID());\n lc.debug?.(\n `creating view syncer. Query Planner Enabled: ${config.enableQueryPlanner}`,\n );\n\n // Create the custom query transformer if configured\n const customQueryConfig = getCustomQueryConfig(config);\n const customQueryTransformer =\n customQueryConfig &&\n new CustomQueryTransformer(logger, customQueryConfig, shard);\n\n const inspectorDelegate = new InspectorDelegate(customQueryTransformer);\n\n const priorityOpRunningYieldThresholdMs = Math.max(\n config.yieldThresholdMs / 4,\n 2,\n );\n const normalYieldThresholdMs = Math.max(config.yieldThresholdMs, 2);\n return new ViewSyncerService(\n config,\n logger,\n shard,\n config.taskID,\n id,\n cvrDB,\n new PipelineDriver(\n logger,\n config.log,\n new Snapshotter(logger, replicaFile, shard),\n shard,\n operatorStorage.createClientGroupStorage(id),\n id,\n inspectorDelegate,\n () =>\n isPriorityOpRunning()\n ? priorityOpRunningYieldThresholdMs\n : normalYieldThresholdMs,\n config.enableQueryPlanner,\n ),\n sub,\n drainCoordinator,\n config.log.slowHydrateThreshold,\n inspectorDelegate,\n customQueryTransformer,\n runPriorityOp,\n );\n };\n\n const mutagenFactory = (id: string) =>\n new MutagenService(\n lc.withContext('component', 'mutagen').withContext('clientGroupID', id),\n shard,\n id,\n upstreamDB,\n config,\n writeAuthzStorage,\n );\n\n const pusherFactory =\n config.push.url === undefined && config.mutate.url === undefined\n ? undefined\n : (id: string) =>\n new PusherService(\n config,\n {\n ...config.push,\n ...config.mutate,\n url: must(\n config.push.url ?? 
config.mutate.url,\n 'No push or mutate URL configured',\n ),\n },\n lc.withContext('clientGroupID', id),\n id,\n );\n\n const syncer = new Syncer(\n lc,\n config,\n viewSyncerFactory,\n mutagenFactory,\n pusherFactory,\n parent,\n );\n\n startAnonymousTelemetry(lc, config);\n\n void dbWarmup.then(() => parent.send(['ready', {ready: true}]));\n\n return runUntilKilled(lc, parent, syncer);\n}\n\n// fork()\nif (!singleProcessMode()) {\n void exitAfter(() =>\n runWorker(must(parentWorker), process.env, ...process.argv.slice(2)),\n );\n}\n"],"names":["v.parse"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsCA,SAAS,WAAW;AAClB,SAAO,QAAQ,GAAG,OAAO,gBAAgB,EAAE,SAAS,EAAE;AACxD;AAEA,SAAS,qBACP,QACA;AACA,QAAM,cAAc,OAAO,OAAO,MAAM,OAAO,QAAQ,OAAO;AAE9D,MAAI,CAAC,aAAa,KAAK;AACrB,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,KAAK,YAAY;AAAA,IACjB,gBAAgB,YAAY,kBAAkB;AAAA,EAAA;AAElD;AAEA,SAAwB,UACtB,QACA,QACG,MACY;AACf,QAAM,SAAS,wBAAwB,EAAC,KAAK,MAAM,KAAK,MAAM,CAAC,GAAE;AAEjE,gBAAc,iBAAiB,QAAQ,EAAC,QAAQ,SAAA,GAAW,KAAK,CAAC;AACjE,QAAM,KAAK,iBAAiB,QAAQ,EAAC,QAAQ,SAAA,GAAW,IAAI;AAC5D,gBAAc,IAAI,MAAM;AAExB,SAAO,KAAK,SAAS,GAAG,+BAA+B;AACvD,QAAM,WAAWA,MAAQ,KAAK,CAAC,GAAG,qBAAqB;AAEvD,QAAM,EAAC,KAAK,SAAA,IAAY;AACxB,SAAO,IAAI,mBAAmB,mCAAmC;AACjE,SAAO,SAAS,mBAAmB,wCAAwC;AAE3E,QAAM,cAAc,gBAAgB,OAAO,QAAQ,MAAM,QAAQ;AACjE,KAAG,QAAQ,0BAA0B,WAAW,EAAE;AAElD,QAAM,QAAQ,SAAS,IAAI,IAAI,IAAI;AAAA,IACjC,KAAK,IAAI;AAAA,IACT,YAAY,EAAC,CAAC,kBAAkB,GAAG,oBAAoB,GAAG,OAAA;AAAA,EAAM,CACjE;AAED,QAAM,aAAa,SAAS,IAAI,SAAS,IAAI;AAAA,IAC3C,KAAK,SAAS;AAAA,IACd,YAAY,EAAC,CAAC,kBAAkB,GAAG,oBAAoB,GAAG,YAAA;AAAA,EAAW,CACtE;AAED,QAAM,WAAW,QAAQ,WAAW;AAAA,IAClC,kBAAkB,IAAI,OAAO,KAAK;AAAA,IAClC,kBAAkB,IAAI,YAAY,UAAU;AAAA,EAAA,CAC7C;AAED,QAAM,SAAS,OAAO,mBAAmB,OAAA;AACzC,QAAM,kBAAkB,gBAAgB;AAAA,IACtC;AAAA,IACA,KAAK,KAAK,QAAQ,eAAe,WAAA,CAAY,EAAE;AAAA,EAAA;AAEjD,QAAM,oBAAoB,gBAAgB;AAAA,IACxC;AAAA,IACA,KAAK,KAAK,QAAQ,WAAW,WAAA,CAAY,EAAE;AAAA,EAAA;AAG7C,QAAM,QAAQ,WAAW,MAAM;AAE/B,QAAM,oBAAoB,CACxB,IACA,KACA,qBACG;AACH,UAAM,SAAS,GACZ,YAAY,aAAa,aAAa,EACtC,YAAY,iBAAiB,EAAE,EAC/B,YAAY,YAAY,UAAU;AACrC,OAAG;AAAA,MACD,gDAAgD,OAAO,kBAAkB;AAAA,IAAA;AAI3E,UAAM,oBAAoB,qBAAqB,MAAM;AACrD,UAAM,yBACJ,qBACA,IAAI,uBAAuB,QAAQ,mBAAmB,KAAK;AAE7D,UAAM,oBAAoB,IAAI,kBAAkB,sBAAsB;AAEtE,UAAM,oCAAoC,KAAK;AAAA,MAC7C,OAAO,mBAAmB;AAAA,MAC1B;AAAA,IAAA;AAEF,UAAM,yBAAyB,KAAK,IAAI,OAAO,kBAAkB,CAAC;AAClE,WAAO,IAAI;AAAA,MACT;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA,IAAI;AAAA,QACF;AAAA,QACA,OAAO;AAAA,QACP,IAAI,YAAY,QAAQ,aAAa,KAAK;AAAA,QAC1C;AAAA,QACA,gBAAgB,yBAAyB,EAAE;AAAA,QAC3C;AAAA,QACA;AAAA,QACA,MACE,oBAAA,IACI,oCACA;AAAA,QACN,OAAO;AAAA,MAAA;AAAA,MAET;AAAA,MACA;AAAA,MACA,OAAO,IAAI;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAEA,QAAM,iBAAiB,CAAC,OACtB,IAAI;AAAA,IACF,GAAG,YAAY,aAAa,SAAS,EAAE,YAAY,iBAAiB,EAAE;AAAA,IACtE;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAGJ,QAAM,gBACJ,OAAO,KAAK,QAAQ,UAAa,OAAO,OAAO,QAAQ,SACnD,SACA,CAAC,OACC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,MACE,GAAG,OAAO;AAAA,MACV,GAAG,OAAO;AAAA,MACV,KAAK;AAAA,QACH,OAAO,KAAK,OAAO,OAAO,OAAO;AAAA,QACjC;AAAA,MAAA;AAAA,IACF;AAAA,IAEF,GAAG,YAAY,iBAAiB,EAAE;AAAA,IAClC;AAAA,EAAA;AAGV,QAAM,SAAS,IAAI;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAGF,0BAAwB,IAAI,MAAM;AAElC,OAAK,SAAS,KAAK,MAAM,OAAO,KAAK,CAAC,SAAS,EAAC,OAAO,KAAA,CAAK,CAAC,CAAC;AAE9D,SAAO,eAAe,IAAI,QAAQ,MAAM;AAC1C;AAGA,IAAI,CAAC,qBAAqB;AACxB,OAAK;AAAA,IAAU,MACb,UAAU,KAAK,YAAY,GAAG,QAAQ,KAAK,GAAG,QAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,EAAA;AAEvE;"}
@@ -64,7 +64,7 @@ async function analyzeQuery(lc, config, clientSchema, ast, syncedRows = true, ve
   computeZqlSpecs(lc, db, tableSpecs, fullTables);
   const planDebugger = joinPlans ? new AccumulatorDebugger() : void 0;
   const costModel = joinPlans ? createSQLiteCostModel(db, tableSpecs) : void 0;
-  const timer = await new TimeSliceTimer().start();
+  const timer = await new TimeSliceTimer(lc).start();
   const shouldYield = () => timer.elapsedLap() > TIME_SLICE_LAP_THRESHOLD_MS;
   const yieldProcess = () => timer.yieldProcess();
   const result = await runAst(
@@ -1 +1 @@
-
{"version":3,"file":"analyze.js","sources":["../../../../../zero-cache/src/services/analyze.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport type {AnalyzeQueryResult} from '../../../zero-protocol/src/analyze-query-result.ts';\nimport type {AST} from '../../../zero-protocol/src/ast.ts';\nimport type {PermissionsConfig} from '../../../zero-schema/src/compiled-permissions.ts';\nimport {Debug} from '../../../zql/src/builder/debug-delegate.ts';\nimport {MemoryStorage} from '../../../zql/src/ivm/memory-storage.ts';\nimport {\n AccumulatorDebugger,\n serializePlanDebugEvents,\n} from '../../../zql/src/planner/planner-debug.ts';\nimport {Database} from '../../../zqlite/src/db.ts';\nimport {explainQueries} from '../../../zqlite/src/explain-queries.ts';\nimport {createSQLiteCostModel} from '../../../zqlite/src/sqlite-cost-model.ts';\nimport {TableSource} from '../../../zqlite/src/table-source.ts';\nimport type {NormalizedZeroConfig} from '../config/normalize.ts';\nimport {computeZqlSpecs, mustGetTableSpec} from '../db/lite-tables.ts';\nimport type {LiteAndZqlSpec, LiteTableSpec} from '../db/specs.ts';\nimport {runAst} from './run-ast.ts';\nimport {TimeSliceTimer, type TokenData} from './view-syncer/view-syncer.ts';\nimport type {ClientSchema} from '../../../zero-protocol/src/client-schema.ts';\n\nconst TIME_SLICE_LAP_THRESHOLD_MS = 200;\n\nexport async function analyzeQuery(\n lc: LogContext,\n config: NormalizedZeroConfig,\n clientSchema: ClientSchema,\n ast: AST,\n syncedRows = true,\n vendedRows = false,\n permissions?: PermissionsConfig,\n authData?: TokenData,\n joinPlans = false,\n): Promise<AnalyzeQueryResult> {\n using db = new Database(lc, config.replica.file);\n const fullTables = new Map<string, LiteTableSpec>();\n const tableSpecs = new Map<string, LiteAndZqlSpec>();\n const tables = new Map<string, TableSource>();\n\n computeZqlSpecs(lc, db, tableSpecs, fullTables);\n\n const planDebugger = joinPlans ? new AccumulatorDebugger() : undefined;\n const costModel = joinPlans\n ? createSQLiteCostModel(db, tableSpecs)\n : undefined;\n const timer = await new TimeSliceTimer().start();\n const shouldYield = () => timer.elapsedLap() > TIME_SLICE_LAP_THRESHOLD_MS;\n const yieldProcess = () => timer.yieldProcess();\n const result = await runAst(\n lc,\n clientSchema,\n ast,\n true,\n {\n applyPermissions: permissions !== undefined,\n syncedRows,\n vendedRows,\n authData,\n db,\n tableSpecs,\n permissions,\n costModel,\n planDebugger,\n host: {\n debug: new Debug(),\n getSource(tableName: string) {\n let source = tables.get(tableName);\n if (source) {\n return source;\n }\n\n const tableSpec = mustGetTableSpec(tableSpecs, tableName);\n const {primaryKey} = tableSpec.tableSpec;\n\n source = new TableSource(\n lc,\n config.log,\n db,\n tableName,\n tableSpec.zqlSpec,\n primaryKey,\n shouldYield,\n );\n tables.set(tableName, source);\n return source;\n },\n createStorage() {\n return new MemoryStorage();\n },\n decorateSourceInput: input => input,\n decorateInput: input => input,\n addEdge() {},\n decorateFilterInput: input => input,\n },\n },\n yieldProcess,\n );\n\n result.sqlitePlans = explainQueries(result.readRowCountsByQuery ?? 
{}, db);\n\n if (planDebugger) {\n result.joinPlans = serializePlanDebugEvents(planDebugger.events);\n }\n\n return result;\n}\n"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqBA,MAAM,8BAA8B;AAEpC,eAAsB,aACpB,IACA,QACA,cACA,KACA,aAAa,MACb,aAAa,OACb,aACA,UACA,YAAY,OACiB;AAC7B;AAAA;AAAA,UAAM,KAAK,oBAAI,SAAS,IAAI,OAAO,QAAQ,IAAI;AAC/C,UAAM,iCAAiB,IAAA;AACvB,UAAM,iCAAiB,IAAA;AACvB,UAAM,6BAAa,IAAA;AAEnB,oBAAgB,IAAI,IAAI,YAAY,UAAU;AAE9C,UAAM,eAAe,YAAY,IAAI,oBAAA,IAAwB;AAC7D,UAAM,YAAY,YACd,sBAAsB,IAAI,UAAU,IACpC;AACJ,UAAM,QAAQ,MAAM,IAAI,
+
{"version":3,"file":"analyze.js","sources":["../../../../../zero-cache/src/services/analyze.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport type {AnalyzeQueryResult} from '../../../zero-protocol/src/analyze-query-result.ts';\nimport type {AST} from '../../../zero-protocol/src/ast.ts';\nimport type {PermissionsConfig} from '../../../zero-schema/src/compiled-permissions.ts';\nimport {Debug} from '../../../zql/src/builder/debug-delegate.ts';\nimport {MemoryStorage} from '../../../zql/src/ivm/memory-storage.ts';\nimport {\n AccumulatorDebugger,\n serializePlanDebugEvents,\n} from '../../../zql/src/planner/planner-debug.ts';\nimport {Database} from '../../../zqlite/src/db.ts';\nimport {explainQueries} from '../../../zqlite/src/explain-queries.ts';\nimport {createSQLiteCostModel} from '../../../zqlite/src/sqlite-cost-model.ts';\nimport {TableSource} from '../../../zqlite/src/table-source.ts';\nimport type {NormalizedZeroConfig} from '../config/normalize.ts';\nimport {computeZqlSpecs, mustGetTableSpec} from '../db/lite-tables.ts';\nimport type {LiteAndZqlSpec, LiteTableSpec} from '../db/specs.ts';\nimport {runAst} from './run-ast.ts';\nimport {TimeSliceTimer, type TokenData} from './view-syncer/view-syncer.ts';\nimport type {ClientSchema} from '../../../zero-protocol/src/client-schema.ts';\n\nconst TIME_SLICE_LAP_THRESHOLD_MS = 200;\n\nexport async function analyzeQuery(\n lc: LogContext,\n config: NormalizedZeroConfig,\n clientSchema: ClientSchema,\n ast: AST,\n syncedRows = true,\n vendedRows = false,\n permissions?: PermissionsConfig,\n authData?: TokenData,\n joinPlans = false,\n): Promise<AnalyzeQueryResult> {\n using db = new Database(lc, config.replica.file);\n const fullTables = new Map<string, LiteTableSpec>();\n const tableSpecs = new Map<string, LiteAndZqlSpec>();\n const tables = new Map<string, TableSource>();\n\n computeZqlSpecs(lc, db, tableSpecs, fullTables);\n\n const planDebugger = joinPlans ? new AccumulatorDebugger() : undefined;\n const costModel = joinPlans\n ? createSQLiteCostModel(db, tableSpecs)\n : undefined;\n const timer = await new TimeSliceTimer(lc).start();\n const shouldYield = () => timer.elapsedLap() > TIME_SLICE_LAP_THRESHOLD_MS;\n const yieldProcess = () => timer.yieldProcess();\n const result = await runAst(\n lc,\n clientSchema,\n ast,\n true,\n {\n applyPermissions: permissions !== undefined,\n syncedRows,\n vendedRows,\n authData,\n db,\n tableSpecs,\n permissions,\n costModel,\n planDebugger,\n host: {\n debug: new Debug(),\n getSource(tableName: string) {\n let source = tables.get(tableName);\n if (source) {\n return source;\n }\n\n const tableSpec = mustGetTableSpec(tableSpecs, tableName);\n const {primaryKey} = tableSpec.tableSpec;\n\n source = new TableSource(\n lc,\n config.log,\n db,\n tableName,\n tableSpec.zqlSpec,\n primaryKey,\n shouldYield,\n );\n tables.set(tableName, source);\n return source;\n },\n createStorage() {\n return new MemoryStorage();\n },\n decorateSourceInput: input => input,\n decorateInput: input => input,\n addEdge() {},\n decorateFilterInput: input => input,\n },\n },\n yieldProcess,\n );\n\n result.sqlitePlans = explainQueries(result.readRowCountsByQuery ?? 
{}, db);\n\n if (planDebugger) {\n result.joinPlans = serializePlanDebugEvents(planDebugger.events);\n }\n\n return result;\n}\n"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqBA,MAAM,8BAA8B;AAEpC,eAAsB,aACpB,IACA,QACA,cACA,KACA,aAAa,MACb,aAAa,OACb,aACA,UACA,YAAY,OACiB;AAC7B;AAAA;AAAA,UAAM,KAAK,oBAAI,SAAS,IAAI,OAAO,QAAQ,IAAI;AAC/C,UAAM,iCAAiB,IAAA;AACvB,UAAM,iCAAiB,IAAA;AACvB,UAAM,6BAAa,IAAA;AAEnB,oBAAgB,IAAI,IAAI,YAAY,UAAU;AAE9C,UAAM,eAAe,YAAY,IAAI,oBAAA,IAAwB;AAC7D,UAAM,YAAY,YACd,sBAAsB,IAAI,UAAU,IACpC;AACJ,UAAM,QAAQ,MAAM,IAAI,eAAe,EAAE,EAAE,MAAA;AAC3C,UAAM,cAAc,MAAM,MAAM,WAAA,IAAe;AAC/C,UAAM,eAAe,MAAM,MAAM,aAAA;AACjC,UAAM,SAAS,MAAM;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,QACE,kBAAkB,gBAAgB;AAAA,QAClC;AAAA,QACA;AAAA,QACA;AAAA,QAGA;AAAA,QACA;AAAA,QACA;AAAA,QACA,MAAM;AAAA,UACJ,OAAO,IAAI,MAAA;AAAA,UACX,UAAU,WAAmB;AAC3B,gBAAI,SAAS,OAAO,IAAI,SAAS;AACjC,gBAAI,QAAQ;AACV,qBAAO;AAAA,YACT;AAEA,kBAAM,YAAY,iBAAiB,YAAY,SAAS;AACxD,kBAAM,EAAC,eAAc,UAAU;AAE/B,qBAAS,IAAI;AAAA,cACX;AAAA,cACA,OAAO;AAAA,cACP;AAAA,cACA;AAAA,cACA,UAAU;AAAA,cACV;AAAA,cACA;AAAA,YAAA;AAEF,mBAAO,IAAI,WAAW,MAAM;AAC5B,mBAAO;AAAA,UACT;AAAA,UACA,gBAAgB;AACd,mBAAO,IAAI,cAAA;AAAA,UACb;AAAA,UACA,qBAAqB,CAAA,UAAS;AAAA,UAC9B,eAAe,CAAA,UAAS;AAAA,UACxB,UAAU;AAAA,UAAC;AAAA,UACX,qBAAqB,CAAA,UAAS;AAAA,QAAA;AAAA,MAChC;AAAA,MAEF;AAAA,IAAA;AAGF,WAAO,cAAc,eAAe,OAAO,wBAAwB,CAAA,GAAI,EAAE;AAEzE,QAAI,cAAc;AAChB,aAAO,YAAY,yBAAyB,aAAa,MAAM;AAAA,IACjE;AAEA,WAAO;AAAA,WAtEP;AAAA;AAAA;AAAA;AAAA;AAuEF;"}
@@ -1 +1 @@
-
{"version":3,"file":"change-source.d.ts","sourceRoot":"","sources":["../../../../../../../zero-cache/src/services/change-source/custom/change-source.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAMjD,OAAO,EAAC,QAAQ,EAAC,MAAM,iCAAiC,CAAC;AAGzD,OAAO,KAAK,EAAC,WAAW,EAAU,MAAM,0BAA0B,CAAC;AAEnE,OAAO,KAAK,EACV,YAAY,EAEb,MAAM,kDAAkD,CAAC;
+
{"version":3,"file":"change-source.d.ts","sourceRoot":"","sources":["../../../../../../../zero-cache/src/services/change-source/custom/change-source.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAMjD,OAAO,EAAC,QAAQ,EAAC,MAAM,iCAAiC,CAAC;AAGzD,OAAO,KAAK,EAAC,WAAW,EAAU,MAAM,0BAA0B,CAAC;AAEnE,OAAO,KAAK,EACV,YAAY,EAEb,MAAM,kDAAkD,CAAC;AAO1D,OAAO,EAGL,KAAK,iBAAiB,EACvB,MAAM,8CAA8C,CAAC;AAKtD;;;GAGG;AACH,wBAAsB,4BAA4B,CAChD,EAAE,EAAE,UAAU,EACd,WAAW,EAAE,MAAM,EACnB,KAAK,EAAE,WAAW,EAClB,aAAa,EAAE,MAAM,GACpB,OAAO,CAAC;IAAC,iBAAiB,EAAE,iBAAiB,CAAC;IAAC,YAAY,EAAE,YAAY,CAAA;CAAC,CAAC,CAgC7E;AA2DD;;;;;;;;;;GAUG;AACH,wBAAsB,WAAW,CAC/B,EAAE,EAAE,UAAU,EACd,KAAK,EAAE,WAAW,EAClB,EAAE,EAAE,QAAQ,EACZ,WAAW,EAAE,MAAM,iBA8EpB"}
@@ -9,7 +9,6 @@ import { stream } from "../../../types/streams.js";
 import { AutoResetSignal } from "../../change-streamer/schema/tables.js";
 import { ChangeProcessor } from "../../replicator/change-processor.js";
 import { ReplicationStatusPublisher } from "../../replicator/replication-status.js";
-import { initChangeLog } from "../../replicator/schema/change-log.js";
 import { getSubscriptionState, initReplicationState } from "../../replicator/schema/replication-state.js";
 import { changeStreamMessageSchema } from "../protocol/current/downstream.js";
 import "../protocol/current/status.js";
@@ -69,7 +68,10 @@ class CustomChangeSource {
       url.searchParams.append("publications", pub);
     }
     if (clientWatermark) {
-      assert(
+      assert(
+        replicaVersion.length,
+        "replicaVersion is required when clientWatermark is set"
+      );
       url.searchParams.set("lastWatermark", clientWatermark);
       url.searchParams.set("replicaVersion", replicaVersion);
     }
@@ -118,7 +120,6 @@ async function initialSync(lc, shard, tx, upstreamURI) {
             5e3
           );
           initReplicationState(tx, [...publications].sort(), commitWatermark);
-          initChangeLog(tx);
           processor.processMessage(lc, change);
           break;
         }
@@ -183,10 +184,6 @@ function getRequiredTables({
     [`${appID}.permissions`]: {
       permissions: { type: "json" },
       hash: { type: "string" }
-    },
-    [`${appID}.schemaVersions`]: {
-      minSupportedVersion: { type: "number" },
-      maxSupportedVersion: { type: "number" }
     }
   };
 }
@@ -1 +1 @@
-
{"version":3,"file":"change-source.js","sources":["../../../../../../../zero-cache/src/services/change-source/custom/change-source.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {WebSocket} from 'ws';\nimport {assert, unreachable} from '../../../../../shared/src/asserts.ts';\nimport {stringify} from '../../../../../shared/src/bigint-json.ts';\nimport {deepEqual} from '../../../../../shared/src/json.ts';\nimport type {SchemaValue} from '../../../../../zero-schema/src/table-schema.ts';\nimport {Database} from '../../../../../zqlite/src/db.ts';\nimport {computeZqlSpecs} from '../../../db/lite-tables.ts';\nimport {StatementRunner} from '../../../db/statements.ts';\nimport type {ShardConfig, ShardID} from '../../../types/shards.ts';\nimport {stream} from '../../../types/streams.ts';\nimport type {\n ChangeSource,\n ChangeStream,\n} from '../../change-streamer/change-streamer-service.ts';\nimport {\n AutoResetSignal,\n type ReplicationConfig,\n} from '../../change-streamer/schema/tables.ts';\nimport {ChangeProcessor} from '../../replicator/change-processor.ts';\nimport {ReplicationStatusPublisher} from '../../replicator/replication-status.ts';\nimport {initChangeLog} from '../../replicator/schema/change-log.ts';\nimport {\n getSubscriptionState,\n initReplicationState,\n type SubscriptionState,\n} from '../../replicator/schema/replication-state.ts';\nimport {changeStreamMessageSchema} from '../protocol/current/downstream.ts';\nimport {type ChangeSourceUpstream} from '../protocol/current/upstream.ts';\nimport {initSyncSchema} from './sync-schema.ts';\n\n/**\n * Initializes a Custom change source before streaming changes from the\n * corresponding logical replication stream.\n */\nexport async function initializeCustomChangeSource(\n lc: LogContext,\n upstreamURI: string,\n shard: ShardConfig,\n replicaDbFile: string,\n): Promise<{subscriptionState: SubscriptionState; changeSource: ChangeSource}> {\n await initSyncSchema(\n lc,\n `replica-${shard.appID}-${shard.shardNum}`,\n shard,\n replicaDbFile,\n upstreamURI,\n );\n\n const replica = new Database(lc, replicaDbFile);\n const subscriptionState = getSubscriptionState(new StatementRunner(replica));\n replica.close();\n\n if (shard.publications.length) {\n // Verify that the publications match what has been synced.\n const requested = [...shard.publications].sort();\n const replicated = subscriptionState.publications.sort();\n if (!deepEqual(requested, replicated)) {\n throw new Error(\n `Invalid ShardConfig. 
Requested publications [${requested}] do not match synced publications: [${replicated}]`,\n );\n }\n }\n\n const changeSource = new CustomChangeSource(\n lc,\n upstreamURI,\n shard,\n subscriptionState,\n );\n\n return {subscriptionState, changeSource};\n}\n\nclass CustomChangeSource implements ChangeSource {\n readonly #lc: LogContext;\n readonly #upstreamUri: string;\n readonly #shard: ShardID;\n readonly #replicationConfig: ReplicationConfig;\n\n constructor(\n lc: LogContext,\n upstreamUri: string,\n shard: ShardID,\n replicationConfig: ReplicationConfig,\n ) {\n this.#lc = lc.withContext('component', 'change-source');\n this.#upstreamUri = upstreamUri;\n this.#shard = shard;\n this.#replicationConfig = replicationConfig;\n }\n\n initialSync(): ChangeStream {\n return this.#startStream();\n }\n\n startStream(clientWatermark: string): Promise<ChangeStream> {\n return Promise.resolve(this.#startStream(clientWatermark));\n }\n\n #startStream(clientWatermark?: string): ChangeStream {\n const {publications, replicaVersion} = this.#replicationConfig;\n const {appID, shardNum} = this.#shard;\n const url = new URL(this.#upstreamUri);\n url.searchParams.set('appID', appID);\n url.searchParams.set('shardNum', String(shardNum));\n for (const pub of publications) {\n url.searchParams.append('publications', pub);\n }\n if (clientWatermark) {\n assert(replicaVersion.length);\n url.searchParams.set('lastWatermark', clientWatermark);\n url.searchParams.set('replicaVersion', replicaVersion);\n }\n\n const ws = new WebSocket(url);\n const {instream, outstream} = stream(\n this.#lc,\n ws,\n changeStreamMessageSchema,\n // Upstream acks coalesce. If upstream exhibits back-pressure,\n // only the last ACK is kept / buffered.\n {coalesce: (curr: ChangeSourceUpstream) => curr},\n );\n return {changes: instream, acks: outstream};\n }\n}\n\n/**\n * Initial sync for a custom change source makes a request to the\n * change source endpoint with no `replicaVersion` or `lastWatermark`.\n * The initial transaction returned by the endpoint is treated as\n * the initial sync, and the commit watermark of that transaction\n * becomes the `replicaVersion` of the initialized replica.\n *\n * Note that this is equivalent to how the LSN of the Postgres WAL\n * at initial sync time is the `replicaVersion` (and starting\n * version for all initially-synced rows).\n */\nexport async function initialSync(\n lc: LogContext,\n shard: ShardConfig,\n tx: Database,\n upstreamURI: string,\n) {\n const {appID: id, publications} = shard;\n const changeSource = new CustomChangeSource(lc, upstreamURI, shard, {\n replicaVersion: '', // ignored for initialSync()\n publications,\n });\n const {changes} = changeSource.initialSync();\n\n const processor = new ChangeProcessor(\n new StatementRunner(tx),\n 'initial-sync',\n (_, err) => {\n throw err;\n },\n );\n\n const statusPublisher = new ReplicationStatusPublisher(tx);\n try {\n let num = 0;\n for await (const change of changes) {\n const [tag] = change;\n switch (tag) {\n case 'begin': {\n const {commitWatermark} = change[2];\n lc.info?.(\n `initial sync of shard ${id} at replicaVersion ${commitWatermark}`,\n );\n statusPublisher.publish(\n lc,\n 'Initializing',\n `Copying upstream tables at version ${commitWatermark}`,\n 5000,\n );\n initReplicationState(tx, [...publications].sort(), commitWatermark);\n initChangeLog(tx);\n processor.processMessage(lc, change);\n break;\n }\n case 'data':\n processor.processMessage(lc, change);\n if (++num % 1000 === 0) {\n lc.debug?.(`processed ${num} 
changes`);\n }\n break;\n case 'commit':\n processor.processMessage(lc, change);\n validateInitiallySyncedData(lc, tx, shard);\n lc.info?.(`finished initial-sync of ${num} changes`);\n return;\n\n case 'status':\n break; // Ignored\n // @ts-expect-error: falls through if the tag is not 'reset-required\n case 'control': {\n const {tag, message} = change[1];\n if (tag === 'reset-required') {\n throw new AutoResetSignal(\n message ?? 'auto-reset signaled by change source',\n );\n }\n }\n // falls through\n case 'rollback':\n throw new Error(\n `unexpected message during initial-sync: ${stringify(change)}`,\n );\n default:\n unreachable(change);\n }\n }\n throw new Error(\n `change source ${upstreamURI} closed before initial-sync completed`,\n );\n } catch (e) {\n await statusPublisher.publishAndThrowError(lc, 'Initializing', e);\n } finally {\n statusPublisher.stop();\n }\n}\n\n// Verify that the upstream tables expected by the sync logic\n// have been properly initialized.\nfunction getRequiredTables({\n appID,\n shardNum,\n}: ShardID): Record<string, Record<string, SchemaValue>> {\n return {\n [`${appID}_${shardNum}.clients`]: {\n clientGroupID: {type: 'string'},\n clientID: {type: 'string'},\n lastMutationID: {type: 'number'},\n userID: {type: 'string'},\n },\n [`${appID}_${shardNum}.mutations`]: {\n clientGroupID: {type: 'string'},\n clientID: {type: 'string'},\n mutationID: {type: 'number'},\n mutation: {type: 'json'},\n },\n [`${appID}.permissions`]: {\n permissions: {type: 'json'},\n hash: {type: 'string'},\n },\n [`${appID}.schemaVersions`]: {\n minSupportedVersion: {type: 'number'},\n maxSupportedVersion: {type: 'number'},\n },\n };\n}\n\nfunction validateInitiallySyncedData(\n lc: LogContext,\n db: Database,\n shard: ShardID,\n) {\n const tables = computeZqlSpecs(lc, db);\n const required = getRequiredTables(shard);\n for (const [name, columns] of Object.entries(required)) {\n const table = tables.get(name)?.zqlSpec;\n if (!table) {\n throw new Error(\n `Upstream is missing the \"${name}\" table. 
(Found ${[\n ...tables.keys(),\n ]})` +\n `Please ensure that each table has a unique index over one ` +\n `or more non-null columns.`,\n );\n }\n for (const [col, {type}] of Object.entries(columns)) {\n const found = table[col];\n if (!found) {\n throw new Error(\n `Upstream \"${table}\" table is missing the \"${col}\" column`,\n );\n }\n if (found.type !== type) {\n throw new Error(\n `Upstream \"${table}.${col}\" column is a ${found.type} type but must be a ${type} type.`,\n );\n }\n }\n }\n}\n"],"names":["tag"],"mappings":";;;;;;;;;;;;;;;;AAmCA,eAAsB,6BACpB,IACA,aACA,OACA,eAC6E;AAC7E,QAAM;AAAA,IACJ;AAAA,IACA,WAAW,MAAM,KAAK,IAAI,MAAM,QAAQ;AAAA,IACxC;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAGF,QAAM,UAAU,IAAI,SAAS,IAAI,aAAa;AAC9C,QAAM,oBAAoB,qBAAqB,IAAI,gBAAgB,OAAO,CAAC;AAC3E,UAAQ,MAAA;AAER,MAAI,MAAM,aAAa,QAAQ;AAE7B,UAAM,YAAY,CAAC,GAAG,MAAM,YAAY,EAAE,KAAA;AAC1C,UAAM,aAAa,kBAAkB,aAAa,KAAA;AAClD,QAAI,CAAC,UAAU,WAAW,UAAU,GAAG;AACrC,YAAM,IAAI;AAAA,QACR,gDAAgD,SAAS,wCAAwC,UAAU;AAAA,MAAA;AAAA,IAE/G;AAAA,EACF;AAEA,QAAM,eAAe,IAAI;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAGF,SAAO,EAAC,mBAAmB,aAAA;AAC7B;AAEA,MAAM,mBAA2C;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,YACE,IACA,aACA,OACA,mBACA;AACA,SAAK,MAAM,GAAG,YAAY,aAAa,eAAe;AACtD,SAAK,eAAe;AACpB,SAAK,SAAS;AACd,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEA,cAA4B;AAC1B,WAAO,KAAK,aAAA;AAAA,EACd;AAAA,EAEA,YAAY,iBAAgD;AAC1D,WAAO,QAAQ,QAAQ,KAAK,aAAa,eAAe,CAAC;AAAA,EAC3D;AAAA,EAEA,aAAa,iBAAwC;AACnD,UAAM,EAAC,cAAc,eAAA,IAAkB,KAAK;AAC5C,UAAM,EAAC,OAAO,SAAA,IAAY,KAAK;AAC/B,UAAM,MAAM,IAAI,IAAI,KAAK,YAAY;AACrC,QAAI,aAAa,IAAI,SAAS,KAAK;AACnC,QAAI,aAAa,IAAI,YAAY,OAAO,QAAQ,CAAC;AACjD,eAAW,OAAO,cAAc;AAC9B,UAAI,aAAa,OAAO,gBAAgB,GAAG;AAAA,IAC7C;AACA,QAAI,iBAAiB;AACnB,aAAO,eAAe,MAAM;AAC5B,UAAI,aAAa,IAAI,iBAAiB,eAAe;AACrD,UAAI,aAAa,IAAI,kBAAkB,cAAc;AAAA,IACvD;AAEA,UAAM,KAAK,IAAI,UAAU,GAAG;AAC5B,UAAM,EAAC,UAAU,UAAA,IAAa;AAAA,MAC5B,KAAK;AAAA,MACL;AAAA,MACA;AAAA;AAAA;AAAA,MAGA,EAAC,UAAU,CAAC,SAA+B,KAAA;AAAA,IAAI;AAEjD,WAAO,EAAC,SAAS,UAAU,MAAM,UAAA;AAAA,EACnC;AACF;AAaA,eAAsB,YACpB,IACA,OACA,IACA,aACA;AACA,QAAM,EAAC,OAAO,IAAI,aAAA,IAAgB;AAClC,QAAM,eAAe,IAAI,mBAAmB,IAAI,aAAa,OAAO;AAAA,IAClE,gBAAgB;AAAA;AAAA,IAChB;AAAA,EAAA,CACD;AACD,QAAM,EAAC,QAAA,IAAW,aAAa,YAAA;AAE/B,QAAM,YAAY,IAAI;AAAA,IACpB,IAAI,gBAAgB,EAAE;AAAA,IACtB;AAAA,IACA,CAAC,GAAG,QAAQ;AACV,YAAM;AAAA,IACR;AAAA,EAAA;AAGF,QAAM,kBAAkB,IAAI,2BAA2B,EAAE;AACzD,MAAI;AACF,QAAI,MAAM;AACV,qBAAiB,UAAU,SAAS;AAClC,YAAM,CAAC,GAAG,IAAI;AACd,cAAQ,KAAA;AAAA,QACN,KAAK,SAAS;AACZ,gBAAM,EAAC,gBAAA,IAAmB,OAAO,CAAC;AAClC,aAAG;AAAA,YACD,yBAAyB,EAAE,sBAAsB,eAAe;AAAA,UAAA;AAElE,0BAAgB;AAAA,YACd;AAAA,YACA;AAAA,YACA,sCAAsC,eAAe;AAAA,YACrD;AAAA,UAAA;AAEF,+BAAqB,IAAI,CAAC,GAAG,YAAY,EAAE,KAAA,GAAQ,eAAe;AAClE,wBAAc,EAAE;AAChB,oBAAU,eAAe,IAAI,MAAM;AACnC;AAAA,QACF;AAAA,QACA,KAAK;AACH,oBAAU,eAAe,IAAI,MAAM;AACnC,cAAI,EAAE,MAAM,QAAS,GAAG;AACtB,eAAG,QAAQ,aAAa,GAAG,UAAU;AAAA,UACvC;AACA;AAAA,QACF,KAAK;AACH,oBAAU,eAAe,IAAI,MAAM;AACnC,sCAA4B,IAAI,IAAI,KAAK;AACzC,aAAG,OAAO,4BAA4B,GAAG,UAAU;AACnD;AAAA,QAEF,KAAK;AACH;AAAA;AAAA;AAAA,QAEF,KAAK,WAAW;AACd,gBAAM,EAAC,KAAAA,MAAK,QAAA,IAAW,OAAO,CAAC;AAC/B,cAAIA,SAAQ,kBAAkB;AAC5B,kBAAM,IAAI;AAAA,cACR,WAAW;AAAA,YAAA;AAAA,UAEf;AAAA,QACF;AAAA;AAAA,QAEA,KAAK;AACH,gBAAM,IAAI;AAAA,YACR,2CAA2C,UAAU,MAAM,CAAC;AAAA,UAAA;AAAA,QAEhE;AACE,sBAAY,MAAM;AAAA,MAAA;AAAA,IAExB;AACA,UAAM,IAAI;AAAA,MACR,iBAAiB,WAAW;AAAA,IAAA;AAAA,EAEhC,SAAS,GAAG;AACV,UAAM,gBAAgB,qBAAqB,IAAI,gBAAgB,CAAC;AAAA,EAClE,UAAA;AACE,oBAAgB,KAAA;AAAA,EAClB;AACF;AAIA,SAAS,kBAAkB;AAAA,EACzB;AAAA,EACA;AACF,GAAyD;AACvD,SAAO;AAAA,IACL,CAAC,GAAG,KAAK,IAAI,QAAQ,UAAU,GAAG;AAAA,
MAChC,eAAe,EAAC,MAAM,SAAA;AAAA,MACtB,UAAU,EAAC,MAAM,SAAA;AAAA,MACjB,gBAAgB,EAAC,MAAM,SAAA;AAAA,MACvB,QAAQ,EAAC,MAAM,SAAA;AAAA,IAAQ;AAAA,IAEzB,CAAC,GAAG,KAAK,IAAI,QAAQ,YAAY,GAAG;AAAA,MAClC,eAAe,EAAC,MAAM,SAAA;AAAA,MACtB,UAAU,EAAC,MAAM,SAAA;AAAA,MACjB,YAAY,EAAC,MAAM,SAAA;AAAA,MACnB,UAAU,EAAC,MAAM,OAAA;AAAA,IAAM;AAAA,IAEzB,CAAC,GAAG,KAAK,cAAc,GAAG;AAAA,MACxB,aAAa,EAAC,MAAM,OAAA;AAAA,MACpB,MAAM,EAAC,MAAM,SAAA;AAAA,IAAQ;AAAA,IAEvB,CAAC,GAAG,KAAK,iBAAiB,GAAG;AAAA,MAC3B,qBAAqB,EAAC,MAAM,SAAA;AAAA,MAC5B,qBAAqB,EAAC,MAAM,SAAA;AAAA,IAAQ;AAAA,EACtC;AAEJ;AAEA,SAAS,4BACP,IACA,IACA,OACA;AACA,QAAM,SAAS,gBAAgB,IAAI,EAAE;AACrC,QAAM,WAAW,kBAAkB,KAAK;AACxC,aAAW,CAAC,MAAM,OAAO,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACtD,UAAM,QAAQ,OAAO,IAAI,IAAI,GAAG;AAChC,QAAI,CAAC,OAAO;AACV,YAAM,IAAI;AAAA,QACR,4BAA4B,IAAI,mBAAmB;AAAA,UACjD,GAAG,OAAO,KAAA;AAAA,QAAK,CAChB;AAAA,MAAA;AAAA,IAIL;AACA,eAAW,CAAC,KAAK,EAAC,KAAA,CAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AACnD,YAAM,QAAQ,MAAM,GAAG;AACvB,UAAI,CAAC,OAAO;AACV,cAAM,IAAI;AAAA,UACR,aAAa,KAAK,2BAA2B,GAAG;AAAA,QAAA;AAAA,MAEpD;AACA,UAAI,MAAM,SAAS,MAAM;AACvB,cAAM,IAAI;AAAA,UACR,aAAa,KAAK,IAAI,GAAG,iBAAiB,MAAM,IAAI,uBAAuB,IAAI;AAAA,QAAA;AAAA,MAEnF;AAAA,IACF;AAAA,EACF;AACF;"}
+
{"version":3,"file":"change-source.js","sources":["../../../../../../../zero-cache/src/services/change-source/custom/change-source.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {WebSocket} from 'ws';\nimport {assert, unreachable} from '../../../../../shared/src/asserts.ts';\nimport {stringify} from '../../../../../shared/src/bigint-json.ts';\nimport {deepEqual} from '../../../../../shared/src/json.ts';\nimport type {SchemaValue} from '../../../../../zero-schema/src/table-schema.ts';\nimport {Database} from '../../../../../zqlite/src/db.ts';\nimport {computeZqlSpecs} from '../../../db/lite-tables.ts';\nimport {StatementRunner} from '../../../db/statements.ts';\nimport type {ShardConfig, ShardID} from '../../../types/shards.ts';\nimport {stream} from '../../../types/streams.ts';\nimport type {\n ChangeSource,\n ChangeStream,\n} from '../../change-streamer/change-streamer-service.ts';\nimport {\n AutoResetSignal,\n type ReplicationConfig,\n} from '../../change-streamer/schema/tables.ts';\nimport {ChangeProcessor} from '../../replicator/change-processor.ts';\nimport {ReplicationStatusPublisher} from '../../replicator/replication-status.ts';\nimport {\n getSubscriptionState,\n initReplicationState,\n type SubscriptionState,\n} from '../../replicator/schema/replication-state.ts';\nimport {changeStreamMessageSchema} from '../protocol/current/downstream.ts';\nimport {type ChangeSourceUpstream} from '../protocol/current/upstream.ts';\nimport {initSyncSchema} from './sync-schema.ts';\n\n/**\n * Initializes a Custom change source before streaming changes from the\n * corresponding logical replication stream.\n */\nexport async function initializeCustomChangeSource(\n lc: LogContext,\n upstreamURI: string,\n shard: ShardConfig,\n replicaDbFile: string,\n): Promise<{subscriptionState: SubscriptionState; changeSource: ChangeSource}> {\n await initSyncSchema(\n lc,\n `replica-${shard.appID}-${shard.shardNum}`,\n shard,\n replicaDbFile,\n upstreamURI,\n );\n\n const replica = new Database(lc, replicaDbFile);\n const subscriptionState = getSubscriptionState(new StatementRunner(replica));\n replica.close();\n\n if (shard.publications.length) {\n // Verify that the publications match what has been synced.\n const requested = [...shard.publications].sort();\n const replicated = subscriptionState.publications.sort();\n if (!deepEqual(requested, replicated)) {\n throw new Error(\n `Invalid ShardConfig. 
Requested publications [${requested}] do not match synced publications: [${replicated}]`,\n );\n }\n }\n\n const changeSource = new CustomChangeSource(\n lc,\n upstreamURI,\n shard,\n subscriptionState,\n );\n\n return {subscriptionState, changeSource};\n}\n\nclass CustomChangeSource implements ChangeSource {\n readonly #lc: LogContext;\n readonly #upstreamUri: string;\n readonly #shard: ShardID;\n readonly #replicationConfig: ReplicationConfig;\n\n constructor(\n lc: LogContext,\n upstreamUri: string,\n shard: ShardID,\n replicationConfig: ReplicationConfig,\n ) {\n this.#lc = lc.withContext('component', 'change-source');\n this.#upstreamUri = upstreamUri;\n this.#shard = shard;\n this.#replicationConfig = replicationConfig;\n }\n\n initialSync(): ChangeStream {\n return this.#startStream();\n }\n\n startStream(clientWatermark: string): Promise<ChangeStream> {\n return Promise.resolve(this.#startStream(clientWatermark));\n }\n\n #startStream(clientWatermark?: string): ChangeStream {\n const {publications, replicaVersion} = this.#replicationConfig;\n const {appID, shardNum} = this.#shard;\n const url = new URL(this.#upstreamUri);\n url.searchParams.set('appID', appID);\n url.searchParams.set('shardNum', String(shardNum));\n for (const pub of publications) {\n url.searchParams.append('publications', pub);\n }\n if (clientWatermark) {\n assert(\n replicaVersion.length,\n 'replicaVersion is required when clientWatermark is set',\n );\n url.searchParams.set('lastWatermark', clientWatermark);\n url.searchParams.set('replicaVersion', replicaVersion);\n }\n\n const ws = new WebSocket(url);\n const {instream, outstream} = stream(\n this.#lc,\n ws,\n changeStreamMessageSchema,\n // Upstream acks coalesce. If upstream exhibits back-pressure,\n // only the last ACK is kept / buffered.\n {coalesce: (curr: ChangeSourceUpstream) => curr},\n );\n return {changes: instream, acks: outstream};\n }\n}\n\n/**\n * Initial sync for a custom change source makes a request to the\n * change source endpoint with no `replicaVersion` or `lastWatermark`.\n * The initial transaction returned by the endpoint is treated as\n * the initial sync, and the commit watermark of that transaction\n * becomes the `replicaVersion` of the initialized replica.\n *\n * Note that this is equivalent to how the LSN of the Postgres WAL\n * at initial sync time is the `replicaVersion` (and starting\n * version for all initially-synced rows).\n */\nexport async function initialSync(\n lc: LogContext,\n shard: ShardConfig,\n tx: Database,\n upstreamURI: string,\n) {\n const {appID: id, publications} = shard;\n const changeSource = new CustomChangeSource(lc, upstreamURI, shard, {\n replicaVersion: '', // ignored for initialSync()\n publications,\n });\n const {changes} = changeSource.initialSync();\n\n const processor = new ChangeProcessor(\n new StatementRunner(tx),\n 'initial-sync',\n (_, err) => {\n throw err;\n },\n );\n\n const statusPublisher = new ReplicationStatusPublisher(tx);\n try {\n let num = 0;\n for await (const change of changes) {\n const [tag] = change;\n switch (tag) {\n case 'begin': {\n const {commitWatermark} = change[2];\n lc.info?.(\n `initial sync of shard ${id} at replicaVersion ${commitWatermark}`,\n );\n statusPublisher.publish(\n lc,\n 'Initializing',\n `Copying upstream tables at version ${commitWatermark}`,\n 5000,\n );\n initReplicationState(tx, [...publications].sort(), commitWatermark);\n processor.processMessage(lc, change);\n break;\n }\n case 'data':\n processor.processMessage(lc, change);\n if (++num % 
1000 === 0) {\n lc.debug?.(`processed ${num} changes`);\n }\n break;\n case 'commit':\n processor.processMessage(lc, change);\n validateInitiallySyncedData(lc, tx, shard);\n lc.info?.(`finished initial-sync of ${num} changes`);\n return;\n\n case 'status':\n break; // Ignored\n // @ts-expect-error: falls through if the tag is not 'reset-required\n case 'control': {\n const {tag, message} = change[1];\n if (tag === 'reset-required') {\n throw new AutoResetSignal(\n message ?? 'auto-reset signaled by change source',\n );\n }\n }\n // falls through\n case 'rollback':\n throw new Error(\n `unexpected message during initial-sync: ${stringify(change)}`,\n );\n default:\n unreachable(change);\n }\n }\n throw new Error(\n `change source ${upstreamURI} closed before initial-sync completed`,\n );\n } catch (e) {\n await statusPublisher.publishAndThrowError(lc, 'Initializing', e);\n } finally {\n statusPublisher.stop();\n }\n}\n\n// Verify that the upstream tables expected by the sync logic\n// have been properly initialized.\nfunction getRequiredTables({\n appID,\n shardNum,\n}: ShardID): Record<string, Record<string, SchemaValue>> {\n return {\n [`${appID}_${shardNum}.clients`]: {\n clientGroupID: {type: 'string'},\n clientID: {type: 'string'},\n lastMutationID: {type: 'number'},\n userID: {type: 'string'},\n },\n [`${appID}_${shardNum}.mutations`]: {\n clientGroupID: {type: 'string'},\n clientID: {type: 'string'},\n mutationID: {type: 'number'},\n mutation: {type: 'json'},\n },\n [`${appID}.permissions`]: {\n permissions: {type: 'json'},\n hash: {type: 'string'},\n },\n };\n}\n\nfunction validateInitiallySyncedData(\n lc: LogContext,\n db: Database,\n shard: ShardID,\n) {\n const tables = computeZqlSpecs(lc, db);\n const required = getRequiredTables(shard);\n for (const [name, columns] of Object.entries(required)) {\n const table = tables.get(name)?.zqlSpec;\n if (!table) {\n throw new Error(\n `Upstream is missing the \"${name}\" table. 
(Found ${[\n ...tables.keys(),\n ]})` +\n `Please ensure that each table has a unique index over one ` +\n `or more non-null columns.`,\n );\n }\n for (const [col, {type}] of Object.entries(columns)) {\n const found = table[col];\n if (!found) {\n throw new Error(\n `Upstream \"${table}\" table is missing the \"${col}\" column`,\n );\n }\n if (found.type !== type) {\n throw new Error(\n `Upstream \"${table}.${col}\" column is a ${found.type} type but must be a ${type} type.`,\n );\n }\n }\n }\n}\n"],"names":["tag"],"mappings":";;;;;;;;;;;;;;;AAkCA,eAAsB,6BACpB,IACA,aACA,OACA,eAC6E;AAC7E,QAAM;AAAA,IACJ;AAAA,IACA,WAAW,MAAM,KAAK,IAAI,MAAM,QAAQ;AAAA,IACxC;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAGF,QAAM,UAAU,IAAI,SAAS,IAAI,aAAa;AAC9C,QAAM,oBAAoB,qBAAqB,IAAI,gBAAgB,OAAO,CAAC;AAC3E,UAAQ,MAAA;AAER,MAAI,MAAM,aAAa,QAAQ;AAE7B,UAAM,YAAY,CAAC,GAAG,MAAM,YAAY,EAAE,KAAA;AAC1C,UAAM,aAAa,kBAAkB,aAAa,KAAA;AAClD,QAAI,CAAC,UAAU,WAAW,UAAU,GAAG;AACrC,YAAM,IAAI;AAAA,QACR,gDAAgD,SAAS,wCAAwC,UAAU;AAAA,MAAA;AAAA,IAE/G;AAAA,EACF;AAEA,QAAM,eAAe,IAAI;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAGF,SAAO,EAAC,mBAAmB,aAAA;AAC7B;AAEA,MAAM,mBAA2C;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,YACE,IACA,aACA,OACA,mBACA;AACA,SAAK,MAAM,GAAG,YAAY,aAAa,eAAe;AACtD,SAAK,eAAe;AACpB,SAAK,SAAS;AACd,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEA,cAA4B;AAC1B,WAAO,KAAK,aAAA;AAAA,EACd;AAAA,EAEA,YAAY,iBAAgD;AAC1D,WAAO,QAAQ,QAAQ,KAAK,aAAa,eAAe,CAAC;AAAA,EAC3D;AAAA,EAEA,aAAa,iBAAwC;AACnD,UAAM,EAAC,cAAc,eAAA,IAAkB,KAAK;AAC5C,UAAM,EAAC,OAAO,SAAA,IAAY,KAAK;AAC/B,UAAM,MAAM,IAAI,IAAI,KAAK,YAAY;AACrC,QAAI,aAAa,IAAI,SAAS,KAAK;AACnC,QAAI,aAAa,IAAI,YAAY,OAAO,QAAQ,CAAC;AACjD,eAAW,OAAO,cAAc;AAC9B,UAAI,aAAa,OAAO,gBAAgB,GAAG;AAAA,IAC7C;AACA,QAAI,iBAAiB;AACnB;AAAA,QACE,eAAe;AAAA,QACf;AAAA,MAAA;AAEF,UAAI,aAAa,IAAI,iBAAiB,eAAe;AACrD,UAAI,aAAa,IAAI,kBAAkB,cAAc;AAAA,IACvD;AAEA,UAAM,KAAK,IAAI,UAAU,GAAG;AAC5B,UAAM,EAAC,UAAU,UAAA,IAAa;AAAA,MAC5B,KAAK;AAAA,MACL;AAAA,MACA;AAAA;AAAA;AAAA,MAGA,EAAC,UAAU,CAAC,SAA+B,KAAA;AAAA,IAAI;AAEjD,WAAO,EAAC,SAAS,UAAU,MAAM,UAAA;AAAA,EACnC;AACF;AAaA,eAAsB,YACpB,IACA,OACA,IACA,aACA;AACA,QAAM,EAAC,OAAO,IAAI,aAAA,IAAgB;AAClC,QAAM,eAAe,IAAI,mBAAmB,IAAI,aAAa,OAAO;AAAA,IAClE,gBAAgB;AAAA;AAAA,IAChB;AAAA,EAAA,CACD;AACD,QAAM,EAAC,QAAA,IAAW,aAAa,YAAA;AAE/B,QAAM,YAAY,IAAI;AAAA,IACpB,IAAI,gBAAgB,EAAE;AAAA,IACtB;AAAA,IACA,CAAC,GAAG,QAAQ;AACV,YAAM;AAAA,IACR;AAAA,EAAA;AAGF,QAAM,kBAAkB,IAAI,2BAA2B,EAAE;AACzD,MAAI;AACF,QAAI,MAAM;AACV,qBAAiB,UAAU,SAAS;AAClC,YAAM,CAAC,GAAG,IAAI;AACd,cAAQ,KAAA;AAAA,QACN,KAAK,SAAS;AACZ,gBAAM,EAAC,gBAAA,IAAmB,OAAO,CAAC;AAClC,aAAG;AAAA,YACD,yBAAyB,EAAE,sBAAsB,eAAe;AAAA,UAAA;AAElE,0BAAgB;AAAA,YACd;AAAA,YACA;AAAA,YACA,sCAAsC,eAAe;AAAA,YACrD;AAAA,UAAA;AAEF,+BAAqB,IAAI,CAAC,GAAG,YAAY,EAAE,KAAA,GAAQ,eAAe;AAClE,oBAAU,eAAe,IAAI,MAAM;AACnC;AAAA,QACF;AAAA,QACA,KAAK;AACH,oBAAU,eAAe,IAAI,MAAM;AACnC,cAAI,EAAE,MAAM,QAAS,GAAG;AACtB,eAAG,QAAQ,aAAa,GAAG,UAAU;AAAA,UACvC;AACA;AAAA,QACF,KAAK;AACH,oBAAU,eAAe,IAAI,MAAM;AACnC,sCAA4B,IAAI,IAAI,KAAK;AACzC,aAAG,OAAO,4BAA4B,GAAG,UAAU;AACnD;AAAA,QAEF,KAAK;AACH;AAAA;AAAA;AAAA,QAEF,KAAK,WAAW;AACd,gBAAM,EAAC,KAAAA,MAAK,QAAA,IAAW,OAAO,CAAC;AAC/B,cAAIA,SAAQ,kBAAkB;AAC5B,kBAAM,IAAI;AAAA,cACR,WAAW;AAAA,YAAA;AAAA,UAEf;AAAA,QACF;AAAA;AAAA,QAEA,KAAK;AACH,gBAAM,IAAI;AAAA,YACR,2CAA2C,UAAU,MAAM,CAAC;AAAA,UAAA;AAAA,QAEhE;AACE,sBAAY,MAAM;AAAA,MAAA;AAAA,IAExB;AACA,UAAM,IAAI;AAAA,MACR,iBAAiB,WAAW;AAAA,IAAA;AAAA,EAEhC,SAAS,GAAG;AACV,UAAM,gBAAgB,qBAAqB,IAAI,gBAAgB,CAAC;AAAA,EAClE,UAAA;AACE,oBAAgB,KAAA;AAAA,EAClB;AACF;AAIA,SAAS,kBAAkB;AAAA,EACzB;AAAA,EACA;AACF,GAAyD;AACvD,SAAO;AAAA,IACL,CAAC,GAAG,KAAK,IAAI,QAAQ,UAAU,GAAG;AAAA
,MAChC,eAAe,EAAC,MAAM,SAAA;AAAA,MACtB,UAAU,EAAC,MAAM,SAAA;AAAA,MACjB,gBAAgB,EAAC,MAAM,SAAA;AAAA,MACvB,QAAQ,EAAC,MAAM,SAAA;AAAA,IAAQ;AAAA,IAEzB,CAAC,GAAG,KAAK,IAAI,QAAQ,YAAY,GAAG;AAAA,MAClC,eAAe,EAAC,MAAM,SAAA;AAAA,MACtB,UAAU,EAAC,MAAM,SAAA;AAAA,MACjB,YAAY,EAAC,MAAM,SAAA;AAAA,MACnB,UAAU,EAAC,MAAM,OAAA;AAAA,IAAM;AAAA,IAEzB,CAAC,GAAG,KAAK,cAAc,GAAG;AAAA,MACxB,aAAa,EAAC,MAAM,OAAA;AAAA,MACpB,MAAM,EAAC,MAAM,SAAA;AAAA,IAAQ;AAAA,EACvB;AAEJ;AAEA,SAAS,4BACP,IACA,IACA,OACA;AACA,QAAM,SAAS,gBAAgB,IAAI,EAAE;AACrC,QAAM,WAAW,kBAAkB,KAAK;AACxC,aAAW,CAAC,MAAM,OAAO,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACtD,UAAM,QAAQ,OAAO,IAAI,IAAI,GAAG;AAChC,QAAI,CAAC,OAAO;AACV,YAAM,IAAI;AAAA,QACR,4BAA4B,IAAI,mBAAmB;AAAA,UACjD,GAAG,OAAO,KAAA;AAAA,QAAK,CAChB;AAAA,MAAA;AAAA,IAIL;AACA,eAAW,CAAC,KAAK,EAAC,KAAA,CAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AACnD,YAAM,QAAQ,MAAM,GAAG;AACvB,UAAI,CAAC,OAAO;AACV,cAAM,IAAI;AAAA,UACR,aAAa,KAAK,2BAA2B,GAAG;AAAA,QAAA;AAAA,MAEpD;AACA,UAAI,MAAM,SAAS,MAAM;AACvB,cAAM,IAAI;AAAA,UACR,aAAa,KAAK,IAAI,GAAG,iBAAiB,MAAM,IAAI,uBAAuB,IAAI;AAAA,QAAA;AAAA,MAEnF;AAAA,IACF;AAAA,EACF;AACF;"}
@@ -1 +1 @@
-
{"version":3,"file":"change-source.d.ts","sourceRoot":"","sources":["../../../../../../../zero-cache/src/services/change-source/pg/change-source.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;
+
{"version":3,"file":"change-source.d.ts","sourceRoot":"","sources":["../../../../../../../zero-cache/src/services/change-source/pg/change-source.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAkBjD,OAAO,KAAK,EAAa,kBAAkB,EAAC,MAAM,sBAAsB,CAAC;AAEzE,OAAO,EAIL,KAAK,WAAW,EACjB,MAAM,gCAAgC,CAAC;AAExC,OAAO,EAEL,KAAK,WAAW,EAEjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,2BAA2B,CAAC;AAEpD,OAAO,KAAK,EACV,YAAY,EAEb,MAAM,kDAAkD,CAAC;AAE1D,OAAO,EAEL,KAAK,iBAAiB,EACvB,MAAM,8CAA8C,CAAC;AAatD,OAAO,EAAC,KAAK,kBAAkB,EAAC,MAAM,mBAAmB,CAAC;AAC1D,OAAO,KAAK,EAGV,eAAe,IAAI,gBAAgB,EACpC,MAAM,yCAAyC,CAAC;AAwBjD;;;;GAIG;AACH,wBAAsB,8BAA8B,CAClD,EAAE,EAAE,UAAU,EACd,WAAW,EAAE,MAAM,EACnB,KAAK,EAAE,WAAW,EAClB,aAAa,EAAE,MAAM,EACrB,WAAW,EAAE,kBAAkB,GAC9B,OAAO,CAAC;IAAC,iBAAiB,EAAE,iBAAiB,CAAC;IAAC,YAAY,EAAE,YAAY,CAAA;CAAC,CAAC,CAoC7E;AA6PD,qBAAa,KAAK;;gBAIJ,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC;IAI9B,SAAS;IAiBT,GAAG,CAAC,SAAS,EAAE,WAAW;CAa3B;AAieD,wBAAgB,iBAAiB,CAAC,CAAC,EAAE,kBAAkB,EAAE,CAAC,EAAE,gBAAgB,WAwB3E"}