@rocicorp/zero 0.26.0-canary.0 → 0.26.0-canary.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138)
  1. package/out/shared/src/custom-key-map.d.ts +4 -4
  2. package/out/shared/src/custom-key-map.d.ts.map +1 -1
  3. package/out/shared/src/custom-key-map.js.map +1 -1
  4. package/out/shared/src/iterables.d.ts +6 -8
  5. package/out/shared/src/iterables.d.ts.map +1 -1
  6. package/out/shared/src/iterables.js +13 -7
  7. package/out/shared/src/iterables.js.map +1 -1
  8. package/out/shared/src/options.d.ts +1 -0
  9. package/out/shared/src/options.d.ts.map +1 -1
  10. package/out/shared/src/options.js +5 -1
  11. package/out/shared/src/options.js.map +1 -1
  12. package/out/zero/package.json.js +1 -1
  13. package/out/zero/src/zero-cache-dev.js +7 -3
  14. package/out/zero/src/zero-cache-dev.js.map +1 -1
  15. package/out/zero-cache/src/config/zero-config.d.ts +10 -1
  16. package/out/zero-cache/src/config/zero-config.d.ts.map +1 -1
  17. package/out/zero-cache/src/config/zero-config.js +27 -7
  18. package/out/zero-cache/src/config/zero-config.js.map +1 -1
  19. package/out/zero-cache/src/observability/events.d.ts.map +1 -1
  20. package/out/zero-cache/src/observability/events.js +15 -5
  21. package/out/zero-cache/src/observability/events.js.map +1 -1
  22. package/out/zero-cache/src/server/change-streamer.d.ts.map +1 -1
  23. package/out/zero-cache/src/server/change-streamer.js +10 -2
  24. package/out/zero-cache/src/server/change-streamer.js.map +1 -1
  25. package/out/zero-cache/src/server/syncer.d.ts +1 -0
  26. package/out/zero-cache/src/server/syncer.d.ts.map +1 -1
  27. package/out/zero-cache/src/server/syncer.js +22 -4
  28. package/out/zero-cache/src/server/syncer.js.map +1 -1
  29. package/out/zero-cache/src/services/change-source/custom/change-source.js +0 -4
  30. package/out/zero-cache/src/services/change-source/custom/change-source.js.map +1 -1
  31. package/out/zero-cache/src/services/change-source/pg/schema/ddl.d.ts.map +1 -1
  32. package/out/zero-cache/src/services/change-source/pg/schema/ddl.js +1 -10
  33. package/out/zero-cache/src/services/change-source/pg/schema/ddl.js.map +1 -1
  34. package/out/zero-cache/src/services/change-source/pg/schema/init.d.ts.map +1 -1
  35. package/out/zero-cache/src/services/change-source/pg/schema/init.js +8 -2
  36. package/out/zero-cache/src/services/change-source/pg/schema/init.js.map +1 -1
  37. package/out/zero-cache/src/services/change-source/pg/schema/shard.d.ts.map +1 -1
  38. package/out/zero-cache/src/services/change-source/pg/schema/shard.js +1 -14
  39. package/out/zero-cache/src/services/change-source/pg/schema/shard.js.map +1 -1
  40. package/out/zero-cache/src/services/change-source/replica-schema.d.ts.map +1 -1
  41. package/out/zero-cache/src/services/change-source/replica-schema.js +8 -1
  42. package/out/zero-cache/src/services/change-source/replica-schema.js.map +1 -1
  43. package/out/zero-cache/src/services/change-streamer/change-streamer-service.d.ts +1 -1
  44. package/out/zero-cache/src/services/change-streamer/change-streamer-service.d.ts.map +1 -1
  45. package/out/zero-cache/src/services/change-streamer/change-streamer-service.js +5 -3
  46. package/out/zero-cache/src/services/change-streamer/change-streamer-service.js.map +1 -1
  47. package/out/zero-cache/src/services/change-streamer/storer.d.ts +1 -1
  48. package/out/zero-cache/src/services/change-streamer/storer.d.ts.map +1 -1
  49. package/out/zero-cache/src/services/change-streamer/storer.js +16 -5
  50. package/out/zero-cache/src/services/change-streamer/storer.js.map +1 -1
  51. package/out/zero-cache/src/services/life-cycle.d.ts +1 -1
  52. package/out/zero-cache/src/services/life-cycle.d.ts.map +1 -1
  53. package/out/zero-cache/src/services/life-cycle.js.map +1 -1
  54. package/out/zero-cache/src/services/litestream/commands.d.ts.map +1 -1
  55. package/out/zero-cache/src/services/litestream/commands.js +3 -1
  56. package/out/zero-cache/src/services/litestream/commands.js.map +1 -1
  57. package/out/zero-cache/src/services/litestream/config.yml +1 -0
  58. package/out/zero-cache/src/services/mutagen/mutagen.d.ts +4 -4
  59. package/out/zero-cache/src/services/mutagen/mutagen.d.ts.map +1 -1
  60. package/out/zero-cache/src/services/mutagen/mutagen.js +9 -24
  61. package/out/zero-cache/src/services/mutagen/mutagen.js.map +1 -1
  62. package/out/zero-cache/src/services/mutagen/pusher.d.ts +1 -2
  63. package/out/zero-cache/src/services/mutagen/pusher.d.ts.map +1 -1
  64. package/out/zero-cache/src/services/mutagen/pusher.js +51 -12
  65. package/out/zero-cache/src/services/mutagen/pusher.js.map +1 -1
  66. package/out/zero-cache/src/services/replicator/change-processor.js +4 -3
  67. package/out/zero-cache/src/services/replicator/change-processor.js.map +1 -1
  68. package/out/zero-cache/src/services/replicator/schema/change-log.d.ts +3 -2
  69. package/out/zero-cache/src/services/replicator/schema/change-log.d.ts.map +1 -1
  70. package/out/zero-cache/src/services/replicator/schema/change-log.js +36 -31
  71. package/out/zero-cache/src/services/replicator/schema/change-log.js.map +1 -1
  72. package/out/zero-cache/src/services/view-syncer/client-handler.d.ts +5 -6
  73. package/out/zero-cache/src/services/view-syncer/client-handler.d.ts.map +1 -1
  74. package/out/zero-cache/src/services/view-syncer/client-handler.js +5 -23
  75. package/out/zero-cache/src/services/view-syncer/client-handler.js.map +1 -1
  76. package/out/zero-cache/src/services/view-syncer/cvr-store.d.ts.map +1 -1
  77. package/out/zero-cache/src/services/view-syncer/cvr-store.js +6 -4
  78. package/out/zero-cache/src/services/view-syncer/cvr-store.js.map +1 -1
  79. package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts +1 -8
  80. package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts.map +1 -1
  81. package/out/zero-cache/src/services/view-syncer/pipeline-driver.js +2 -11
  82. package/out/zero-cache/src/services/view-syncer/pipeline-driver.js.map +1 -1
  83. package/out/zero-cache/src/services/view-syncer/snapshotter.d.ts +0 -2
  84. package/out/zero-cache/src/services/view-syncer/snapshotter.d.ts.map +1 -1
  85. package/out/zero-cache/src/services/view-syncer/snapshotter.js +2 -10
  86. package/out/zero-cache/src/services/view-syncer/snapshotter.js.map +1 -1
  87. package/out/zero-cache/src/services/view-syncer/view-syncer.d.ts +1 -2
  88. package/out/zero-cache/src/services/view-syncer/view-syncer.d.ts.map +1 -1
  89. package/out/zero-cache/src/services/view-syncer/view-syncer.js +40 -42
  90. package/out/zero-cache/src/services/view-syncer/view-syncer.js.map +1 -1
  91. package/out/zero-cache/src/workers/connect-params.d.ts +0 -1
  92. package/out/zero-cache/src/workers/connect-params.d.ts.map +1 -1
  93. package/out/zero-cache/src/workers/connect-params.js +0 -2
  94. package/out/zero-cache/src/workers/connect-params.js.map +1 -1
  95. package/out/zero-cache/src/workers/replicator.d.ts.map +1 -1
  96. package/out/zero-cache/src/workers/replicator.js +2 -5
  97. package/out/zero-cache/src/workers/replicator.js.map +1 -1
  98. package/out/zero-cache/src/workers/syncer-ws-message-handler.d.ts.map +1 -1
  99. package/out/zero-cache/src/workers/syncer-ws-message-handler.js +1 -4
  100. package/out/zero-cache/src/workers/syncer-ws-message-handler.js.map +1 -1
  101. package/out/zero-client/src/client/context.js +1 -0
  102. package/out/zero-client/src/client/context.js.map +1 -1
  103. package/out/zero-client/src/client/options.d.ts +1 -1
  104. package/out/zero-client/src/client/options.js.map +1 -1
  105. package/out/zero-client/src/client/version.js +1 -1
  106. package/out/zero-client/src/client/zero.d.ts +2 -4
  107. package/out/zero-client/src/client/zero.d.ts.map +1 -1
  108. package/out/zero-client/src/client/zero.js +1 -1
  109. package/out/zero-client/src/client/zero.js.map +1 -1
  110. package/out/zero-protocol/src/push.d.ts +7 -0
  111. package/out/zero-protocol/src/push.d.ts.map +1 -1
  112. package/out/zero-protocol/src/push.js +9 -1
  113. package/out/zero-protocol/src/push.js.map +1 -1
  114. package/out/zero-server/src/process-mutations.d.ts +1 -0
  115. package/out/zero-server/src/process-mutations.d.ts.map +1 -1
  116. package/out/zero-server/src/process-mutations.js +41 -2
  117. package/out/zero-server/src/process-mutations.js.map +1 -1
  118. package/out/zero-server/src/zql-database.d.ts.map +1 -1
  119. package/out/zero-server/src/zql-database.js +9 -0
  120. package/out/zero-server/src/zql-database.js.map +1 -1
  121. package/out/zero-solid/src/solid-view.js +1 -0
  122. package/out/zero-solid/src/solid-view.js.map +1 -1
  123. package/out/zero-solid/src/use-query.js +1 -0
  124. package/out/zero-solid/src/use-query.js.map +1 -1
  125. package/out/zql/src/ivm/stream.d.ts.map +1 -1
  126. package/out/zql/src/ivm/stream.js +1 -1
  127. package/out/zql/src/ivm/stream.js.map +1 -1
  128. package/out/zql/src/mutate/mutator.js +4 -4
  129. package/out/zql/src/mutate/mutator.js.map +1 -1
  130. package/out/zql/src/query/create-builder.js +3 -5
  131. package/out/zql/src/query/create-builder.js.map +1 -1
  132. package/out/zql/src/query/query-registry.js +4 -4
  133. package/out/zql/src/query/query-registry.js.map +1 -1
  134. package/package.json +3 -3
  135. package/out/zero-cache/src/types/schema-versions.d.ts +0 -12
  136. package/out/zero-cache/src/types/schema-versions.d.ts.map +0 -1
  137. package/out/zero-cache/src/types/schema-versions.js +0 -28
  138. package/out/zero-cache/src/types/schema-versions.js.map +0 -1
package/out/zero-cache/src/services/mutagen/mutagen.js.map
@@ -1 +1 @@
- {"version":3,"file":"mutagen.js","sources":["../../../../../../zero-cache/src/services/mutagen/mutagen.ts"],"sourcesContent":["import {PG_SERIALIZATION_FAILURE} from '@drdgvhbh/postgres-error-codes';\nimport type {LogContext} from '@rocicorp/logger';\nimport {resolver} from '@rocicorp/resolver';\nimport type {JWTPayload} from 'jose';\nimport postgres from 'postgres';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport * as v from '../../../../shared/src/valita.ts';\nimport {ErrorKind} from '../../../../zero-protocol/src/error-kind.ts';\nimport * as MutationType from '../../../../zero-protocol/src/mutation-type-enum.ts';\nimport {\n primaryKeyValueSchema,\n type PrimaryKeyValue,\n} from '../../../../zero-protocol/src/primary-key.ts';\nimport {\n type CRUDMutation,\n type DeleteOp,\n type InsertOp,\n type Mutation,\n type UpdateOp,\n type UpsertOp,\n} from '../../../../zero-protocol/src/push.ts';\nimport {Database} from '../../../../zqlite/src/db.ts';\nimport type {DatabaseStorage} from '../../../../zqlite/src/database-storage.ts';\nimport {\n WriteAuthorizerImpl,\n type WriteAuthorizer,\n} from '../../auth/write-authorizer.ts';\nimport {type ZeroConfig} from '../../config/zero-config.ts';\nimport * as Mode from '../../db/mode-enum.ts';\nimport {getOrCreateCounter} from '../../observability/metrics.ts';\nimport {recordMutation} from '../../server/anonymous-otel-start.ts';\nimport type {PostgresDB, PostgresTransaction} from '../../types/pg.ts';\nimport {throwProtocolErrorIfSchemaVersionNotSupported} from '../../types/schema-versions.ts';\nimport {appSchema, upstreamSchema, type ShardID} from '../../types/shards.ts';\nimport {SlidingWindowLimiter} from '../limiter/sliding-window-limiter.ts';\nimport type {RefCountedService, Service} from '../service.ts';\nimport {MutationAlreadyProcessedError} from './error.ts';\nimport {\n isProtocolError,\n ProtocolError,\n} from '../../../../zero-protocol/src/error.ts';\nimport {ErrorOrigin} from '../../../../zero-protocol/src/error-origin.ts';\n\n// An error encountered processing a mutation.\n// Returned back to application for display to user.\nexport type MutationError = [\n kind: ErrorKind.MutationFailed | ErrorKind.MutationRateLimited,\n desc: string,\n];\n\nexport interface Mutagen extends RefCountedService {\n processMutation(\n mutation: Mutation,\n authData: JWTPayload | undefined,\n schemaVersion: number | undefined,\n customMutatorsEnabled: boolean,\n ): Promise<MutationError | undefined>;\n}\n\nexport class MutagenService implements Mutagen, Service {\n readonly id: string;\n readonly #lc: LogContext;\n readonly #upstream: PostgresDB;\n readonly #shard: ShardID;\n readonly #stopped = resolver();\n readonly #replica: Database;\n readonly #writeAuthorizer: WriteAuthorizerImpl;\n readonly #limiter: SlidingWindowLimiter | undefined;\n #refCount = 0;\n #isStopped = false;\n\n readonly #crudMutations = getOrCreateCounter(\n 'mutation',\n 'crud',\n 'Number of CRUD mutations processed',\n );\n\n constructor(\n lc: LogContext,\n shard: ShardID,\n clientGroupID: string,\n upstream: PostgresDB,\n config: ZeroConfig,\n writeAuthzStorage: DatabaseStorage,\n ) {\n this.id = clientGroupID;\n this.#lc = lc;\n this.#upstream = upstream;\n this.#shard = shard;\n this.#replica = new Database(this.#lc, config.replica.file, {\n fileMustExist: true,\n });\n this.#writeAuthorizer = new WriteAuthorizerImpl(\n this.#lc,\n config,\n this.#replica,\n shard.appID,\n clientGroupID,\n writeAuthzStorage,\n );\n\n if 
(config.perUserMutationLimit.max !== undefined) {\n this.#limiter = new SlidingWindowLimiter(\n config.perUserMutationLimit.windowMs,\n config.perUserMutationLimit.max,\n );\n }\n }\n\n ref() {\n assert(!this.#isStopped, 'MutagenService is already stopped');\n ++this.#refCount;\n }\n\n unref() {\n assert(!this.#isStopped, 'MutagenService is already stopped');\n --this.#refCount;\n if (this.#refCount <= 0) {\n void this.stop();\n }\n }\n\n hasRefs(): boolean {\n return this.#refCount > 0;\n }\n\n processMutation(\n mutation: Mutation,\n authData: JWTPayload | undefined,\n schemaVersion: number | undefined,\n customMutatorsEnabled = false,\n ): Promise<MutationError | undefined> {\n if (this.#limiter?.canDo() === false) {\n return Promise.resolve([\n ErrorKind.MutationRateLimited,\n 'Rate limit exceeded',\n ]);\n }\n this.#crudMutations.add(1, {\n clientGroupID: this.id,\n });\n return processMutation(\n this.#lc,\n authData,\n this.#upstream,\n this.#shard,\n this.id,\n mutation,\n this.#writeAuthorizer,\n schemaVersion,\n undefined,\n customMutatorsEnabled,\n );\n }\n\n run(): Promise<void> {\n return this.#stopped.promise;\n }\n\n stop(): Promise<void> {\n if (this.#isStopped) {\n return this.#stopped.promise;\n }\n this.#writeAuthorizer.destroy();\n this.#isStopped = true;\n this.#stopped.resolve();\n return this.#stopped.promise;\n }\n}\n\nconst MAX_SERIALIZATION_ATTEMPTS = 10;\n\nexport async function processMutation(\n lc: LogContext,\n authData: JWTPayload | undefined,\n db: PostgresDB,\n shard: ShardID,\n clientGroupID: string,\n mutation: Mutation,\n writeAuthorizer: WriteAuthorizer,\n schemaVersion: number | undefined,\n onTxStart?: () => void | Promise<void>, // for testing\n customMutatorsEnabled = false,\n): Promise<MutationError | undefined> {\n assert(\n mutation.type === MutationType.CRUD,\n 'Only CRUD mutations are supported',\n );\n lc = lc.withContext('mutationID', mutation.id);\n lc = lc.withContext('processMutation');\n lc.debug?.('Process mutation start', mutation);\n\n // Record mutation processing attempt for telemetry (regardless of success/failure)\n recordMutation('crud');\n\n let result: MutationError | undefined;\n\n const start = Date.now();\n try {\n // Mutations can fail for a variety of reasons:\n //\n // - application error\n // - network/db error\n // - zero bug\n //\n // For application errors what we want is to re-run the mutation in\n // \"error mode\", which skips the actual mutation and just updates the\n // lastMutationID. Then return the error to the app.\n //\n // However, it's hard to tell the difference between application errors\n // and the other types.\n //\n // A reasonable policy ends up being to just retry every mutation once\n // in error mode. If the error mode mutation succeeds then we assume it\n // was an application error and return the error to the app. Otherwise,\n // we know it was something internal and we log it.\n //\n // This is not 100% correct - there are theoretical cases where we\n // return an internal error to the app that shouldn't have been. But it\n // would have to be a crazy coincidence: we'd have to have a network\n // error on the first attempt that resolves by the second attempt.\n //\n // One might ask why not try/catch just the calls to the mutators and\n // consider those application errors. 
That is actually what we do in\n // Replicache:\n //\n // https://github.com/rocicorp/todo-row-versioning/blob/9a0a79dc2d2de32c4fac61b5d1634bd9a9e66b7c/server/src/push.ts#L131\n //\n // We don't do it here because:\n //\n // 1. It's still not perfect. It's hard to isolate SQL errors in\n // mutators due to app developer mistakes from SQL errors due to\n // Zero mistakes.\n // 2. It's not possible to do this with the pg library we're using in\n // Zero anyway: https://github.com/porsager/postgres/issues/455.\n //\n // Personally I think this simple retry policy is nice.\n let errorMode = false;\n for (let i = 0; i < MAX_SERIALIZATION_ATTEMPTS; i++) {\n try {\n await db.begin(Mode.SERIALIZABLE, async tx => {\n // Simulates a concurrent request for testing. In production this is a noop.\n const done = onTxStart?.();\n try {\n return await processMutationWithTx(\n lc,\n tx,\n authData,\n shard,\n clientGroupID,\n schemaVersion,\n mutation,\n errorMode,\n writeAuthorizer,\n );\n } finally {\n await done;\n }\n });\n if (errorMode) {\n lc.debug?.('Ran mutation successfully in error mode');\n }\n break;\n } catch (e) {\n if (e instanceof MutationAlreadyProcessedError) {\n lc.debug?.(e.message);\n // Don't double-count already processed mutations, but they were counted above\n return undefined;\n }\n if (\n isProtocolError(e) &&\n !errorMode &&\n e.kind === ErrorKind.InvalidPush &&\n customMutatorsEnabled &&\n i < 2\n ) {\n // We're temporarily supporting custom mutators AND CRUD mutators at the same time.\n // This can create a lot of OOO mutation errors since we do not know when the API server\n // has applied a custom mutation before moving on to process CRUD mutations.\n // The temporary workaround (since CRUD is being deprecated) is to retry the mutation\n // after a small delay. Users are not expected to be running both CRUD and Custom mutators.\n // They should migrate completely to custom mutators.\n lc.info?.(\n 'Both CRUD and Custom mutators are being used at once. This is supported for now but IS NOT RECOMMENDED. 
Migrate completely to custom mutators.',\n e,\n );\n await new Promise(resolve => setTimeout(resolve, 100));\n continue;\n }\n if (isProtocolError(e) || errorMode) {\n lc.error?.('Process mutation error', e);\n throw e;\n }\n if (\n e instanceof postgres.PostgresError &&\n e.code === PG_SERIALIZATION_FAILURE\n ) {\n lc.info?.(`attempt ${i + 1}: ${String(e)}`, e);\n continue; // Retry up to MAX_SERIALIZATION_ATTEMPTS.\n }\n result = [ErrorKind.MutationFailed, String(e)];\n if (errorMode) {\n break;\n }\n lc.error?.('Got error running mutation, re-running in error mode', e);\n errorMode = true;\n i--;\n }\n }\n } finally {\n lc.debug?.('Process mutation complete in', Date.now() - start);\n }\n return result;\n}\n\nexport async function processMutationWithTx(\n lc: LogContext,\n tx: PostgresTransaction,\n authData: JWTPayload | undefined,\n shard: ShardID,\n clientGroupID: string,\n schemaVersion: number | undefined,\n mutation: CRUDMutation,\n errorMode: boolean,\n authorizer: WriteAuthorizer,\n) {\n const tasks: (() => Promise<unknown>)[] = [];\n\n async function execute(stmt: postgres.PendingQuery<postgres.Row[]>) {\n try {\n return await stmt.execute();\n } finally {\n const q = stmt as unknown as Query;\n lc.debug?.(`${q.string}: ${JSON.stringify(q.parameters)}`);\n }\n }\n\n authorizer.reloadPermissions();\n\n if (!errorMode) {\n const {ops} = mutation.args[0];\n const normalizedOps = authorizer.normalizeOps(ops);\n const [canPre, canPost] = await Promise.all([\n authorizer.canPreMutation(authData, normalizedOps),\n authorizer.canPostMutation(authData, normalizedOps),\n ]);\n if (canPre && canPost) {\n for (const op of ops) {\n switch (op.op) {\n case 'insert':\n tasks.push(() => execute(getInsertSQL(tx, op)));\n break;\n case 'upsert':\n tasks.push(() => execute(getUpsertSQL(tx, op)));\n break;\n case 'update':\n tasks.push(() => execute(getUpdateSQL(tx, op)));\n break;\n case 'delete':\n tasks.push(() => execute(getDeleteSQL(tx, op)));\n break;\n default:\n unreachable(op);\n }\n }\n }\n }\n\n // Confirm the mutation even though it may have been blocked by the authorizer.\n // Authorizer blocking a mutation is not an error but the correct result of the mutation.\n tasks.unshift(() =>\n checkSchemaVersionAndIncrementLastMutationID(\n tx,\n shard,\n clientGroupID,\n schemaVersion,\n mutation.clientID,\n mutation.id,\n ),\n );\n\n // Note: An error thrown from any Promise aborts the entire transaction.\n await Promise.all(tasks.map(task => task()));\n}\n\nexport function getInsertSQL(\n tx: postgres.TransactionSql,\n create: InsertOp,\n): postgres.PendingQuery<postgres.Row[]> {\n return tx`INSERT INTO ${tx(create.tableName)} ${tx(create.value)}`;\n}\n\nexport function getUpsertSQL(\n tx: postgres.TransactionSql,\n set: UpsertOp,\n): postgres.PendingQuery<postgres.Row[]> {\n const {tableName, primaryKey, value} = set;\n return tx`\n INSERT INTO ${tx(tableName)} ${tx(value)}\n ON CONFLICT (${tx(primaryKey)})\n DO UPDATE SET ${tx(value)}\n `;\n}\n\nfunction getUpdateSQL(\n tx: postgres.TransactionSql,\n update: UpdateOp,\n): postgres.PendingQuery<postgres.Row[]> {\n const table = update.tableName;\n const {primaryKey, value} = update;\n const id: Record<string, PrimaryKeyValue> = {};\n for (const key of primaryKey) {\n id[key] = v.parse(value[key], primaryKeyValueSchema);\n }\n return tx`UPDATE ${tx(table)} SET ${tx(value)} WHERE ${Object.entries(\n id,\n ).flatMap(([key, value], i) =>\n i ? 
[tx`AND`, tx`${tx(key)} = ${value}`] : tx`${tx(key)} = ${value}`,\n )}`;\n}\n\nfunction getDeleteSQL(\n tx: postgres.TransactionSql,\n deleteOp: DeleteOp,\n): postgres.PendingQuery<postgres.Row[]> {\n const {tableName, primaryKey, value} = deleteOp;\n\n const conditions = [];\n for (const key of primaryKey) {\n if (conditions.length > 0) {\n conditions.push(tx`AND`);\n }\n conditions.push(tx`${tx(key)} = ${value[key]}`);\n }\n\n return tx`DELETE FROM ${tx(tableName)} WHERE ${conditions}`;\n}\n\nasync function checkSchemaVersionAndIncrementLastMutationID(\n tx: PostgresTransaction,\n shard: ShardID,\n clientGroupID: string,\n schemaVersion: number | undefined,\n clientID: string,\n receivedMutationID: number,\n) {\n const [[{lastMutationID}], supportedVersionRange] = await Promise.all([\n tx<{lastMutationID: bigint}[]>`\n INSERT INTO ${tx(upstreamSchema(shard))}.clients \n as current (\"clientGroupID\", \"clientID\", \"lastMutationID\")\n VALUES (${clientGroupID}, ${clientID}, ${1})\n ON CONFLICT (\"clientGroupID\", \"clientID\")\n DO UPDATE SET \"lastMutationID\" = current.\"lastMutationID\" + 1\n RETURNING \"lastMutationID\"\n `,\n schemaVersion === undefined\n ? undefined\n : tx<\n {\n minSupportedVersion: number;\n maxSupportedVersion: number;\n }[]\n >`SELECT \"minSupportedVersion\", \"maxSupportedVersion\" \n FROM ${tx(appSchema(shard))}.\"schemaVersions\"`,\n ]);\n\n // ABORT if the resulting lastMutationID is not equal to the receivedMutationID.\n if (receivedMutationID < lastMutationID) {\n throw new MutationAlreadyProcessedError(\n clientID,\n receivedMutationID,\n lastMutationID,\n );\n } else if (receivedMutationID > lastMutationID) {\n throw new ProtocolError({\n kind: ErrorKind.InvalidPush,\n message: `Push contains unexpected mutation id ${receivedMutationID} for client ${clientID}. 
Expected mutation id ${lastMutationID.toString()}.`,\n origin: ErrorOrigin.ZeroCache,\n });\n }\n\n if (schemaVersion !== undefined && supportedVersionRange !== undefined) {\n assert(supportedVersionRange.length === 1);\n throwProtocolErrorIfSchemaVersionNotSupported(\n schemaVersion,\n supportedVersionRange[0],\n );\n }\n}\n\n// The slice of information from the Query object in Postgres.js that gets logged for debugging.\n// https://github.com/porsager/postgres/blob/f58cd4f3affd3e8ce8f53e42799672d86cd2c70b/src/connection.js#L219\ntype Query = {string: string; parameters: object[]};\n"],"names":["ErrorKind.MutationRateLimited","MutationType.CRUD","Mode.SERIALIZABLE","ErrorKind.InvalidPush","ErrorKind.MutationFailed","v.parse","value","ErrorOrigin.ZeroCache"],"mappings":";;;;;;;;;;;;;;;;;;;;;AA2DO,MAAM,eAA2C;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,WAAW,SAAA;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,EACT,YAAY;AAAA,EACZ,aAAa;AAAA,EAEJ,iBAAiB;AAAA,IACxB;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAGF,YACE,IACA,OACA,eACA,UACA,QACA,mBACA;AACA,SAAK,KAAK;AACV,SAAK,MAAM;AACX,SAAK,YAAY;AACjB,SAAK,SAAS;AACd,SAAK,WAAW,IAAI,SAAS,KAAK,KAAK,OAAO,QAAQ,MAAM;AAAA,MAC1D,eAAe;AAAA,IAAA,CAChB;AACD,SAAK,mBAAmB,IAAI;AAAA,MAC1B,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA;AAAA,IAAA;AAGF,QAAI,OAAO,qBAAqB,QAAQ,QAAW;AACjD,WAAK,WAAW,IAAI;AAAA,QAClB,OAAO,qBAAqB;AAAA,QAC5B,OAAO,qBAAqB;AAAA,MAAA;AAAA,IAEhC;AAAA,EACF;AAAA,EAEA,MAAM;AACJ,WAAO,CAAC,KAAK,YAAY,mCAAmC;AAC5D,MAAE,KAAK;AAAA,EACT;AAAA,EAEA,QAAQ;AACN,WAAO,CAAC,KAAK,YAAY,mCAAmC;AAC5D,MAAE,KAAK;AACP,QAAI,KAAK,aAAa,GAAG;AACvB,WAAK,KAAK,KAAA;AAAA,IACZ;AAAA,EACF;AAAA,EAEA,UAAmB;AACjB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,gBACE,UACA,UACA,eACA,wBAAwB,OACY;AACpC,QAAI,KAAK,UAAU,MAAA,MAAY,OAAO;AACpC,aAAO,QAAQ,QAAQ;AAAA,QACrBA;AAAAA,QACA;AAAA,MAAA,CACD;AAAA,IACH;AACA,SAAK,eAAe,IAAI,GAAG;AAAA,MACzB,eAAe,KAAK;AAAA,IAAA,CACrB;AACD,WAAO;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAAA,EAEA,MAAqB;AACnB,WAAO,KAAK,SAAS;AAAA,EACvB;AAAA,EAEA,OAAsB;AACpB,QAAI,KAAK,YAAY;AACnB,aAAO,KAAK,SAAS;AAAA,IACvB;AACA,SAAK,iBAAiB,QAAA;AACtB,SAAK,aAAa;AAClB,SAAK,SAAS,QAAA;AACd,WAAO,KAAK,SAAS;AAAA,EACvB;AACF;AAEA,MAAM,6BAA6B;AAEnC,eAAsB,gBACpB,IACA,UACA,IACA,OACA,eACA,UACA,iBACA,eACA,WACA,wBAAwB,OACY;AACpC;AAAA,IACE,SAAS,SAASC;AAAAA,IAClB;AAAA,EAAA;AAEF,OAAK,GAAG,YAAY,cAAc,SAAS,EAAE;AAC7C,OAAK,GAAG,YAAY,iBAAiB;AACrC,KAAG,QAAQ,0BAA0B,QAAQ;AAG7C,iBAAe,MAAM;AAErB,MAAI;AAEJ,QAAM,QAAQ,KAAK,IAAA;AACnB,MAAI;AAuCF,QAAI,YAAY;AAChB,aAAS,IAAI,GAAG,IAAI,4BAA4B,KAAK;AACnD,UAAI;AACF,cAAM,GAAG,MAAMC,cAAmB,OAAM,OAAM;AAE5C,gBAAM,OAAO,YAAA;AACb,cAAI;AACF,mBAAO,MAAM;AAAA,cACX;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,YAAA;AAAA,UAEJ,UAAA;AACE,kBAAM;AAAA,UACR;AAAA,QACF,CAAC;AACD,YAAI,WAAW;AACb,aAAG,QAAQ,yCAAyC;AAAA,QACtD;AACA;AAAA,MACF,SAAS,GAAG;AACV,YAAI,aAAa,+BAA+B;AAC9C,aAAG,QAAQ,EAAE,OAAO;AAEpB,iBAAO;AAAA,QACT;AACA,YACE,gBAAgB,CAAC,KACjB,CAAC,aACD,EAAE,SAASC,eACX,yBACA,IAAI,GACJ;AAOA,aAAG;AAAA,YACD;AAAA,YACA;AAAA,UAAA;AAEF,gBAAM,IAAI,QAAQ,CAAA,YAAW,WAAW,SAAS,GAAG,CAAC;AACrD;AAAA,QACF;AACA,YAAI,gBAAgB,CAAC,KAAK,WAAW;AACnC,aAAG,QAAQ,0BAA0B,CAAC;AACtC,gBAAM;AAAA,QACR;AACA,YACE,aAAa,SAAS,iBACtB,EAAE,SAAS,0BACX;AACA,aAAG,OAAO,WAAW,IAAI,CAAC,KAAK,OAAO,CAAC,CAAC,IAAI,CAAC;AAC7C;AAAA,QACF;AACA,iBAAS,CAACC,gBAA0B,OAAO,CAAC,CAAC;AAC7C,YAAI,WAAW;AACb;AAAA,QACF;AACA,WAAG,QAAQ,wDAAwD,CAAC;AACpE,oBAAY;AACZ;AAAA,MACF;AAAA,IACF;AAAA,EACF,UAAA;AACE,OAAG,QAAQ,gCAAgC,KAAK,IAAA,IA
AQ,KAAK;AAAA,EAC/D;AACA,SAAO;AACT;AAEA,eAAsB,sBACpB,IACA,IACA,UACA,OACA,eACA,eACA,UACA,WACA,YACA;AACA,QAAM,QAAoC,CAAA;AAE1C,iBAAe,QAAQ,MAA6C;AAClE,QAAI;AACF,aAAO,MAAM,KAAK,QAAA;AAAA,IACpB,UAAA;AACE,YAAM,IAAI;AACV,SAAG,QAAQ,GAAG,EAAE,MAAM,KAAK,KAAK,UAAU,EAAE,UAAU,CAAC,EAAE;AAAA,IAC3D;AAAA,EACF;AAEA,aAAW,kBAAA;AAEX,MAAI,CAAC,WAAW;AACd,UAAM,EAAC,IAAA,IAAO,SAAS,KAAK,CAAC;AAC7B,UAAM,gBAAgB,WAAW,aAAa,GAAG;AACjD,UAAM,CAAC,QAAQ,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,MAC1C,WAAW,eAAe,UAAU,aAAa;AAAA,MACjD,WAAW,gBAAgB,UAAU,aAAa;AAAA,IAAA,CACnD;AACD,QAAI,UAAU,SAAS;AACrB,iBAAW,MAAM,KAAK;AACpB,gBAAQ,GAAG,IAAA;AAAA,UACT,KAAK;AACH,kBAAM,KAAK,MAAM,QAAQ,aAAa,IAAI,EAAE,CAAC,CAAC;AAC9C;AAAA,UACF,KAAK;AACH,kBAAM,KAAK,MAAM,QAAQ,aAAa,IAAI,EAAE,CAAC,CAAC;AAC9C;AAAA,UACF,KAAK;AACH,kBAAM,KAAK,MAAM,QAAQ,aAAa,IAAI,EAAE,CAAC,CAAC;AAC9C;AAAA,UACF,KAAK;AACH,kBAAM,KAAK,MAAM,QAAQ,aAAa,IAAI,EAAE,CAAC,CAAC;AAC9C;AAAA,UACF;AACE,wBAAc;AAAA,QAAA;AAAA,MAEpB;AAAA,IACF;AAAA,EACF;AAIA,QAAM;AAAA,IAAQ,MACZ;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,SAAS;AAAA,IAAA;AAAA,EACX;AAIF,QAAM,QAAQ,IAAI,MAAM,IAAI,CAAA,SAAQ,KAAA,CAAM,CAAC;AAC7C;AAEO,SAAS,aACd,IACA,QACuC;AACvC,SAAO,iBAAiB,GAAG,OAAO,SAAS,CAAC,IAAI,GAAG,OAAO,KAAK,CAAC;AAClE;AAEO,SAAS,aACd,IACA,KACuC;AACvC,QAAM,EAAC,WAAW,YAAY,MAAA,IAAS;AACvC,SAAO;AAAA,kBACS,GAAG,SAAS,CAAC,IAAI,GAAG,KAAK,CAAC;AAAA,mBACzB,GAAG,UAAU,CAAC;AAAA,oBACb,GAAG,KAAK,CAAC;AAAA;AAE7B;AAEA,SAAS,aACP,IACA,QACuC;AACvC,QAAM,QAAQ,OAAO;AACrB,QAAM,EAAC,YAAY,MAAA,IAAS;AAC5B,QAAM,KAAsC,CAAA;AAC5C,aAAW,OAAO,YAAY;AAC5B,OAAG,GAAG,IAAIC,MAAQ,MAAM,GAAG,GAAG,qBAAqB;AAAA,EACrD;AACA,SAAO,YAAY,GAAG,KAAK,CAAC,QAAQ,GAAG,KAAK,CAAC,UAAU,OAAO;AAAA,IAC5D;AAAA,EAAA,EACA;AAAA,IAAQ,CAAC,CAAC,KAAKC,MAAK,GAAG,MACvB,IAAI,CAAC,SAAS,KAAK,GAAG,GAAG,CAAC,MAAMA,MAAK,EAAE,IAAI,KAAK,GAAG,GAAG,CAAC,MAAMA,MAAK;AAAA,EAAA,CACnE;AACH;AAEA,SAAS,aACP,IACA,UACuC;AACvC,QAAM,EAAC,WAAW,YAAY,MAAA,IAAS;AAEvC,QAAM,aAAa,CAAA;AACnB,aAAW,OAAO,YAAY;AAC5B,QAAI,WAAW,SAAS,GAAG;AACzB,iBAAW,KAAK,OAAO;AAAA,IACzB;AACA,eAAW,KAAK,KAAK,GAAG,GAAG,CAAC,MAAM,MAAM,GAAG,CAAC,EAAE;AAAA,EAChD;AAEA,SAAO,iBAAiB,GAAG,SAAS,CAAC,UAAU,UAAU;AAC3D;AAEA,eAAe,6CACb,IACA,OACA,eACA,eACA,UACA,oBACA;AACA,QAAM,CAAC,CAAC,EAAC,eAAA,CAAe,GAAG,qBAAqB,IAAI,MAAM,QAAQ,IAAI;AAAA,IACpE;AAAA,kBACc,GAAG,eAAe,KAAK,CAAC,CAAC;AAAA;AAAA,oBAEvB,aAAa,KAAK,QAAQ,KAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,IAKhD,kBAAkB,SACd,SACA;AAAA,eAMO,GAAG,UAAU,KAAK,CAAC,CAAC;AAAA,EAAA,CAChC;AAGD,MAAI,qBAAqB,gBAAgB;AACvC,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ,WAAW,qBAAqB,gBAAgB;AAC9C,UAAM,IAAI,cAAc;AAAA,MACtB,MAAMH;AAAAA,MACN,SAAS,wCAAwC,kBAAkB,eAAe,QAAQ,0BAA0B,eAAe,UAAU;AAAA,MAC7I,QAAQI;AAAAA,IAAY,CACrB;AAAA,EACH;AAEA,MAAI,kBAAkB,UAAa,0BAA0B,QAAW;AACtE,WAAO,sBAAsB,WAAW,CAAC;AACzC;AAAA,MACE;AAAA,MACA,sBAAsB,CAAC;AAAA,IAAA;AAAA,EAE3B;AACF;"}
+ {"version":3,"file":"mutagen.js","sources":["../../../../../../zero-cache/src/services/mutagen/mutagen.ts"],"sourcesContent":["import {PG_SERIALIZATION_FAILURE} from '@drdgvhbh/postgres-error-codes';\nimport type {LogContext} from '@rocicorp/logger';\nimport {resolver} from '@rocicorp/resolver';\nimport type {JWTPayload} from 'jose';\nimport postgres from 'postgres';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport * as v from '../../../../shared/src/valita.ts';\nimport {ErrorKind} from '../../../../zero-protocol/src/error-kind.ts';\nimport {ErrorOrigin} from '../../../../zero-protocol/src/error-origin.ts';\nimport {\n isProtocolError,\n ProtocolError,\n} from '../../../../zero-protocol/src/error.ts';\nimport * as MutationType from '../../../../zero-protocol/src/mutation-type-enum.ts';\nimport {\n primaryKeyValueSchema,\n type PrimaryKeyValue,\n} from '../../../../zero-protocol/src/primary-key.ts';\nimport {\n type CRUDMutation,\n type DeleteOp,\n type InsertOp,\n type Mutation,\n type UpdateOp,\n type UpsertOp,\n} from '../../../../zero-protocol/src/push.ts';\nimport type {DatabaseStorage} from '../../../../zqlite/src/database-storage.ts';\nimport {Database} from '../../../../zqlite/src/db.ts';\nimport {\n WriteAuthorizerImpl,\n type WriteAuthorizer,\n} from '../../auth/write-authorizer.ts';\nimport {type ZeroConfig} from '../../config/zero-config.ts';\nimport * as Mode from '../../db/mode-enum.ts';\nimport {getOrCreateCounter} from '../../observability/metrics.ts';\nimport {recordMutation} from '../../server/anonymous-otel-start.ts';\nimport type {PostgresDB, PostgresTransaction} from '../../types/pg.ts';\nimport {upstreamSchema, type ShardID} from '../../types/shards.ts';\nimport {SlidingWindowLimiter} from '../limiter/sliding-window-limiter.ts';\nimport type {RefCountedService, Service} from '../service.ts';\nimport {MutationAlreadyProcessedError} from './error.ts';\n\n// An error encountered processing a mutation.\n// Returned back to application for display to user.\nexport type MutationError = [\n kind: ErrorKind.MutationFailed | ErrorKind.MutationRateLimited,\n desc: string,\n];\n\nexport interface Mutagen extends RefCountedService {\n processMutation(\n mutation: Mutation,\n authData: JWTPayload | undefined,\n customMutatorsEnabled: boolean,\n ): Promise<MutationError | undefined>;\n}\n\nexport class MutagenService implements Mutagen, Service {\n readonly id: string;\n readonly #lc: LogContext;\n readonly #upstream: PostgresDB;\n readonly #shard: ShardID;\n readonly #stopped = resolver();\n readonly #replica: Database;\n readonly #writeAuthorizer: WriteAuthorizerImpl;\n readonly #limiter: SlidingWindowLimiter | undefined;\n #refCount = 0;\n #isStopped = false;\n\n readonly #crudMutations = getOrCreateCounter(\n 'mutation',\n 'crud',\n 'Number of CRUD mutations processed',\n );\n\n constructor(\n lc: LogContext,\n shard: ShardID,\n clientGroupID: string,\n upstream: PostgresDB,\n config: ZeroConfig,\n writeAuthzStorage: DatabaseStorage,\n ) {\n this.id = clientGroupID;\n this.#lc = lc;\n this.#upstream = upstream;\n this.#shard = shard;\n this.#replica = new Database(this.#lc, config.replica.file, {\n fileMustExist: true,\n });\n this.#writeAuthorizer = new WriteAuthorizerImpl(\n this.#lc,\n config,\n this.#replica,\n shard.appID,\n clientGroupID,\n writeAuthzStorage,\n );\n\n if (config.perUserMutationLimit.max !== undefined) {\n this.#limiter = new SlidingWindowLimiter(\n config.perUserMutationLimit.windowMs,\n config.perUserMutationLimit.max,\n 
);\n }\n }\n\n ref() {\n assert(!this.#isStopped, 'MutagenService is already stopped');\n ++this.#refCount;\n }\n\n unref() {\n assert(!this.#isStopped, 'MutagenService is already stopped');\n --this.#refCount;\n if (this.#refCount <= 0) {\n void this.stop();\n }\n }\n\n hasRefs(): boolean {\n return this.#refCount > 0;\n }\n\n processMutation(\n mutation: Mutation,\n authData: JWTPayload | undefined,\n customMutatorsEnabled = false,\n ): Promise<MutationError | undefined> {\n if (this.#limiter?.canDo() === false) {\n return Promise.resolve([\n ErrorKind.MutationRateLimited,\n 'Rate limit exceeded',\n ]);\n }\n this.#crudMutations.add(1, {\n clientGroupID: this.id,\n });\n return processMutation(\n this.#lc,\n authData,\n this.#upstream,\n this.#shard,\n this.id,\n mutation,\n this.#writeAuthorizer,\n undefined,\n customMutatorsEnabled,\n );\n }\n\n run(): Promise<void> {\n return this.#stopped.promise;\n }\n\n stop(): Promise<void> {\n if (this.#isStopped) {\n return this.#stopped.promise;\n }\n this.#writeAuthorizer.destroy();\n this.#isStopped = true;\n this.#stopped.resolve();\n return this.#stopped.promise;\n }\n}\n\nconst MAX_SERIALIZATION_ATTEMPTS = 10;\n\nexport async function processMutation(\n lc: LogContext,\n authData: JWTPayload | undefined,\n db: PostgresDB,\n shard: ShardID,\n clientGroupID: string,\n mutation: Mutation,\n writeAuthorizer: WriteAuthorizer,\n onTxStart?: () => void | Promise<void>, // for testing\n customMutatorsEnabled = false,\n): Promise<MutationError | undefined> {\n assert(\n mutation.type === MutationType.CRUD,\n 'Only CRUD mutations are supported',\n );\n lc = lc.withContext('mutationID', mutation.id);\n lc = lc.withContext('processMutation');\n lc.debug?.('Process mutation start', mutation);\n\n // Record mutation processing attempt for telemetry (regardless of success/failure)\n recordMutation('crud');\n\n let result: MutationError | undefined;\n\n const start = Date.now();\n try {\n // Mutations can fail for a variety of reasons:\n //\n // - application error\n // - network/db error\n // - zero bug\n //\n // For application errors what we want is to re-run the mutation in\n // \"error mode\", which skips the actual mutation and just updates the\n // lastMutationID. Then return the error to the app.\n //\n // However, it's hard to tell the difference between application errors\n // and the other types.\n //\n // A reasonable policy ends up being to just retry every mutation once\n // in error mode. If the error mode mutation succeeds then we assume it\n // was an application error and return the error to the app. Otherwise,\n // we know it was something internal and we log it.\n //\n // This is not 100% correct - there are theoretical cases where we\n // return an internal error to the app that shouldn't have been. But it\n // would have to be a crazy coincidence: we'd have to have a network\n // error on the first attempt that resolves by the second attempt.\n //\n // One might ask why not try/catch just the calls to the mutators and\n // consider those application errors. That is actually what we do in\n // Replicache:\n //\n // https://github.com/rocicorp/todo-row-versioning/blob/9a0a79dc2d2de32c4fac61b5d1634bd9a9e66b7c/server/src/push.ts#L131\n //\n // We don't do it here because:\n //\n // 1. It's still not perfect. It's hard to isolate SQL errors in\n // mutators due to app developer mistakes from SQL errors due to\n // Zero mistakes.\n // 2. 
It's not possible to do this with the pg library we're using in\n // Zero anyway: https://github.com/porsager/postgres/issues/455.\n //\n // Personally I think this simple retry policy is nice.\n let errorMode = false;\n for (let i = 0; i < MAX_SERIALIZATION_ATTEMPTS; i++) {\n try {\n await db.begin(Mode.SERIALIZABLE, async tx => {\n // Simulates a concurrent request for testing. In production this is a noop.\n const done = onTxStart?.();\n try {\n return await processMutationWithTx(\n lc,\n tx,\n authData,\n shard,\n clientGroupID,\n mutation,\n errorMode,\n writeAuthorizer,\n );\n } finally {\n await done;\n }\n });\n if (errorMode) {\n lc.debug?.('Ran mutation successfully in error mode');\n }\n break;\n } catch (e) {\n if (e instanceof MutationAlreadyProcessedError) {\n lc.debug?.(e.message);\n // Don't double-count already processed mutations, but they were counted above\n return undefined;\n }\n if (\n isProtocolError(e) &&\n !errorMode &&\n e.kind === ErrorKind.InvalidPush &&\n customMutatorsEnabled &&\n i < 2\n ) {\n // We're temporarily supporting custom mutators AND CRUD mutators at the same time.\n // This can create a lot of OOO mutation errors since we do not know when the API server\n // has applied a custom mutation before moving on to process CRUD mutations.\n // The temporary workaround (since CRUD is being deprecated) is to retry the mutation\n // after a small delay. Users are not expected to be running both CRUD and Custom mutators.\n // They should migrate completely to custom mutators.\n lc.info?.(\n 'Both CRUD and Custom mutators are being used at once. This is supported for now but IS NOT RECOMMENDED. Migrate completely to custom mutators.',\n e,\n );\n await new Promise(resolve => setTimeout(resolve, 100));\n continue;\n }\n if (isProtocolError(e) || errorMode) {\n lc.error?.('Process mutation error', e);\n throw e;\n }\n if (\n e instanceof postgres.PostgresError &&\n e.code === PG_SERIALIZATION_FAILURE\n ) {\n lc.info?.(`attempt ${i + 1}: ${String(e)}`, e);\n continue; // Retry up to MAX_SERIALIZATION_ATTEMPTS.\n }\n result = [ErrorKind.MutationFailed, String(e)];\n if (errorMode) {\n break;\n }\n lc.error?.('Got error running mutation, re-running in error mode', e);\n errorMode = true;\n i--;\n }\n }\n } finally {\n lc.debug?.('Process mutation complete in', Date.now() - start);\n }\n return result;\n}\n\nexport async function processMutationWithTx(\n lc: LogContext,\n tx: PostgresTransaction,\n authData: JWTPayload | undefined,\n shard: ShardID,\n clientGroupID: string,\n mutation: CRUDMutation,\n errorMode: boolean,\n authorizer: WriteAuthorizer,\n) {\n const tasks: (() => Promise<unknown>)[] = [];\n\n async function execute(stmt: postgres.PendingQuery<postgres.Row[]>) {\n try {\n return await stmt.execute();\n } finally {\n const q = stmt as unknown as Query;\n lc.debug?.(`${q.string}: ${JSON.stringify(q.parameters)}`);\n }\n }\n\n authorizer.reloadPermissions();\n\n if (!errorMode) {\n const {ops} = mutation.args[0];\n const normalizedOps = authorizer.normalizeOps(ops);\n const [canPre, canPost] = await Promise.all([\n authorizer.canPreMutation(authData, normalizedOps),\n authorizer.canPostMutation(authData, normalizedOps),\n ]);\n if (canPre && canPost) {\n for (const op of ops) {\n switch (op.op) {\n case 'insert':\n tasks.push(() => execute(getInsertSQL(tx, op)));\n break;\n case 'upsert':\n tasks.push(() => execute(getUpsertSQL(tx, op)));\n break;\n case 'update':\n tasks.push(() => execute(getUpdateSQL(tx, op)));\n break;\n case 'delete':\n 
tasks.push(() => execute(getDeleteSQL(tx, op)));\n break;\n default:\n unreachable(op);\n }\n }\n }\n }\n\n // Confirm the mutation even though it may have been blocked by the authorizer.\n // Authorizer blocking a mutation is not an error but the correct result of the mutation.\n tasks.unshift(() =>\n checkSchemaVersionAndIncrementLastMutationID(\n tx,\n shard,\n clientGroupID,\n mutation.clientID,\n mutation.id,\n ),\n );\n\n // Note: An error thrown from any Promise aborts the entire transaction.\n await Promise.all(tasks.map(task => task()));\n}\n\nexport function getInsertSQL(\n tx: postgres.TransactionSql,\n create: InsertOp,\n): postgres.PendingQuery<postgres.Row[]> {\n return tx`INSERT INTO ${tx(create.tableName)} ${tx(create.value)}`;\n}\n\nexport function getUpsertSQL(\n tx: postgres.TransactionSql,\n set: UpsertOp,\n): postgres.PendingQuery<postgres.Row[]> {\n const {tableName, primaryKey, value} = set;\n return tx`\n INSERT INTO ${tx(tableName)} ${tx(value)}\n ON CONFLICT (${tx(primaryKey)})\n DO UPDATE SET ${tx(value)}\n `;\n}\n\nfunction getUpdateSQL(\n tx: postgres.TransactionSql,\n update: UpdateOp,\n): postgres.PendingQuery<postgres.Row[]> {\n const table = update.tableName;\n const {primaryKey, value} = update;\n const id: Record<string, PrimaryKeyValue> = {};\n for (const key of primaryKey) {\n id[key] = v.parse(value[key], primaryKeyValueSchema);\n }\n return tx`UPDATE ${tx(table)} SET ${tx(value)} WHERE ${Object.entries(\n id,\n ).flatMap(([key, value], i) =>\n i ? [tx`AND`, tx`${tx(key)} = ${value}`] : tx`${tx(key)} = ${value}`,\n )}`;\n}\n\nfunction getDeleteSQL(\n tx: postgres.TransactionSql,\n deleteOp: DeleteOp,\n): postgres.PendingQuery<postgres.Row[]> {\n const {tableName, primaryKey, value} = deleteOp;\n\n const conditions = [];\n for (const key of primaryKey) {\n if (conditions.length > 0) {\n conditions.push(tx`AND`);\n }\n conditions.push(tx`${tx(key)} = ${value[key]}`);\n }\n\n return tx`DELETE FROM ${tx(tableName)} WHERE ${conditions}`;\n}\n\nasync function checkSchemaVersionAndIncrementLastMutationID(\n tx: PostgresTransaction,\n shard: ShardID,\n clientGroupID: string,\n clientID: string,\n receivedMutationID: number,\n) {\n const [{lastMutationID}] = await tx<{lastMutationID: bigint}[]>`\n INSERT INTO ${tx(upstreamSchema(shard))}.clients \n as current (\"clientGroupID\", \"clientID\", \"lastMutationID\")\n VALUES (${clientGroupID}, ${clientID}, ${1})\n ON CONFLICT (\"clientGroupID\", \"clientID\")\n DO UPDATE SET \"lastMutationID\" = current.\"lastMutationID\" + 1\n RETURNING \"lastMutationID\"\n `;\n\n // ABORT if the resulting lastMutationID is not equal to the receivedMutationID.\n if (receivedMutationID < lastMutationID) {\n throw new MutationAlreadyProcessedError(\n clientID,\n receivedMutationID,\n lastMutationID,\n );\n } else if (receivedMutationID > lastMutationID) {\n throw new ProtocolError({\n kind: ErrorKind.InvalidPush,\n message: `Push contains unexpected mutation id ${receivedMutationID} for client ${clientID}. 
Expected mutation id ${lastMutationID.toString()}.`,\n origin: ErrorOrigin.ZeroCache,\n });\n }\n}\n\n// The slice of information from the Query object in Postgres.js that gets logged for debugging.\n// https://github.com/porsager/postgres/blob/f58cd4f3affd3e8ce8f53e42799672d86cd2c70b/src/connection.js#L219\ntype Query = {string: string; parameters: object[]};\n"],"names":["ErrorKind.MutationRateLimited","MutationType.CRUD","Mode.SERIALIZABLE","ErrorKind.InvalidPush","ErrorKind.MutationFailed","v.parse","value","ErrorOrigin.ZeroCache"],"mappings":";;;;;;;;;;;;;;;;;;;;AAyDO,MAAM,eAA2C;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,WAAW,SAAA;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,EACT,YAAY;AAAA,EACZ,aAAa;AAAA,EAEJ,iBAAiB;AAAA,IACxB;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAGF,YACE,IACA,OACA,eACA,UACA,QACA,mBACA;AACA,SAAK,KAAK;AACV,SAAK,MAAM;AACX,SAAK,YAAY;AACjB,SAAK,SAAS;AACd,SAAK,WAAW,IAAI,SAAS,KAAK,KAAK,OAAO,QAAQ,MAAM;AAAA,MAC1D,eAAe;AAAA,IAAA,CAChB;AACD,SAAK,mBAAmB,IAAI;AAAA,MAC1B,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA;AAAA,IAAA;AAGF,QAAI,OAAO,qBAAqB,QAAQ,QAAW;AACjD,WAAK,WAAW,IAAI;AAAA,QAClB,OAAO,qBAAqB;AAAA,QAC5B,OAAO,qBAAqB;AAAA,MAAA;AAAA,IAEhC;AAAA,EACF;AAAA,EAEA,MAAM;AACJ,WAAO,CAAC,KAAK,YAAY,mCAAmC;AAC5D,MAAE,KAAK;AAAA,EACT;AAAA,EAEA,QAAQ;AACN,WAAO,CAAC,KAAK,YAAY,mCAAmC;AAC5D,MAAE,KAAK;AACP,QAAI,KAAK,aAAa,GAAG;AACvB,WAAK,KAAK,KAAA;AAAA,IACZ;AAAA,EACF;AAAA,EAEA,UAAmB;AACjB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,gBACE,UACA,UACA,wBAAwB,OACY;AACpC,QAAI,KAAK,UAAU,MAAA,MAAY,OAAO;AACpC,aAAO,QAAQ,QAAQ;AAAA,QACrBA;AAAAA,QACA;AAAA,MAAA,CACD;AAAA,IACH;AACA,SAAK,eAAe,IAAI,GAAG;AAAA,MACzB,eAAe,KAAK;AAAA,IAAA,CACrB;AACD,WAAO;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAAA,EAEA,MAAqB;AACnB,WAAO,KAAK,SAAS;AAAA,EACvB;AAAA,EAEA,OAAsB;AACpB,QAAI,KAAK,YAAY;AACnB,aAAO,KAAK,SAAS;AAAA,IACvB;AACA,SAAK,iBAAiB,QAAA;AACtB,SAAK,aAAa;AAClB,SAAK,SAAS,QAAA;AACd,WAAO,KAAK,SAAS;AAAA,EACvB;AACF;AAEA,MAAM,6BAA6B;AAEnC,eAAsB,gBACpB,IACA,UACA,IACA,OACA,eACA,UACA,iBACA,WACA,wBAAwB,OACY;AACpC;AAAA,IACE,SAAS,SAASC;AAAAA,IAClB;AAAA,EAAA;AAEF,OAAK,GAAG,YAAY,cAAc,SAAS,EAAE;AAC7C,OAAK,GAAG,YAAY,iBAAiB;AACrC,KAAG,QAAQ,0BAA0B,QAAQ;AAG7C,iBAAe,MAAM;AAErB,MAAI;AAEJ,QAAM,QAAQ,KAAK,IAAA;AACnB,MAAI;AAuCF,QAAI,YAAY;AAChB,aAAS,IAAI,GAAG,IAAI,4BAA4B,KAAK;AACnD,UAAI;AACF,cAAM,GAAG,MAAMC,cAAmB,OAAM,OAAM;AAE5C,gBAAM,OAAO,YAAA;AACb,cAAI;AACF,mBAAO,MAAM;AAAA,cACX;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,cACA;AAAA,YAAA;AAAA,UAEJ,UAAA;AACE,kBAAM;AAAA,UACR;AAAA,QACF,CAAC;AACD,YAAI,WAAW;AACb,aAAG,QAAQ,yCAAyC;AAAA,QACtD;AACA;AAAA,MACF,SAAS,GAAG;AACV,YAAI,aAAa,+BAA+B;AAC9C,aAAG,QAAQ,EAAE,OAAO;AAEpB,iBAAO;AAAA,QACT;AACA,YACE,gBAAgB,CAAC,KACjB,CAAC,aACD,EAAE,SAASC,eACX,yBACA,IAAI,GACJ;AAOA,aAAG;AAAA,YACD;AAAA,YACA;AAAA,UAAA;AAEF,gBAAM,IAAI,QAAQ,CAAA,YAAW,WAAW,SAAS,GAAG,CAAC;AACrD;AAAA,QACF;AACA,YAAI,gBAAgB,CAAC,KAAK,WAAW;AACnC,aAAG,QAAQ,0BAA0B,CAAC;AACtC,gBAAM;AAAA,QACR;AACA,YACE,aAAa,SAAS,iBACtB,EAAE,SAAS,0BACX;AACA,aAAG,OAAO,WAAW,IAAI,CAAC,KAAK,OAAO,CAAC,CAAC,IAAI,CAAC;AAC7C;AAAA,QACF;AACA,iBAAS,CAACC,gBAA0B,OAAO,CAAC,CAAC;AAC7C,YAAI,WAAW;AACb;AAAA,QACF;AACA,WAAG,QAAQ,wDAAwD,CAAC;AACpE,oBAAY;AACZ;AAAA,MACF;AAAA,IACF;AAAA,EACF,UAAA;AACE,OAAG,QAAQ,gCAAgC,KAAK,IAAA,IAAQ,KAAK;AAAA,EAC/D;AACA,SAAO;AACT;AAEA,eAAsB,sBACpB,IACA,IACA,UACA,OACA,eACA,UACA,WACA,YACA;AACA,QAAM,QAAoC,CAAA;AAE1C,iBAAe,QAAQ,MAA6C;AAClE,QAAI;AACF,aAAO,MAAM,KAAK,QAAA;AAAA,IACpB,UAAA;AACE,YAAM,IAAI;AACV,SAAG,QAAQ,GAAG,EAAE,MAAM,KAAK,KAAK,UAAU,EAAE,UAAU,
CAAC,EAAE;AAAA,IAC3D;AAAA,EACF;AAEA,aAAW,kBAAA;AAEX,MAAI,CAAC,WAAW;AACd,UAAM,EAAC,IAAA,IAAO,SAAS,KAAK,CAAC;AAC7B,UAAM,gBAAgB,WAAW,aAAa,GAAG;AACjD,UAAM,CAAC,QAAQ,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,MAC1C,WAAW,eAAe,UAAU,aAAa;AAAA,MACjD,WAAW,gBAAgB,UAAU,aAAa;AAAA,IAAA,CACnD;AACD,QAAI,UAAU,SAAS;AACrB,iBAAW,MAAM,KAAK;AACpB,gBAAQ,GAAG,IAAA;AAAA,UACT,KAAK;AACH,kBAAM,KAAK,MAAM,QAAQ,aAAa,IAAI,EAAE,CAAC,CAAC;AAC9C;AAAA,UACF,KAAK;AACH,kBAAM,KAAK,MAAM,QAAQ,aAAa,IAAI,EAAE,CAAC,CAAC;AAC9C;AAAA,UACF,KAAK;AACH,kBAAM,KAAK,MAAM,QAAQ,aAAa,IAAI,EAAE,CAAC,CAAC;AAC9C;AAAA,UACF,KAAK;AACH,kBAAM,KAAK,MAAM,QAAQ,aAAa,IAAI,EAAE,CAAC,CAAC;AAC9C;AAAA,UACF;AACE,wBAAc;AAAA,QAAA;AAAA,MAEpB;AAAA,IACF;AAAA,EACF;AAIA,QAAM;AAAA,IAAQ,MACZ;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA,SAAS;AAAA,MACT,SAAS;AAAA,IAAA;AAAA,EACX;AAIF,QAAM,QAAQ,IAAI,MAAM,IAAI,CAAA,SAAQ,KAAA,CAAM,CAAC;AAC7C;AAEO,SAAS,aACd,IACA,QACuC;AACvC,SAAO,iBAAiB,GAAG,OAAO,SAAS,CAAC,IAAI,GAAG,OAAO,KAAK,CAAC;AAClE;AAEO,SAAS,aACd,IACA,KACuC;AACvC,QAAM,EAAC,WAAW,YAAY,MAAA,IAAS;AACvC,SAAO;AAAA,kBACS,GAAG,SAAS,CAAC,IAAI,GAAG,KAAK,CAAC;AAAA,mBACzB,GAAG,UAAU,CAAC;AAAA,oBACb,GAAG,KAAK,CAAC;AAAA;AAE7B;AAEA,SAAS,aACP,IACA,QACuC;AACvC,QAAM,QAAQ,OAAO;AACrB,QAAM,EAAC,YAAY,MAAA,IAAS;AAC5B,QAAM,KAAsC,CAAA;AAC5C,aAAW,OAAO,YAAY;AAC5B,OAAG,GAAG,IAAIC,MAAQ,MAAM,GAAG,GAAG,qBAAqB;AAAA,EACrD;AACA,SAAO,YAAY,GAAG,KAAK,CAAC,QAAQ,GAAG,KAAK,CAAC,UAAU,OAAO;AAAA,IAC5D;AAAA,EAAA,EACA;AAAA,IAAQ,CAAC,CAAC,KAAKC,MAAK,GAAG,MACvB,IAAI,CAAC,SAAS,KAAK,GAAG,GAAG,CAAC,MAAMA,MAAK,EAAE,IAAI,KAAK,GAAG,GAAG,CAAC,MAAMA,MAAK;AAAA,EAAA,CACnE;AACH;AAEA,SAAS,aACP,IACA,UACuC;AACvC,QAAM,EAAC,WAAW,YAAY,MAAA,IAAS;AAEvC,QAAM,aAAa,CAAA;AACnB,aAAW,OAAO,YAAY;AAC5B,QAAI,WAAW,SAAS,GAAG;AACzB,iBAAW,KAAK,OAAO;AAAA,IACzB;AACA,eAAW,KAAK,KAAK,GAAG,GAAG,CAAC,MAAM,MAAM,GAAG,CAAC,EAAE;AAAA,EAChD;AAEA,SAAO,iBAAiB,GAAG,SAAS,CAAC,UAAU,UAAU;AAC3D;AAEA,eAAe,6CACb,IACA,OACA,eACA,UACA,oBACA;AACA,QAAM,CAAC,EAAC,gBAAe,IAAI,MAAM;AAAA,kBACjB,GAAG,eAAe,KAAK,CAAC,CAAC;AAAA;AAAA,oBAEvB,aAAa,KAAK,QAAQ,KAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAOlD,MAAI,qBAAqB,gBAAgB;AACvC,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ,WAAW,qBAAqB,gBAAgB;AAC9C,UAAM,IAAI,cAAc;AAAA,MACtB,MAAMH;AAAAA,MACN,SAAS,wCAAwC,kBAAkB,eAAe,QAAQ,0BAA0B,eAAe,UAAU;AAAA,MAC7I,QAAQI;AAAAA,IAAY,CACrB;AAAA,EACH;AACF;"}
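The substantive change buried in the sourcesContent above is that mutagen no longer consults the schemaVersions table: the schemaVersion parameter disappears from Mutagen.processMutation, processMutation, and checkSchemaVersionAndIncrementLastMutationID, and the throwProtocolErrorIfSchemaVersionNotSupported import is gone (matching the deleted schema-versions files at the end of the file list). A minimal TypeScript sketch of the interface change follows, using simplified stand-in types rather than the package's real ones.

// Stand-in types for illustration only; the real Mutation, JWTPayload, and
// MutationError types live inside @rocicorp/zero and are not reproduced here.
type Mutation = {type: 'crud' | 'custom'; id: number; clientID: string};
type JWTPayload = Record<string, unknown>;
type MutationError = [kind: string, desc: string];

// 0.26.0-canary.0: schemaVersion was threaded through to a schemaVersions check.
interface MutagenBefore {
  processMutation(
    mutation: Mutation,
    authData: JWTPayload | undefined,
    schemaVersion: number | undefined,
    customMutatorsEnabled: boolean,
  ): Promise<MutationError | undefined>;
}

// 0.26.0-canary.2: the parameter (and the schema-versions module behind it) is removed.
interface MutagenAfter {
  processMutation(
    mutation: Mutation,
    authData: JWTPayload | undefined,
    customMutatorsEnabled: boolean,
  ): Promise<MutationError | undefined>;
}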
package/out/zero-cache/src/services/mutagen/pusher.d.ts
@@ -2,7 +2,6 @@ import type { LogContext } from '@rocicorp/logger';
  import type { Downstream } from '../../../../zero-protocol/src/down.ts';
  import { type MutationID, type PushBody } from '../../../../zero-protocol/src/push.ts';
  import { type ZeroConfig } from '../../config/zero-config.ts';
- import type { PostgresDB } from '../../types/pg.ts';
  import type { Source } from '../../types/streams.ts';
  import { Subscription } from '../../types/subscription.ts';
  import type { HandlerResult, StreamResult } from '../../workers/connection.ts';
@@ -30,7 +29,7 @@ type Config = Pick<ZeroConfig, 'app' | 'shard'>;
  export declare class PusherService implements Service, Pusher {
      #private;
      readonly id: string;
-     constructor(upstream: PostgresDB, appConfig: Config, pushConfig: ZeroConfig['push'] & {
+     constructor(appConfig: Config, pushConfig: ZeroConfig['push'] & {
          url: string[];
      }, lc: LogContext, clientGroupID: string);
      get pushURL(): string | undefined;
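For callers, the visible effect of this declaration change is one argument fewer: PusherService no longer takes an upstream Postgres handle. A hypothetical call-site sketch follows; the declared placeholders stand in for values a caller would already have, and the real wiring lives inside zero-cache and is not shown in this diff.

// Placeholder declarations; real types come from zero-cache internals.
declare const upstreamDB: unknown;
declare const appConfig: unknown;
declare const pushConfig: unknown;
declare const lc: unknown;
declare const clientGroupID: string;
declare class PusherServiceBefore {
  constructor(upstream: unknown, appConfig: unknown, pushConfig: unknown, lc: unknown, clientGroupID: string);
}
declare class PusherServiceAfter {
  constructor(appConfig: unknown, pushConfig: unknown, lc: unknown, clientGroupID: string);
}

// 0.26.0-canary.0: an upstream PostgresDB was the first constructor argument.
new PusherServiceBefore(upstreamDB, appConfig, pushConfig, lc, clientGroupID);

// 0.26.0-canary.2: the database dependency is dropped from the constructor.
new PusherServiceAfter(appConfig, pushConfig, lc, clientGroupID);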
package/out/zero-cache/src/services/mutagen/pusher.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"pusher.d.ts","sourceRoot":"","sources":["../../../../../../zero-cache/src/services/mutagen/pusher.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAMjD,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,uCAAuC,CAAC;AAQtE,OAAO,EAEL,KAAK,UAAU,EACf,KAAK,QAAQ,EAEd,MAAM,uCAAuC,CAAC;AAC/C,OAAO,EAAC,KAAK,UAAU,EAAC,MAAM,6BAA6B,CAAC;AAK5D,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAElD,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,wBAAwB,CAAC;AACnD,OAAO,EAAC,YAAY,EAAC,MAAM,6BAA6B,CAAC;AACzD,OAAO,KAAK,EAAC,aAAa,EAAE,YAAY,EAAC,MAAM,6BAA6B,CAAC;AAC7E,OAAO,KAAK,EAAC,iBAAiB,EAAE,OAAO,EAAC,MAAM,eAAe,CAAC;AAE9D,MAAM,WAAW,MAAO,SAAQ,iBAAiB;IAC/C,QAAQ,CAAC,OAAO,EAAE,MAAM,GAAG,SAAS,CAAC;IAErC,cAAc,CACZ,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,MAAM,EACZ,WAAW,EAAE,MAAM,GAAG,SAAS,GAC9B,MAAM,CAAC,UAAU,CAAC,CAAC;IACtB,WAAW,CACT,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,QAAQ,EACd,IAAI,EAAE,MAAM,GAAG,SAAS,EACxB,UAAU,EAAE,MAAM,GAAG,SAAS,GAC7B,aAAa,CAAC;IACjB,oBAAoB,CAAC,MAAM,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;CACzD;AAED,KAAK,MAAM,GAAG,IAAI,CAAC,UAAU,EAAE,KAAK,GAAG,OAAO,CAAC,CAAC;AAEhD;;;;;;;;;;;;GAYG;AACH,qBAAa,aAAc,YAAW,OAAO,EAAE,MAAM;;IACnD,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;gBAWlB,QAAQ,EAAE,UAAU,EACpB,SAAS,EAAE,MAAM,EACjB,UAAU,EAAE,UAAU,CAAC,MAAM,CAAC,GAAG;QAAC,GAAG,EAAE,MAAM,EAAE,CAAA;KAAC,EAChD,EAAE,EAAE,UAAU,EACd,aAAa,EAAE,MAAM;IAgBvB,IAAI,OAAO,IAAI,MAAM,GAAG,SAAS,CAEhC;IAED,cAAc,CACZ,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,MAAM,EACZ,WAAW,EAAE,MAAM,GAAG,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IAKjC,WAAW,CACT,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,QAAQ,EACd,IAAI,EAAE,MAAM,GAAG,SAAS,EACxB,UAAU,EAAE,MAAM,GAAG,SAAS,GAC7B,OAAO,CAAC,aAAa,EAAE,YAAY,CAAC;IAWjC,oBAAoB,CAAC,MAAM,EAAE,UAAU;IAW7C,GAAG;IAKH,KAAK;IAQL,OAAO,IAAI,OAAO;IAIlB,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;IAKpB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;CAQtB;AAED,KAAK,WAAW,GAAG;IACjB,IAAI,EAAE,QAAQ,CAAC;IACf,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC;IACzB,UAAU,EAAE,MAAM,GAAG,SAAS,CAAC;IAC/B,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AACF,KAAK,iBAAiB,GAAG,WAAW,GAAG,MAAM,CAAC;AA2T9C;;;;;GAKG;AACH,wBAAgB,aAAa,CAC3B,OAAO,EAAE,SAAS,CAAC,iBAAiB,GAAG,SAAS,CAAC,EAAE,GAClD,CAAC,WAAW,EAAE,EAAE,OAAO,CAAC,CAqC1B"}
+ {"version":3,"file":"pusher.d.ts","sourceRoot":"","sources":["../../../../../../zero-cache/src/services/mutagen/pusher.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAMjD,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,uCAAuC,CAAC;AAQtE,OAAO,EAGL,KAAK,UAAU,EACf,KAAK,QAAQ,EAEd,MAAM,uCAAuC,CAAC;AAE/C,OAAO,EAAC,KAAK,UAAU,EAAC,MAAM,6BAA6B,CAAC;AAK5D,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,wBAAwB,CAAC;AACnD,OAAO,EAAC,YAAY,EAAC,MAAM,6BAA6B,CAAC;AACzD,OAAO,KAAK,EAAC,aAAa,EAAE,YAAY,EAAC,MAAM,6BAA6B,CAAC;AAC7E,OAAO,KAAK,EAAC,iBAAiB,EAAE,OAAO,EAAC,MAAM,eAAe,CAAC;AAE9D,MAAM,WAAW,MAAO,SAAQ,iBAAiB;IAC/C,QAAQ,CAAC,OAAO,EAAE,MAAM,GAAG,SAAS,CAAC;IAErC,cAAc,CACZ,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,MAAM,EACZ,WAAW,EAAE,MAAM,GAAG,SAAS,GAC9B,MAAM,CAAC,UAAU,CAAC,CAAC;IACtB,WAAW,CACT,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,QAAQ,EACd,IAAI,EAAE,MAAM,GAAG,SAAS,EACxB,UAAU,EAAE,MAAM,GAAG,SAAS,GAC7B,aAAa,CAAC;IACjB,oBAAoB,CAAC,MAAM,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;CACzD;AAED,KAAK,MAAM,GAAG,IAAI,CAAC,UAAU,EAAE,KAAK,GAAG,OAAO,CAAC,CAAC;AAEhD;;;;;;;;;;;;GAYG;AACH,qBAAa,aAAc,YAAW,OAAO,EAAE,MAAM;;IACnD,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;gBAYlB,SAAS,EAAE,MAAM,EACjB,UAAU,EAAE,UAAU,CAAC,MAAM,CAAC,GAAG;QAAC,GAAG,EAAE,MAAM,EAAE,CAAA;KAAC,EAChD,EAAE,EAAE,UAAU,EACd,aAAa,EAAE,MAAM;IAiBvB,IAAI,OAAO,IAAI,MAAM,GAAG,SAAS,CAEhC;IAED,cAAc,CACZ,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,MAAM,EACZ,WAAW,EAAE,MAAM,GAAG,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IAKjC,WAAW,CACT,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,QAAQ,EACd,IAAI,EAAE,MAAM,GAAG,SAAS,EACxB,UAAU,EAAE,MAAM,GAAG,SAAS,GAC7B,OAAO,CAAC,aAAa,EAAE,YAAY,CAAC;IAWjC,oBAAoB,CAAC,MAAM,EAAE,UAAU;IAiD7C,GAAG;IAKH,KAAK;IAQL,OAAO,IAAI,OAAO;IAIlB,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;IAKpB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;CAQtB;AAED,KAAK,WAAW,GAAG;IACjB,IAAI,EAAE,QAAQ,CAAC;IACf,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC;IACzB,UAAU,EAAE,MAAM,GAAG,SAAS,CAAC;IAC/B,QAAQ,EAAE,MAAM,CAAC;CAClB,CAAC;AACF,KAAK,iBAAiB,GAAG,WAAW,GAAG,MAAM,CAAC;AA2T9C;;;;;GAKG;AACH,wBAAgB,aAAa,CAC3B,OAAO,EAAE,SAAS,CAAC,iBAAiB,GAAG,SAAS,CAAC,EAAE,GAClD,CAAC,WAAW,EAAE,EAAE,OAAO,CAAC,CAqC1B"}
@@ -7,27 +7,29 @@ import { PushFailed } from "../../../../zero-protocol/src/error-kind-enum.js";
  import { ZeroCache, Server } from "../../../../zero-protocol/src/error-origin-enum.js";
  import { HTTP, UnsupportedPushVersion, Internal, OutOfOrderMutation } from "../../../../zero-protocol/src/error-reason-enum.js";
  import { isProtocolError } from "../../../../zero-protocol/src/error.js";
- import { pushResponseSchema } from "../../../../zero-protocol/src/push.js";
+ import { CLEANUP_RESULTS_MUTATION_NAME, pushResponseSchema } from "../../../../zero-protocol/src/push.js";
+ import { Custom } from "../../../../zero-protocol/src/mutation-type-enum.js";
  import "../../config/zero-config.js";
  import { compileUrlPattern, fetchFromAPIServer } from "../../custom/fetch.js";
  import { getOrCreateCounter } from "../../observability/metrics.js";
  import { recordMutation } from "../../server/anonymous-otel-start.js";
  import { ProtocolErrorWithLevel } from "../../types/error-with-level.js";
- import { upstreamSchema } from "../../types/shards.js";
  import { Subscription } from "../../types/subscription.js";
  class PusherService {
  id;
  #pusher;
  #queue;
  #pushConfig;
- #upstream;
  #config;
+ #lc;
+ #pushURLPatterns;
  #stopped;
  #refCount = 0;
  #isStopped = false;
- constructor(upstream, appConfig, pushConfig, lc, clientGroupID) {
+ constructor(appConfig, pushConfig, lc, clientGroupID) {
  this.#config = appConfig;
- this.#upstream = upstream;
+ this.#lc = lc.withContext("component", "pusherService");
+ this.#pushURLPatterns = pushConfig.url.map(compileUrlPattern);
  this.#queue = new Queue();
  this.#pusher = new PushWorker(
  appConfig,
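The constructor reshape above removes the upstream Postgres handle from PusherService; the service now tags its own LogContext and precompiles URL patterns from the configured push URLs. A minimal sketch of that derivation, assuming stand-in types (only the `withContext` and `compileUrlPattern` usage is taken from the hunk above):

```ts
// Sketch only: LogContextLike is a simplified stand-in for @rocicorp/logger's
// LogContext; compileUrlPattern is passed in rather than imported so that no
// signature beyond its use in the hunk is assumed.
type LogContextLike = {withContext(key: string, value: string): LogContextLike};

function derivePusherState(
  pushConfig: {url: string[]},
  lc: LogContextLike,
  compileUrlPattern: (url: string) => unknown,
) {
  return {
    // component-tagged logger, as in lc.withContext("component", "pusherService")
    lc: lc.withContext('component', 'pusherService'),
    // one precompiled pattern per configured push URL
    pushURLPatterns: pushConfig.url.map(compileUrlPattern),
  };
}
```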
@@ -55,13 +57,50 @@ class PusherService {
  };
  }
  async ackMutationResponses(upToID) {
- const sql = this.#upstream;
- await sql`DELETE FROM ${sql(
- upstreamSchema({
- appID: this.#config.app.id,
- shardNum: this.#config.shard.num
- })
- )}.mutations WHERE "clientGroupID" = ${this.id} AND "clientID" = ${upToID.clientID} AND "mutationID" <= ${upToID.id}`;
+ const url = this.#pushConfig.url[0];
+ if (!url) {
+ return;
+ }
+ const cleanupBody = {
+ clientGroupID: this.id,
+ mutations: [
+ {
+ type: Custom,
+ id: 0,
+ // Not tracked - this is fire-and-forget
+ clientID: upToID.clientID,
+ name: CLEANUP_RESULTS_MUTATION_NAME,
+ args: [
+ {
+ clientGroupID: this.id,
+ clientID: upToID.clientID,
+ upToMutationID: upToID.id
+ }
+ ],
+ timestamp: Date.now()
+ }
+ ],
+ pushVersion: 1,
+ timestamp: Date.now(),
+ requestID: `cleanup-${this.id}-${upToID.clientID}-${upToID.id}`
+ };
+ try {
+ await fetchFromAPIServer(
+ pushResponseSchema,
+ "push",
+ this.#lc,
+ url,
+ false,
+ this.#pushURLPatterns,
+ { appID: this.#config.app.id, shardNum: this.#config.shard.num },
+ { apiKey: this.#pushConfig.apiKey },
+ cleanupBody
+ );
+ } catch (e) {
+ this.#lc.warn?.("Failed to send cleanup mutation", {
+ error: getErrorMessage(e)
+ });
+ }
  }
  ref() {
  assert(!this.#isStopped, "PusherService is already stopped");
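With the upstream handle gone, acknowledging mutation responses no longer issues a DELETE against the shard's `mutations` table; it instead posts a fire-and-forget "cleanup" push to the first configured push URL and only warns on failure. A sketch of the body shape the new code builds, with simplified stand-in types (the real PushBody/MutationID types and CLEANUP_RESULTS_MUTATION_NAME live in zero-protocol):

```ts
// Illustrative sketch; field names mirror the hunk above, types are stand-ins.
type MutationID = {clientID: string; id: number};

function buildCleanupPushBody(
  clientGroupID: string,
  upToID: MutationID,
  cleanupMutationName: string, // CLEANUP_RESULTS_MUTATION_NAME in the source
) {
  return {
    clientGroupID,
    mutations: [
      {
        type: 'custom', // MutationType.Custom in the source
        id: 0, // not tracked - fire-and-forget
        clientID: upToID.clientID,
        name: cleanupMutationName,
        args: [
          {
            clientGroupID,
            clientID: upToID.clientID,
            upToMutationID: upToID.id,
          },
        ],
        timestamp: Date.now(),
      },
    ],
    pushVersion: 1,
    timestamp: Date.now(),
    requestID: `cleanup-${clientGroupID}-${upToID.clientID}-${upToID.id}`,
  };
}
```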
@@ -1 +1 @@
- {"version":3,"file":"pusher.js","sources":["../../../../../../zero-cache/src/services/mutagen/pusher.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {groupBy} from '../../../../shared/src/arrays.ts';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport {getErrorMessage} from '../../../../shared/src/error.ts';\nimport {must} from '../../../../shared/src/must.ts';\nimport {Queue} from '../../../../shared/src/queue.ts';\nimport type {Downstream} from '../../../../zero-protocol/src/down.ts';\nimport {ErrorKind} from '../../../../zero-protocol/src/error-kind.ts';\nimport {ErrorOrigin} from '../../../../zero-protocol/src/error-origin.ts';\nimport {ErrorReason} from '../../../../zero-protocol/src/error-reason.ts';\nimport {\n isProtocolError,\n type PushFailedBody,\n} from '../../../../zero-protocol/src/error.ts';\nimport {\n pushResponseSchema,\n type MutationID,\n type PushBody,\n type PushResponse,\n} from '../../../../zero-protocol/src/push.ts';\nimport {type ZeroConfig} from '../../config/zero-config.ts';\nimport {compileUrlPattern, fetchFromAPIServer} from '../../custom/fetch.ts';\nimport {getOrCreateCounter} from '../../observability/metrics.ts';\nimport {recordMutation} from '../../server/anonymous-otel-start.ts';\nimport {ProtocolErrorWithLevel} from '../../types/error-with-level.ts';\nimport type {PostgresDB} from '../../types/pg.ts';\nimport {upstreamSchema} from '../../types/shards.ts';\nimport type {Source} from '../../types/streams.ts';\nimport {Subscription} from '../../types/subscription.ts';\nimport type {HandlerResult, StreamResult} from '../../workers/connection.ts';\nimport type {RefCountedService, Service} from '../service.ts';\n\nexport interface Pusher extends RefCountedService {\n readonly pushURL: string | undefined;\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n ): Source<Downstream>;\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n ): HandlerResult;\n ackMutationResponses(upToID: MutationID): Promise<void>;\n}\n\ntype Config = Pick<ZeroConfig, 'app' | 'shard'>;\n\n/**\n * Receives push messages from zero-client and forwards\n * them the the user's API server.\n *\n * If the user's API server is taking too long to process\n * the push, the PusherService will add the push to a queue\n * and send pushes in bulk the next time the user's API server\n * is available.\n *\n * - One PusherService exists per client group.\n * - Mutations for a given client are always sent in-order\n * - Mutations for different clients in the same group may be interleaved\n */\nexport class PusherService implements Service, Pusher {\n readonly id: string;\n readonly #pusher: PushWorker;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #pushConfig: ZeroConfig['push'] & {url: string[]};\n readonly #upstream: PostgresDB;\n readonly #config: Config;\n #stopped: Promise<void> | undefined;\n #refCount = 0;\n #isStopped = false;\n\n constructor(\n upstream: PostgresDB,\n appConfig: Config,\n pushConfig: ZeroConfig['push'] & {url: string[]},\n lc: LogContext,\n clientGroupID: string,\n ) {\n this.#config = appConfig;\n this.#upstream = upstream;\n this.#queue = new Queue();\n this.#pusher = new PushWorker(\n appConfig,\n lc,\n pushConfig.url,\n pushConfig.apiKey,\n this.#queue,\n );\n this.id = clientGroupID;\n this.#pushConfig = pushConfig;\n }\n\n get pushURL(): string | undefined {\n return this.#pusher.pushURL[0];\n 
}\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n ) {\n return this.#pusher.initConnection(clientID, wsID, userPushURL);\n }\n\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n ): Exclude<HandlerResult, StreamResult> {\n if (!this.#pushConfig.forwardCookies) {\n httpCookie = undefined; // remove cookies if not forwarded\n }\n this.#queue.enqueue({push, auth, clientID, httpCookie});\n\n return {\n type: 'ok',\n };\n }\n\n async ackMutationResponses(upToID: MutationID) {\n // delete the relevant rows from the `mutations` table\n const sql = this.#upstream;\n await sql`DELETE FROM ${sql(\n upstreamSchema({\n appID: this.#config.app.id,\n shardNum: this.#config.shard.num,\n }),\n )}.mutations WHERE \"clientGroupID\" = ${this.id} AND \"clientID\" = ${upToID.clientID} AND \"mutationID\" <= ${upToID.id}`;\n }\n\n ref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n ++this.#refCount;\n }\n\n unref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n --this.#refCount;\n if (this.#refCount <= 0) {\n void this.stop();\n }\n }\n\n hasRefs(): boolean {\n return this.#refCount > 0;\n }\n\n run(): Promise<void> {\n this.#stopped = this.#pusher.run();\n return this.#stopped;\n }\n\n stop(): Promise<void> {\n if (this.#isStopped) {\n return must(this.#stopped, 'Stop was called before `run`');\n }\n this.#isStopped = true;\n this.#queue.enqueue('stop');\n return must(this.#stopped, 'Stop was called before `run`');\n }\n}\n\ntype PusherEntry = {\n push: PushBody;\n auth: string | undefined;\n httpCookie: string | undefined;\n clientID: string;\n};\ntype PusherEntryOrStop = PusherEntry | 'stop';\n\n/**\n * Awaits items in the queue then drains and sends them all\n * to the user's API server.\n */\nclass PushWorker {\n readonly #pushURLs: string[];\n readonly #pushURLPatterns: URLPattern[];\n readonly #apiKey: string | undefined;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #lc: LogContext;\n readonly #config: Config;\n readonly #clients: Map<\n string,\n {\n wsID: string;\n downstream: Subscription<Downstream>;\n }\n >;\n #userPushURL?: string | undefined;\n\n readonly #customMutations = getOrCreateCounter(\n 'mutation',\n 'custom',\n 'Number of custom mutations processed',\n );\n readonly #pushes = getOrCreateCounter(\n 'mutation',\n 'pushes',\n 'Number of pushes processed by the pusher',\n );\n\n constructor(\n config: Config,\n lc: LogContext,\n pushURL: string[],\n apiKey: string | undefined,\n queue: Queue<PusherEntryOrStop>,\n ) {\n this.#pushURLs = pushURL;\n this.#lc = lc.withContext('component', 'pusher');\n this.#pushURLPatterns = pushURL.map(compileUrlPattern);\n this.#apiKey = apiKey;\n this.#queue = queue;\n this.#config = config;\n this.#clients = new Map();\n }\n\n get pushURL() {\n return this.#pushURLs;\n }\n\n /**\n * Returns a new downstream stream if the clientID,wsID pair has not been seen before.\n * If a clientID already exists with a different wsID, that client's downstream is cancelled.\n */\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n ) {\n const existing = this.#clients.get(clientID);\n if (existing && existing.wsID === wsID) {\n // already initialized for this socket\n throw new Error('Connection was already initialized');\n }\n\n // client is back on a new connection\n if (existing) {\n existing.downstream.cancel();\n }\n\n // Handle client group level URL parameters\n if 
(this.#userPushURL === undefined) {\n // First client in the group - store its URL\n this.#userPushURL = userPushURL;\n } else {\n // Validate that subsequent clients have compatible parameters\n if (this.#userPushURL !== userPushURL) {\n this.#lc.warn?.(\n 'Client provided different mutate parameters than client group',\n {\n clientID,\n clientURL: userPushURL,\n clientGroupURL: this.#userPushURL,\n },\n );\n }\n }\n\n const downstream = Subscription.create<Downstream>({\n cleanup: () => {\n this.#clients.delete(clientID);\n },\n });\n this.#clients.set(clientID, {wsID, downstream});\n return downstream;\n }\n\n async run() {\n for (;;) {\n const task = await this.#queue.dequeue();\n const rest = this.#queue.drain();\n const [pushes, terminate] = combinePushes([task, ...rest]);\n for (const push of pushes) {\n const response = await this.#processPush(push);\n await this.#fanOutResponses(response);\n }\n\n if (terminate) {\n break;\n }\n }\n }\n\n /**\n * 1. If the entire `push` fails, we send the error to relevant clients.\n * 2. If the push succeeds, we look for any mutation failure that should cause the connection to terminate\n * and terminate the connection for those clients.\n */\n #fanOutResponses(response: PushResponse) {\n const connectionTerminations: (() => void)[] = [];\n\n // if the entire push failed, send that to the client.\n if ('kind' in response || 'error' in response) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a push error.',\n response,\n );\n const groupedMutationIDs = groupBy(\n response.mutationIDs ?? [],\n m => m.clientID,\n );\n for (const [clientID, mutationIDs] of groupedMutationIDs) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n // We do not resolve mutations on the client if the push fails\n // as those mutations will be retried.\n if ('error' in response) {\n // This error code path will eventually be removed when we\n // no longer support the legacy push error format.\n const pushFailedBody: PushFailedBody =\n response.error === 'http'\n ? {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.HTTP,\n status: response.status,\n bodyPreview: response.details,\n mutationIDs,\n message: `Fetch from API server returned non-OK status ${response.status}`,\n }\n : response.error === 'unsupportedPushVersion'\n ? {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.UnsupportedPushVersion,\n mutationIDs,\n message: `Unsupported push version`,\n }\n : {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.Internal,\n mutationIDs,\n message:\n response.error === 'zeroPusher'\n ? response.details\n : response.error === 'unsupportedSchemaVersion'\n ? 
'Unsupported schema version'\n : 'An unknown error occurred while pushing to the API server',\n };\n\n this.#failDownstream(client.downstream, pushFailedBody);\n } else if ('kind' in response) {\n this.#failDownstream(client.downstream, response);\n } else {\n unreachable(response);\n }\n }\n } else {\n // Look for mutations results that should cause us to terminate the connection\n const groupedMutations = groupBy(response.mutations, m => m.id.clientID);\n for (const [clientID, mutations] of groupedMutations) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n let failure: PushFailedBody | undefined;\n let i = 0;\n for (; i < mutations.length; i++) {\n const m = mutations[i];\n if ('error' in m.result) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a mutation error.',\n m.result,\n );\n }\n // This error code path will eventually be removed,\n // keeping this for backwards compatibility, but the server\n // should now return a PushFailedBody with the mutationIDs\n if ('error' in m.result && m.result.error === 'oooMutation') {\n failure = {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.OutOfOrderMutation,\n message: 'mutation was out of order',\n details: m.result.details,\n mutationIDs: mutations.map(m => ({\n clientID: m.id.clientID,\n id: m.id.id,\n })),\n };\n break;\n }\n }\n\n if (failure && i < mutations.length - 1) {\n this.#lc.warn?.(\n 'push-response contains mutations after a mutation which should fatal the connection',\n );\n }\n\n if (failure) {\n connectionTerminations.push(() =>\n this.#failDownstream(client.downstream, failure),\n );\n }\n }\n }\n\n connectionTerminations.forEach(cb => cb());\n }\n\n async #processPush(entry: PusherEntry): Promise<PushResponse> {\n this.#customMutations.add(entry.push.mutations.length, {\n clientGroupID: entry.push.clientGroupID,\n });\n this.#pushes.add(1, {\n clientGroupID: entry.push.clientGroupID,\n });\n\n // Record custom mutations for telemetry\n recordMutation('custom', entry.push.mutations.length);\n\n const url =\n this.#userPushURL ??\n must(this.#pushURLs[0], 'ZERO_MUTATE_URL is not set');\n\n this.#lc.debug?.(\n 'pushing to',\n url,\n 'with',\n entry.push.mutations.length,\n 'mutations',\n );\n\n let mutationIDs: MutationID[] = [];\n\n try {\n mutationIDs = entry.push.mutations.map(m => ({\n id: m.id,\n clientID: m.clientID,\n }));\n\n return await fetchFromAPIServer(\n pushResponseSchema,\n 'push',\n this.#lc,\n url,\n url === this.#userPushURL,\n this.#pushURLPatterns,\n {\n appID: this.#config.app.id,\n shardNum: this.#config.shard.num,\n },\n {\n apiKey: this.#apiKey,\n token: entry.auth,\n cookie: entry.httpCookie,\n },\n entry.push,\n );\n } catch (e) {\n if (isProtocolError(e) && e.errorBody.kind === ErrorKind.PushFailed) {\n return {\n ...e.errorBody,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n\n return {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.Internal,\n message: `Failed to push: ${getErrorMessage(e)}`,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n }\n\n #failDownstream(\n downstream: Subscription<Downstream>,\n errorBody: PushFailedBody,\n ): void {\n const logLevel = errorBody.origin === ErrorOrigin.Server ? 
'warn' : 'error';\n downstream.fail(new ProtocolErrorWithLevel(errorBody, logLevel));\n }\n}\n\n/**\n * Pushes for different clientIDs could theoretically be interleaved.\n *\n * In order to do efficient batching to the user's API server,\n * we collect all pushes for the same clientID into a single push.\n */\nexport function combinePushes(\n entries: readonly (PusherEntryOrStop | undefined)[],\n): [PusherEntry[], boolean] {\n const pushesByClientID = new Map<string, PusherEntry[]>();\n\n function collect() {\n const ret: PusherEntry[] = [];\n for (const entries of pushesByClientID.values()) {\n const composite: PusherEntry = {\n ...entries[0],\n push: {\n ...entries[0].push,\n mutations: [],\n },\n };\n ret.push(composite);\n for (const entry of entries) {\n assertAreCompatiblePushes(composite, entry);\n composite.push.mutations.push(...entry.push.mutations);\n }\n }\n return ret;\n }\n\n for (const entry of entries) {\n if (entry === 'stop' || entry === undefined) {\n return [collect(), true];\n }\n\n const {clientID} = entry;\n const existing = pushesByClientID.get(clientID);\n if (existing) {\n existing.push(entry);\n } else {\n pushesByClientID.set(clientID, [entry]);\n }\n }\n\n return [collect(), false] as const;\n}\n\n// These invariants should always be true for a given clientID.\n// If they are not, we have a bug in the code somewhere.\nfunction assertAreCompatiblePushes(left: PusherEntry, right: PusherEntry) {\n assert(\n left.clientID === right.clientID,\n 'clientID must be the same for all pushes',\n );\n assert(\n left.auth === right.auth,\n 'auth must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.schemaVersion === right.push.schemaVersion,\n 'schemaVersion must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.pushVersion === right.push.pushVersion,\n 'pushVersion must be the same for all pushes with the same clientID',\n 
);\n}\n"],"names":["ErrorKind.PushFailed","ErrorOrigin.ZeroCache","ErrorReason.HTTP","ErrorOrigin.Server","ErrorReason.UnsupportedPushVersion","ErrorReason.Internal","ErrorReason.OutOfOrderMutation","m","entries"],"mappings":";;;;;;;;;;;;;;;;;AAgEO,MAAM,cAAyC;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT;AAAA,EACA,YAAY;AAAA,EACZ,aAAa;AAAA,EAEb,YACE,UACA,WACA,YACA,IACA,eACA;AACA,SAAK,UAAU;AACf,SAAK,YAAY;AACjB,SAAK,SAAS,IAAI,MAAA;AAClB,SAAK,UAAU,IAAI;AAAA,MACjB;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,WAAW;AAAA,MACX,KAAK;AAAA,IAAA;AAEP,SAAK,KAAK;AACV,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,IAAI,UAA8B;AAChC,WAAO,KAAK,QAAQ,QAAQ,CAAC;AAAA,EAC/B;AAAA,EAEA,eACE,UACA,MACA,aACA;AACA,WAAO,KAAK,QAAQ,eAAe,UAAU,MAAM,WAAW;AAAA,EAChE;AAAA,EAEA,YACE,UACA,MACA,MACA,YACsC;AACtC,QAAI,CAAC,KAAK,YAAY,gBAAgB;AACpC,mBAAa;AAAA,IACf;AACA,SAAK,OAAO,QAAQ,EAAC,MAAM,MAAM,UAAU,YAAW;AAEtD,WAAO;AAAA,MACL,MAAM;AAAA,IAAA;AAAA,EAEV;AAAA,EAEA,MAAM,qBAAqB,QAAoB;AAE7C,UAAM,MAAM,KAAK;AACjB,UAAM,kBAAkB;AAAA,MACtB,eAAe;AAAA,QACb,OAAO,KAAK,QAAQ,IAAI;AAAA,QACxB,UAAU,KAAK,QAAQ,MAAM;AAAA,MAAA,CAC9B;AAAA,IAAA,CACF,sCAAsC,KAAK,EAAE,qBAAqB,OAAO,QAAQ,wBAAwB,OAAO,EAAE;AAAA,EACrH;AAAA,EAEA,MAAM;AACJ,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AAAA,EACT;AAAA,EAEA,QAAQ;AACN,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AACP,QAAI,KAAK,aAAa,GAAG;AACvB,WAAK,KAAK,KAAA;AAAA,IACZ;AAAA,EACF;AAAA,EAEA,UAAmB;AACjB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,MAAqB;AACnB,SAAK,WAAW,KAAK,QAAQ,IAAA;AAC7B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAAsB;AACpB,QAAI,KAAK,YAAY;AACnB,aAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,IAC3D;AACA,SAAK,aAAa;AAClB,SAAK,OAAO,QAAQ,MAAM;AAC1B,WAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,EAC3D;AACF;AAcA,MAAM,WAAW;AAAA,EACN;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAOT;AAAA,EAES,mBAAmB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAEO,UAAU;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAGF,YACE,QACA,IACA,SACA,QACA,OACA;AACA,SAAK,YAAY;AACjB,SAAK,MAAM,GAAG,YAAY,aAAa,QAAQ;AAC/C,SAAK,mBAAmB,QAAQ,IAAI,iBAAiB;AACrD,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,UAAU;AACf,SAAK,+BAAe,IAAA;AAAA,EACtB;AAAA,EAEA,IAAI,UAAU;AACZ,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,eACE,UACA,MACA,aACA;AACA,UAAM,WAAW,KAAK,SAAS,IAAI,QAAQ;AAC3C,QAAI,YAAY,SAAS,SAAS,MAAM;AAEtC,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAGA,QAAI,UAAU;AACZ,eAAS,WAAW,OAAA;AAAA,IACtB;AAGA,QAAI,KAAK,iBAAiB,QAAW;AAEnC,WAAK,eAAe;AAAA,IACtB,OAAO;AAEL,UAAI,KAAK,iBAAiB,aAAa;AACrC,aAAK,IAAI;AAAA,UACP;AAAA,UACA;AAAA,YACE;AAAA,YACA,WAAW;AAAA,YACX,gBAAgB,KAAK;AAAA,UAAA;AAAA,QACvB;AAAA,MAEJ;AAAA,IACF;AAEA,UAAM,aAAa,aAAa,OAAmB;AAAA,MACjD,SAAS,MAAM;AACb,aAAK,SAAS,OAAO,QAAQ;AAAA,MAC/B;AAAA,IAAA,CACD;AACD,SAAK,SAAS,IAAI,UAAU,EAAC,MAAM,YAAW;AAC9C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,MAAM;AACV,eAAS;AACP,YAAM,OAAO,MAAM,KAAK,OAAO,QAAA;AAC/B,YAAM,OAAO,KAAK,OAAO,MAAA;AACzB,YAAM,CAAC,QAAQ,SAAS,IAAI,cAAc,CAAC,MAAM,GAAG,IAAI,CAAC;AACzD,iBAAW,QAAQ,QAAQ;AACzB,cAAM,WAAW,MAAM,KAAK,aAAa,IAAI;AAC7C,cAAM,KAAK,iBAAiB,QAAQ;AAAA,MACtC;AAEA,UAAI,WAAW;AACb;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,UAAwB;AACvC,UAAM,yBAAyC,CAAA;AAG/C,QAAI,UAAU,YAAY,WAAW,UAAU;AAC7C,WAAK,IAAI;AAAA,QACP;AAAA,QACA;AAAA,MAAA;AAEF,YAAM,qBAAqB;AAAA,QACzB,SAAS,eAAe,CAAA;AAAA,QACxB,OAAK,EAAE;AAAA,MAAA;AAET,iBAAW,CAAC,UAAU,WAAW,KAAK,oBAAoB;AACxD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAIA,YAAI,WAAW,UAAU;AAGvB,gBAAM,iBACJ,SAAS,UAAU,SACf;AAAA,YACE,MAAMA;AAAAA,YACN,QAAQC;AAAAA,YACR,QAAQC;AAAAA,YACR,QAAQ,SAAS;AAAA,YACjB,aAAa,SAAS;AAAA,YACtB;AAAA,YACA,SAAS,gDAAgD,SAAS,MAAM;AAAA,UAAA,IAE1E,SAAS,UAAU,2BACjB;AAAA,YACE,MAAMF;AA
AAA,YACN,QAAQG;AAAAA,YACR,QAAQC;AAAAA,YACR;AAAA,YACA,SAAS;AAAA,UAAA,IAEX;AAAA,YACE,MAAMJ;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQE;AAAAA,YACR;AAAA,YACA,SACE,SAAS,UAAU,eACf,SAAS,UACT,SAAS,UAAU,6BACjB,+BACA;AAAA,UAAA;AAGlB,eAAK,gBAAgB,OAAO,YAAY,cAAc;AAAA,QACxD,WAAW,UAAU,UAAU;AAC7B,eAAK,gBAAgB,OAAO,YAAY,QAAQ;AAAA,QAClD,OAAO;AACL,sBAAoB;AAAA,QACtB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,mBAAmB,QAAQ,SAAS,WAAW,CAAA,MAAK,EAAE,GAAG,QAAQ;AACvE,iBAAW,CAAC,UAAU,SAAS,KAAK,kBAAkB;AACpD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAEA,YAAI;AACJ,YAAI,IAAI;AACR,eAAO,IAAI,UAAU,QAAQ,KAAK;AAChC,gBAAM,IAAI,UAAU,CAAC;AACrB,cAAI,WAAW,EAAE,QAAQ;AACvB,iBAAK,IAAI;AAAA,cACP;AAAA,cACA,EAAE;AAAA,YAAA;AAAA,UAEN;AAIA,cAAI,WAAW,EAAE,UAAU,EAAE,OAAO,UAAU,eAAe;AAC3D,sBAAU;AAAA,cACR,MAAML;AAAAA,cACN,QAAQG;AAAAA,cACR,QAAQG;AAAAA,cACR,SAAS;AAAA,cACT,SAAS,EAAE,OAAO;AAAA,cAClB,aAAa,UAAU,IAAI,CAAAC,QAAM;AAAA,gBAC/B,UAAUA,GAAE,GAAG;AAAA,gBACf,IAAIA,GAAE,GAAG;AAAA,cAAA,EACT;AAAA,YAAA;AAEJ;AAAA,UACF;AAAA,QACF;AAEA,YAAI,WAAW,IAAI,UAAU,SAAS,GAAG;AACvC,eAAK,IAAI;AAAA,YACP;AAAA,UAAA;AAAA,QAEJ;AAEA,YAAI,SAAS;AACX,iCAAuB;AAAA,YAAK,MAC1B,KAAK,gBAAgB,OAAO,YAAY,OAAO;AAAA,UAAA;AAAA,QAEnD;AAAA,MACF;AAAA,IACF;AAEA,2BAAuB,QAAQ,CAAA,OAAM,GAAA,CAAI;AAAA,EAC3C;AAAA,EAEA,MAAM,aAAa,OAA2C;AAC5D,SAAK,iBAAiB,IAAI,MAAM,KAAK,UAAU,QAAQ;AAAA,MACrD,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AACD,SAAK,QAAQ,IAAI,GAAG;AAAA,MAClB,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AAGD,mBAAe,UAAU,MAAM,KAAK,UAAU,MAAM;AAEpD,UAAM,MACJ,KAAK,gBACL,KAAK,KAAK,UAAU,CAAC,GAAG,4BAA4B;AAEtD,SAAK,IAAI;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,MACrB;AAAA,IAAA;AAGF,QAAI,cAA4B,CAAA;AAEhC,QAAI;AACF,oBAAc,MAAM,KAAK,UAAU,IAAI,CAAA,OAAM;AAAA,QAC3C,IAAI,EAAE;AAAA,QACN,UAAU,EAAE;AAAA,MAAA,EACZ;AAEF,aAAO,MAAM;AAAA,QACX;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA,QAAQ,KAAK;AAAA,QACb,KAAK;AAAA,QACL;AAAA,UACE,OAAO,KAAK,QAAQ,IAAI;AAAA,UACxB,UAAU,KAAK,QAAQ,MAAM;AAAA,QAAA;AAAA,QAE/B;AAAA,UACE,QAAQ,KAAK;AAAA,UACb,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,QAAA;AAAA,QAEhB,MAAM;AAAA,MAAA;AAAA,IAEV,SAAS,GAAG;AACV,UAAI,gBAAgB,CAAC,KAAK,EAAE,UAAU,SAASP,YAAsB;AACnE,eAAO;AAAA,UACL,GAAG,EAAE;AAAA,UACL;AAAA,QAAA;AAAA,MAEJ;AAEA,aAAO;AAAA,QACL,MAAMA;AAAAA,QACN,QAAQC;AAAAA,QACR,QAAQI;AAAAA,QACR,SAAS,mBAAmB,gBAAgB,CAAC,CAAC;AAAA,QAC9C;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF;AAAA,EAEA,gBACE,YACA,WACM;AACN,UAAM,WAAW,UAAU,WAAWF,SAAqB,SAAS;AACpE,eAAW,KAAK,IAAI,uBAAuB,WAAW,QAAQ,CAAC;AAAA,EACjE;AACF;AAQO,SAAS,cACd,SAC0B;AAC1B,QAAM,uCAAuB,IAAA;AAE7B,WAAS,UAAU;AACjB,UAAM,MAAqB,CAAA;AAC3B,eAAWK,YAAW,iBAAiB,UAAU;AAC/C,YAAM,YAAyB;AAAA,QAC7B,GAAGA,SAAQ,CAAC;AAAA,QACZ,MAAM;AAAA,UACJ,GAAGA,SAAQ,CAAC,EAAE;AAAA,UACd,WAAW,CAAA;AAAA,QAAC;AAAA,MACd;AAEF,UAAI,KAAK,SAAS;AAClB,iBAAW,SAASA,UAAS;AAC3B,kCAA0B,WAAW,KAAK;AAC1C,kBAAU,KAAK,UAAU,KAAK,GAAG,MAAM,KAAK,SAAS;AAAA,MACvD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,aAAW,SAAS,SAAS;AAC3B,QAAI,UAAU,UAAU,UAAU,QAAW;AAC3C,aAAO,CAAC,QAAA,GAAW,IAAI;AAAA,IACzB;AAEA,UAAM,EAAC,aAAY;AACnB,UAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK;AAAA,IACrB,OAAO;AACL,uBAAiB,IAAI,UAAU,CAAC,KAAK,CAAC;AAAA,IACxC;AAAA,EACF;AAEA,SAAO,CAAC,QAAA,GAAW,KAAK;AAC1B;AAIA,SAAS,0BAA0B,MAAmB,OAAoB;AACxE;AAAA,IACE,KAAK,aAAa,MAAM;AAAA,IACxB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,SAAS,MAAM;AAAA,IACpB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,kBAAkB,MAAM,KAAK;AAAA,IACvC;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,gBAAgB,MAAM,KAAK;AAAA,IACrC;AAAA,EAAA;AAEJ;"}
+ {"version":3,"file":"pusher.js","sources":["../../../../../../zero-cache/src/services/mutagen/pusher.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {groupBy} from '../../../../shared/src/arrays.ts';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport {getErrorMessage} from '../../../../shared/src/error.ts';\nimport {must} from '../../../../shared/src/must.ts';\nimport {Queue} from '../../../../shared/src/queue.ts';\nimport type {Downstream} from '../../../../zero-protocol/src/down.ts';\nimport {ErrorKind} from '../../../../zero-protocol/src/error-kind.ts';\nimport {ErrorOrigin} from '../../../../zero-protocol/src/error-origin.ts';\nimport {ErrorReason} from '../../../../zero-protocol/src/error-reason.ts';\nimport {\n isProtocolError,\n type PushFailedBody,\n} from '../../../../zero-protocol/src/error.ts';\nimport {\n CLEANUP_RESULTS_MUTATION_NAME,\n pushResponseSchema,\n type MutationID,\n type PushBody,\n type PushResponse,\n} from '../../../../zero-protocol/src/push.ts';\nimport * as MutationType from '../../../../zero-protocol/src/mutation-type-enum.ts';\nimport {type ZeroConfig} from '../../config/zero-config.ts';\nimport {compileUrlPattern, fetchFromAPIServer} from '../../custom/fetch.ts';\nimport {getOrCreateCounter} from '../../observability/metrics.ts';\nimport {recordMutation} from '../../server/anonymous-otel-start.ts';\nimport {ProtocolErrorWithLevel} from '../../types/error-with-level.ts';\nimport type {Source} from '../../types/streams.ts';\nimport {Subscription} from '../../types/subscription.ts';\nimport type {HandlerResult, StreamResult} from '../../workers/connection.ts';\nimport type {RefCountedService, Service} from '../service.ts';\n\nexport interface Pusher extends RefCountedService {\n readonly pushURL: string | undefined;\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n ): Source<Downstream>;\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n ): HandlerResult;\n ackMutationResponses(upToID: MutationID): Promise<void>;\n}\n\ntype Config = Pick<ZeroConfig, 'app' | 'shard'>;\n\n/**\n * Receives push messages from zero-client and forwards\n * them the the user's API server.\n *\n * If the user's API server is taking too long to process\n * the push, the PusherService will add the push to a queue\n * and send pushes in bulk the next time the user's API server\n * is available.\n *\n * - One PusherService exists per client group.\n * - Mutations for a given client are always sent in-order\n * - Mutations for different clients in the same group may be interleaved\n */\nexport class PusherService implements Service, Pusher {\n readonly id: string;\n readonly #pusher: PushWorker;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #pushConfig: ZeroConfig['push'] & {url: string[]};\n readonly #config: Config;\n readonly #lc: LogContext;\n readonly #pushURLPatterns: URLPattern[];\n #stopped: Promise<void> | undefined;\n #refCount = 0;\n #isStopped = false;\n\n constructor(\n appConfig: Config,\n pushConfig: ZeroConfig['push'] & {url: string[]},\n lc: LogContext,\n clientGroupID: string,\n ) {\n this.#config = appConfig;\n this.#lc = lc.withContext('component', 'pusherService');\n this.#pushURLPatterns = pushConfig.url.map(compileUrlPattern);\n this.#queue = new Queue();\n this.#pusher = new PushWorker(\n appConfig,\n lc,\n pushConfig.url,\n pushConfig.apiKey,\n this.#queue,\n );\n this.id = 
clientGroupID;\n this.#pushConfig = pushConfig;\n }\n\n get pushURL(): string | undefined {\n return this.#pusher.pushURL[0];\n }\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n ) {\n return this.#pusher.initConnection(clientID, wsID, userPushURL);\n }\n\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n ): Exclude<HandlerResult, StreamResult> {\n if (!this.#pushConfig.forwardCookies) {\n httpCookie = undefined; // remove cookies if not forwarded\n }\n this.#queue.enqueue({push, auth, clientID, httpCookie});\n\n return {\n type: 'ok',\n };\n }\n\n async ackMutationResponses(upToID: MutationID) {\n const url = this.#pushConfig.url[0];\n if (!url) {\n // No push URL configured, skip cleanup\n return;\n }\n\n const cleanupBody: PushBody = {\n clientGroupID: this.id,\n mutations: [\n {\n type: MutationType.Custom,\n id: 0, // Not tracked - this is fire-and-forget\n clientID: upToID.clientID,\n name: CLEANUP_RESULTS_MUTATION_NAME,\n args: [\n {\n clientGroupID: this.id,\n clientID: upToID.clientID,\n upToMutationID: upToID.id,\n },\n ],\n timestamp: Date.now(),\n },\n ],\n pushVersion: 1,\n timestamp: Date.now(),\n requestID: `cleanup-${this.id}-${upToID.clientID}-${upToID.id}`,\n };\n\n try {\n await fetchFromAPIServer(\n pushResponseSchema,\n 'push',\n this.#lc,\n url,\n false,\n this.#pushURLPatterns,\n {appID: this.#config.app.id, shardNum: this.#config.shard.num},\n {apiKey: this.#pushConfig.apiKey},\n cleanupBody,\n );\n } catch (e) {\n this.#lc.warn?.('Failed to send cleanup mutation', {\n error: getErrorMessage(e),\n });\n }\n }\n\n ref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n ++this.#refCount;\n }\n\n unref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n --this.#refCount;\n if (this.#refCount <= 0) {\n void this.stop();\n }\n }\n\n hasRefs(): boolean {\n return this.#refCount > 0;\n }\n\n run(): Promise<void> {\n this.#stopped = this.#pusher.run();\n return this.#stopped;\n }\n\n stop(): Promise<void> {\n if (this.#isStopped) {\n return must(this.#stopped, 'Stop was called before `run`');\n }\n this.#isStopped = true;\n this.#queue.enqueue('stop');\n return must(this.#stopped, 'Stop was called before `run`');\n }\n}\n\ntype PusherEntry = {\n push: PushBody;\n auth: string | undefined;\n httpCookie: string | undefined;\n clientID: string;\n};\ntype PusherEntryOrStop = PusherEntry | 'stop';\n\n/**\n * Awaits items in the queue then drains and sends them all\n * to the user's API server.\n */\nclass PushWorker {\n readonly #pushURLs: string[];\n readonly #pushURLPatterns: URLPattern[];\n readonly #apiKey: string | undefined;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #lc: LogContext;\n readonly #config: Config;\n readonly #clients: Map<\n string,\n {\n wsID: string;\n downstream: Subscription<Downstream>;\n }\n >;\n #userPushURL?: string | undefined;\n\n readonly #customMutations = getOrCreateCounter(\n 'mutation',\n 'custom',\n 'Number of custom mutations processed',\n );\n readonly #pushes = getOrCreateCounter(\n 'mutation',\n 'pushes',\n 'Number of pushes processed by the pusher',\n );\n\n constructor(\n config: Config,\n lc: LogContext,\n pushURL: string[],\n apiKey: string | undefined,\n queue: Queue<PusherEntryOrStop>,\n ) {\n this.#pushURLs = pushURL;\n this.#lc = lc.withContext('component', 'pusher');\n this.#pushURLPatterns = pushURL.map(compileUrlPattern);\n this.#apiKey = apiKey;\n this.#queue = queue;\n 
this.#config = config;\n this.#clients = new Map();\n }\n\n get pushURL() {\n return this.#pushURLs;\n }\n\n /**\n * Returns a new downstream stream if the clientID,wsID pair has not been seen before.\n * If a clientID already exists with a different wsID, that client's downstream is cancelled.\n */\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n ) {\n const existing = this.#clients.get(clientID);\n if (existing && existing.wsID === wsID) {\n // already initialized for this socket\n throw new Error('Connection was already initialized');\n }\n\n // client is back on a new connection\n if (existing) {\n existing.downstream.cancel();\n }\n\n // Handle client group level URL parameters\n if (this.#userPushURL === undefined) {\n // First client in the group - store its URL\n this.#userPushURL = userPushURL;\n } else {\n // Validate that subsequent clients have compatible parameters\n if (this.#userPushURL !== userPushURL) {\n this.#lc.warn?.(\n 'Client provided different mutate parameters than client group',\n {\n clientID,\n clientURL: userPushURL,\n clientGroupURL: this.#userPushURL,\n },\n );\n }\n }\n\n const downstream = Subscription.create<Downstream>({\n cleanup: () => {\n this.#clients.delete(clientID);\n },\n });\n this.#clients.set(clientID, {wsID, downstream});\n return downstream;\n }\n\n async run() {\n for (;;) {\n const task = await this.#queue.dequeue();\n const rest = this.#queue.drain();\n const [pushes, terminate] = combinePushes([task, ...rest]);\n for (const push of pushes) {\n const response = await this.#processPush(push);\n await this.#fanOutResponses(response);\n }\n\n if (terminate) {\n break;\n }\n }\n }\n\n /**\n * 1. If the entire `push` fails, we send the error to relevant clients.\n * 2. If the push succeeds, we look for any mutation failure that should cause the connection to terminate\n * and terminate the connection for those clients.\n */\n #fanOutResponses(response: PushResponse) {\n const connectionTerminations: (() => void)[] = [];\n\n // if the entire push failed, send that to the client.\n if ('kind' in response || 'error' in response) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a push error.',\n response,\n );\n const groupedMutationIDs = groupBy(\n response.mutationIDs ?? [],\n m => m.clientID,\n );\n for (const [clientID, mutationIDs] of groupedMutationIDs) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n // We do not resolve mutations on the client if the push fails\n // as those mutations will be retried.\n if ('error' in response) {\n // This error code path will eventually be removed when we\n // no longer support the legacy push error format.\n const pushFailedBody: PushFailedBody =\n response.error === 'http'\n ? {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.HTTP,\n status: response.status,\n bodyPreview: response.details,\n mutationIDs,\n message: `Fetch from API server returned non-OK status ${response.status}`,\n }\n : response.error === 'unsupportedPushVersion'\n ? {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.UnsupportedPushVersion,\n mutationIDs,\n message: `Unsupported push version`,\n }\n : {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.Internal,\n mutationIDs,\n message:\n response.error === 'zeroPusher'\n ? response.details\n : response.error === 'unsupportedSchemaVersion'\n ? 
'Unsupported schema version'\n : 'An unknown error occurred while pushing to the API server',\n };\n\n this.#failDownstream(client.downstream, pushFailedBody);\n } else if ('kind' in response) {\n this.#failDownstream(client.downstream, response);\n } else {\n unreachable(response);\n }\n }\n } else {\n // Look for mutations results that should cause us to terminate the connection\n const groupedMutations = groupBy(response.mutations, m => m.id.clientID);\n for (const [clientID, mutations] of groupedMutations) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n let failure: PushFailedBody | undefined;\n let i = 0;\n for (; i < mutations.length; i++) {\n const m = mutations[i];\n if ('error' in m.result) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a mutation error.',\n m.result,\n );\n }\n // This error code path will eventually be removed,\n // keeping this for backwards compatibility, but the server\n // should now return a PushFailedBody with the mutationIDs\n if ('error' in m.result && m.result.error === 'oooMutation') {\n failure = {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.OutOfOrderMutation,\n message: 'mutation was out of order',\n details: m.result.details,\n mutationIDs: mutations.map(m => ({\n clientID: m.id.clientID,\n id: m.id.id,\n })),\n };\n break;\n }\n }\n\n if (failure && i < mutations.length - 1) {\n this.#lc.warn?.(\n 'push-response contains mutations after a mutation which should fatal the connection',\n );\n }\n\n if (failure) {\n connectionTerminations.push(() =>\n this.#failDownstream(client.downstream, failure),\n );\n }\n }\n }\n\n connectionTerminations.forEach(cb => cb());\n }\n\n async #processPush(entry: PusherEntry): Promise<PushResponse> {\n this.#customMutations.add(entry.push.mutations.length, {\n clientGroupID: entry.push.clientGroupID,\n });\n this.#pushes.add(1, {\n clientGroupID: entry.push.clientGroupID,\n });\n\n // Record custom mutations for telemetry\n recordMutation('custom', entry.push.mutations.length);\n\n const url =\n this.#userPushURL ??\n must(this.#pushURLs[0], 'ZERO_MUTATE_URL is not set');\n\n this.#lc.debug?.(\n 'pushing to',\n url,\n 'with',\n entry.push.mutations.length,\n 'mutations',\n );\n\n let mutationIDs: MutationID[] = [];\n\n try {\n mutationIDs = entry.push.mutations.map(m => ({\n id: m.id,\n clientID: m.clientID,\n }));\n\n return await fetchFromAPIServer(\n pushResponseSchema,\n 'push',\n this.#lc,\n url,\n url === this.#userPushURL,\n this.#pushURLPatterns,\n {\n appID: this.#config.app.id,\n shardNum: this.#config.shard.num,\n },\n {\n apiKey: this.#apiKey,\n token: entry.auth,\n cookie: entry.httpCookie,\n },\n entry.push,\n );\n } catch (e) {\n if (isProtocolError(e) && e.errorBody.kind === ErrorKind.PushFailed) {\n return {\n ...e.errorBody,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n\n return {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.Internal,\n message: `Failed to push: ${getErrorMessage(e)}`,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n }\n\n #failDownstream(\n downstream: Subscription<Downstream>,\n errorBody: PushFailedBody,\n ): void {\n const logLevel = errorBody.origin === ErrorOrigin.Server ? 
'warn' : 'error';\n downstream.fail(new ProtocolErrorWithLevel(errorBody, logLevel));\n }\n}\n\n/**\n * Pushes for different clientIDs could theoretically be interleaved.\n *\n * In order to do efficient batching to the user's API server,\n * we collect all pushes for the same clientID into a single push.\n */\nexport function combinePushes(\n entries: readonly (PusherEntryOrStop | undefined)[],\n): [PusherEntry[], boolean] {\n const pushesByClientID = new Map<string, PusherEntry[]>();\n\n function collect() {\n const ret: PusherEntry[] = [];\n for (const entries of pushesByClientID.values()) {\n const composite: PusherEntry = {\n ...entries[0],\n push: {\n ...entries[0].push,\n mutations: [],\n },\n };\n ret.push(composite);\n for (const entry of entries) {\n assertAreCompatiblePushes(composite, entry);\n composite.push.mutations.push(...entry.push.mutations);\n }\n }\n return ret;\n }\n\n for (const entry of entries) {\n if (entry === 'stop' || entry === undefined) {\n return [collect(), true];\n }\n\n const {clientID} = entry;\n const existing = pushesByClientID.get(clientID);\n if (existing) {\n existing.push(entry);\n } else {\n pushesByClientID.set(clientID, [entry]);\n }\n }\n\n return [collect(), false] as const;\n}\n\n// These invariants should always be true for a given clientID.\n// If they are not, we have a bug in the code somewhere.\nfunction assertAreCompatiblePushes(left: PusherEntry, right: PusherEntry) {\n assert(\n left.clientID === right.clientID,\n 'clientID must be the same for all pushes',\n );\n assert(\n left.auth === right.auth,\n 'auth must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.schemaVersion === right.push.schemaVersion,\n 'schemaVersion must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.pushVersion === right.push.pushVersion,\n 'pushVersion must be the same for all pushes with the same clientID',\n 
);\n}\n"],"names":["MutationType.Custom","ErrorKind.PushFailed","ErrorOrigin.ZeroCache","ErrorReason.HTTP","ErrorOrigin.Server","ErrorReason.UnsupportedPushVersion","ErrorReason.Internal","ErrorReason.OutOfOrderMutation","m","entries"],"mappings":";;;;;;;;;;;;;;;;;AAgEO,MAAM,cAAyC;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT;AAAA,EACA,YAAY;AAAA,EACZ,aAAa;AAAA,EAEb,YACE,WACA,YACA,IACA,eACA;AACA,SAAK,UAAU;AACf,SAAK,MAAM,GAAG,YAAY,aAAa,eAAe;AACtD,SAAK,mBAAmB,WAAW,IAAI,IAAI,iBAAiB;AAC5D,SAAK,SAAS,IAAI,MAAA;AAClB,SAAK,UAAU,IAAI;AAAA,MACjB;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,WAAW;AAAA,MACX,KAAK;AAAA,IAAA;AAEP,SAAK,KAAK;AACV,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,IAAI,UAA8B;AAChC,WAAO,KAAK,QAAQ,QAAQ,CAAC;AAAA,EAC/B;AAAA,EAEA,eACE,UACA,MACA,aACA;AACA,WAAO,KAAK,QAAQ,eAAe,UAAU,MAAM,WAAW;AAAA,EAChE;AAAA,EAEA,YACE,UACA,MACA,MACA,YACsC;AACtC,QAAI,CAAC,KAAK,YAAY,gBAAgB;AACpC,mBAAa;AAAA,IACf;AACA,SAAK,OAAO,QAAQ,EAAC,MAAM,MAAM,UAAU,YAAW;AAEtD,WAAO;AAAA,MACL,MAAM;AAAA,IAAA;AAAA,EAEV;AAAA,EAEA,MAAM,qBAAqB,QAAoB;AAC7C,UAAM,MAAM,KAAK,YAAY,IAAI,CAAC;AAClC,QAAI,CAAC,KAAK;AAER;AAAA,IACF;AAEA,UAAM,cAAwB;AAAA,MAC5B,eAAe,KAAK;AAAA,MACpB,WAAW;AAAA,QACT;AAAA,UACE,MAAMA;AAAAA,UACN,IAAI;AAAA;AAAA,UACJ,UAAU,OAAO;AAAA,UACjB,MAAM;AAAA,UACN,MAAM;AAAA,YACJ;AAAA,cACE,eAAe,KAAK;AAAA,cACpB,UAAU,OAAO;AAAA,cACjB,gBAAgB,OAAO;AAAA,YAAA;AAAA,UACzB;AAAA,UAEF,WAAW,KAAK,IAAA;AAAA,QAAI;AAAA,MACtB;AAAA,MAEF,aAAa;AAAA,MACb,WAAW,KAAK,IAAA;AAAA,MAChB,WAAW,WAAW,KAAK,EAAE,IAAI,OAAO,QAAQ,IAAI,OAAO,EAAE;AAAA,IAAA;AAG/D,QAAI;AACF,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL,EAAC,OAAO,KAAK,QAAQ,IAAI,IAAI,UAAU,KAAK,QAAQ,MAAM,IAAA;AAAA,QAC1D,EAAC,QAAQ,KAAK,YAAY,OAAA;AAAA,QAC1B;AAAA,MAAA;AAAA,IAEJ,SAAS,GAAG;AACV,WAAK,IAAI,OAAO,mCAAmC;AAAA,QACjD,OAAO,gBAAgB,CAAC;AAAA,MAAA,CACzB;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAM;AACJ,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AAAA,EACT;AAAA,EAEA,QAAQ;AACN,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AACP,QAAI,KAAK,aAAa,GAAG;AACvB,WAAK,KAAK,KAAA;AAAA,IACZ;AAAA,EACF;AAAA,EAEA,UAAmB;AACjB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,MAAqB;AACnB,SAAK,WAAW,KAAK,QAAQ,IAAA;AAC7B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAAsB;AACpB,QAAI,KAAK,YAAY;AACnB,aAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,IAC3D;AACA,SAAK,aAAa;AAClB,SAAK,OAAO,QAAQ,MAAM;AAC1B,WAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,EAC3D;AACF;AAcA,MAAM,WAAW;AAAA,EACN;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAOT;AAAA,EAES,mBAAmB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAEO,UAAU;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAGF,YACE,QACA,IACA,SACA,QACA,OACA;AACA,SAAK,YAAY;AACjB,SAAK,MAAM,GAAG,YAAY,aAAa,QAAQ;AAC/C,SAAK,mBAAmB,QAAQ,IAAI,iBAAiB;AACrD,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,UAAU;AACf,SAAK,+BAAe,IAAA;AAAA,EACtB;AAAA,EAEA,IAAI,UAAU;AACZ,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,eACE,UACA,MACA,aACA;AACA,UAAM,WAAW,KAAK,SAAS,IAAI,QAAQ;AAC3C,QAAI,YAAY,SAAS,SAAS,MAAM;AAEtC,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAGA,QAAI,UAAU;AACZ,eAAS,WAAW,OAAA;AAAA,IACtB;AAGA,QAAI,KAAK,iBAAiB,QAAW;AAEnC,WAAK,eAAe;AAAA,IACtB,OAAO;AAEL,UAAI,KAAK,iBAAiB,aAAa;AACrC,aAAK,IAAI;AAAA,UACP;AAAA,UACA;AAAA,YACE;AAAA,YACA,WAAW;AAAA,YACX,gBAAgB,KAAK;AAAA,UAAA;AAAA,QACvB;AAAA,MAEJ;AAAA,IACF;AAEA,UAAM,aAAa,aAAa,OAAmB;AAAA,MACjD,SAAS,MAAM;AACb,aAAK,SAAS,OAAO,QAAQ;AAAA,MAC/B;AAAA,IAAA,CACD;AACD,SAAK,SAAS,IAAI,UAAU,EAAC,MAAM,YAAW;AAC9C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,MAAM;AACV,eAAS;AACP,YAAM,OAAO,MAAM,KAAK,OAAO,QAAA;AAC/B,YAAM,OAAO,KAAK,OAAO,MAAA;AACzB,YAAM,CAAC,QAAQ,SAAS,IAAI,cAAc,CAAC,MAAM,GAAG,IAAI,CAAC;AACzD,iBAAW,QAAQ,QAAQ;AACzB,cAAM,WAAW,MAA
M,KAAK,aAAa,IAAI;AAC7C,cAAM,KAAK,iBAAiB,QAAQ;AAAA,MACtC;AAEA,UAAI,WAAW;AACb;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,UAAwB;AACvC,UAAM,yBAAyC,CAAA;AAG/C,QAAI,UAAU,YAAY,WAAW,UAAU;AAC7C,WAAK,IAAI;AAAA,QACP;AAAA,QACA;AAAA,MAAA;AAEF,YAAM,qBAAqB;AAAA,QACzB,SAAS,eAAe,CAAA;AAAA,QACxB,OAAK,EAAE;AAAA,MAAA;AAET,iBAAW,CAAC,UAAU,WAAW,KAAK,oBAAoB;AACxD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAIA,YAAI,WAAW,UAAU;AAGvB,gBAAM,iBACJ,SAAS,UAAU,SACf;AAAA,YACE,MAAMC;AAAAA,YACN,QAAQC;AAAAA,YACR,QAAQC;AAAAA,YACR,QAAQ,SAAS;AAAA,YACjB,aAAa,SAAS;AAAA,YACtB;AAAA,YACA,SAAS,gDAAgD,SAAS,MAAM;AAAA,UAAA,IAE1E,SAAS,UAAU,2BACjB;AAAA,YACE,MAAMF;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQC;AAAAA,YACR;AAAA,YACA,SAAS;AAAA,UAAA,IAEX;AAAA,YACE,MAAMJ;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQE;AAAAA,YACR;AAAA,YACA,SACE,SAAS,UAAU,eACf,SAAS,UACT,SAAS,UAAU,6BACjB,+BACA;AAAA,UAAA;AAGlB,eAAK,gBAAgB,OAAO,YAAY,cAAc;AAAA,QACxD,WAAW,UAAU,UAAU;AAC7B,eAAK,gBAAgB,OAAO,YAAY,QAAQ;AAAA,QAClD,OAAO;AACL,sBAAoB;AAAA,QACtB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,mBAAmB,QAAQ,SAAS,WAAW,CAAA,MAAK,EAAE,GAAG,QAAQ;AACvE,iBAAW,CAAC,UAAU,SAAS,KAAK,kBAAkB;AACpD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAEA,YAAI;AACJ,YAAI,IAAI;AACR,eAAO,IAAI,UAAU,QAAQ,KAAK;AAChC,gBAAM,IAAI,UAAU,CAAC;AACrB,cAAI,WAAW,EAAE,QAAQ;AACvB,iBAAK,IAAI;AAAA,cACP;AAAA,cACA,EAAE;AAAA,YAAA;AAAA,UAEN;AAIA,cAAI,WAAW,EAAE,UAAU,EAAE,OAAO,UAAU,eAAe;AAC3D,sBAAU;AAAA,cACR,MAAML;AAAAA,cACN,QAAQG;AAAAA,cACR,QAAQG;AAAAA,cACR,SAAS;AAAA,cACT,SAAS,EAAE,OAAO;AAAA,cAClB,aAAa,UAAU,IAAI,CAAAC,QAAM;AAAA,gBAC/B,UAAUA,GAAE,GAAG;AAAA,gBACf,IAAIA,GAAE,GAAG;AAAA,cAAA,EACT;AAAA,YAAA;AAEJ;AAAA,UACF;AAAA,QACF;AAEA,YAAI,WAAW,IAAI,UAAU,SAAS,GAAG;AACvC,eAAK,IAAI;AAAA,YACP;AAAA,UAAA;AAAA,QAEJ;AAEA,YAAI,SAAS;AACX,iCAAuB;AAAA,YAAK,MAC1B,KAAK,gBAAgB,OAAO,YAAY,OAAO;AAAA,UAAA;AAAA,QAEnD;AAAA,MACF;AAAA,IACF;AAEA,2BAAuB,QAAQ,CAAA,OAAM,GAAA,CAAI;AAAA,EAC3C;AAAA,EAEA,MAAM,aAAa,OAA2C;AAC5D,SAAK,iBAAiB,IAAI,MAAM,KAAK,UAAU,QAAQ;AAAA,MACrD,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AACD,SAAK,QAAQ,IAAI,GAAG;AAAA,MAClB,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AAGD,mBAAe,UAAU,MAAM,KAAK,UAAU,MAAM;AAEpD,UAAM,MACJ,KAAK,gBACL,KAAK,KAAK,UAAU,CAAC,GAAG,4BAA4B;AAEtD,SAAK,IAAI;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,MACrB;AAAA,IAAA;AAGF,QAAI,cAA4B,CAAA;AAEhC,QAAI;AACF,oBAAc,MAAM,KAAK,UAAU,IAAI,CAAA,OAAM;AAAA,QAC3C,IAAI,EAAE;AAAA,QACN,UAAU,EAAE;AAAA,MAAA,EACZ;AAEF,aAAO,MAAM;AAAA,QACX;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA,QAAQ,KAAK;AAAA,QACb,KAAK;AAAA,QACL;AAAA,UACE,OAAO,KAAK,QAAQ,IAAI;AAAA,UACxB,UAAU,KAAK,QAAQ,MAAM;AAAA,QAAA;AAAA,QAE/B;AAAA,UACE,QAAQ,KAAK;AAAA,UACb,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,QAAA;AAAA,QAEhB,MAAM;AAAA,MAAA;AAAA,IAEV,SAAS,GAAG;AACV,UAAI,gBAAgB,CAAC,KAAK,EAAE,UAAU,SAASP,YAAsB;AACnE,eAAO;AAAA,UACL,GAAG,EAAE;AAAA,UACL;AAAA,QAAA;AAAA,MAEJ;AAEA,aAAO;AAAA,QACL,MAAMA;AAAAA,QACN,QAAQC;AAAAA,QACR,QAAQI;AAAAA,QACR,SAAS,mBAAmB,gBAAgB,CAAC,CAAC;AAAA,QAC9C;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF;AAAA,EAEA,gBACE,YACA,WACM;AACN,UAAM,WAAW,UAAU,WAAWF,SAAqB,SAAS;AACpE,eAAW,KAAK,IAAI,uBAAuB,WAAW,QAAQ,CAAC;AAAA,EACjE;AACF;AAQO,SAAS,cACd,SAC0B;AAC1B,QAAM,uCAAuB,IAAA;AAE7B,WAAS,UAAU;AACjB,UAAM,MAAqB,CAAA;AAC3B,eAAWK,YAAW,iBAAiB,UAAU;AAC/C,YAAM,YAAyB;AAAA,QAC7B,GAAGA,SAAQ,CAAC;AAAA,QACZ,MAAM;AAAA,UACJ,GAAGA,SAAQ,CAAC,EAAE;AAAA,UACd,WAAW,CAAA;AAAA,QAAC;AAAA,MACd;AAEF,UAAI,KAAK,SAAS;AAClB,iBAAW,SAASA,UAAS;AAC3B,kCAA0B,WAAW,KAAK;AAC1C,kBAAU,KAAK,UAAU,KAAK,GAAG,MAAM,KAAK,SAAS;AAAA,MACvD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,aAAW,SAAS,SAAS;AAC3B,QAAI,UAAU,UAAU,UAAU,QAAW;AAC3C,aAAO,CAAC,QAAA,GAAW,IAAI;
AAAA,IACzB;AAEA,UAAM,EAAC,aAAY;AACnB,UAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK;AAAA,IACrB,OAAO;AACL,uBAAiB,IAAI,UAAU,CAAC,KAAK,CAAC;AAAA,IACxC;AAAA,EACF;AAEA,SAAO,CAAC,QAAA,GAAW,KAAK;AAC1B;AAIA,SAAS,0BAA0B,MAAmB,OAAoB;AACxE;AAAA,IACE,KAAK,aAAa,MAAM;AAAA,IACxB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,SAAS,MAAM;AAAA,IACpB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,kBAAkB,MAAM,KAAK;AAAA,IACvC;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,gBAAgB,MAAM,KAAK;AAAA,IACrC;AAAA,EAAA;AAEJ;"}
@@ -6,10 +6,10 @@ import { must } from "../../../../shared/src/must.js";
  import { createLiteTableStatement, liteColumnDef, createLiteIndexStatement } from "../../db/create.js";
  import { computeZqlSpecs, listTables, listIndexes } from "../../db/lite-tables.js";
  import { mapPostgresToLite, mapPostgresToLiteColumn, mapPostgresToLiteIndex } from "../../db/pg-to-lite.js";
- import { ColumnMetadataStore } from "../change-source/column-metadata.js";
  import { JSON_PARSED, liteRow } from "../../types/lite.js";
  import { liteTableName } from "../../types/names.js";
  import { id } from "../../types/sql.js";
+ import { ColumnMetadataStore } from "../change-source/column-metadata.js";
  import { logSetOp, logDeleteOp, logTruncateOp, logResetOp } from "./schema/change-log.js";
  import { updateReplicationWatermark } from "./schema/replication-state.js";
  import { ZERO_VERSION_COLUMN_NAME } from "./schema/constants.js";
@@ -161,6 +161,7 @@ class TransactionProcessor {
  #version;
  #tableSpecs;
  #jsonFormat;
+ #pos = 0;
  #schemaChanged = false;
  constructor(lc, db, mode, tableSpecs, commitVersion, jsonFormat) {
  this.#startMs = Date.now();
@@ -439,12 +440,12 @@ class TransactionProcessor {
  }
  #logSetOp(table, key) {
  if (this.#mode === "serving") {
- logSetOp(this.#db, this.#version, table, key);
+ logSetOp(this.#db, this.#version, this.#pos++, table, key);
  }
  }
  #logDeleteOp(table, key) {
  if (this.#mode === "serving") {
- logDeleteOp(this.#db, this.#version, table, key);
+ logDeleteOp(this.#db, this.#version, this.#pos++, table, key);
  }
  }
  #logTruncateOp(table) {
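The two hunks above give TransactionProcessor a per-transaction `#pos` counter and thread it into `logSetOp`/`logDeleteOp`, presumably so change-log entries written under the same commit version carry an explicit intra-transaction order. A minimal sketch of the idea, with an assumed entry shape (the real change-log schema lives in schema/change-log.ts):

```ts
// Sketch only: models why a monotonically increasing position is threaded
// through the log calls. The entry shape is assumed, not the real schema.
type ChangeLogEntry = {
  version: string; // commit version shared by every op in the transaction
  pos: number;     // new: order of the op within that transaction
  table: string;
  op: 'set' | 'delete';
  key: Record<string, unknown>;
};

class TransactionLogSketch {
  readonly #entries: ChangeLogEntry[] = [];
  #pos = 0; // mirrors the new TransactionProcessor.#pos field

  logSet(version: string, table: string, key: Record<string, unknown>): void {
    this.#entries.push({version, pos: this.#pos++, table, op: 'set', key});
  }

  logDelete(version: string, table: string, key: Record<string, unknown>): void {
    this.#entries.push({version, pos: this.#pos++, table, op: 'delete', key});
  }

  // Entries replay deterministically when sorted by (version, pos).
  ordered(): ChangeLogEntry[] {
    return [...this.#entries].sort(
      (a, b) => a.version.localeCompare(b.version) || a.pos - b.pos,
    );
  }
}
```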
@@ -1 +1 @@
- {"version":3,"file":"change-processor.js","sources":["../../../../../../zero-cache/src/services/replicator/change-processor.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {SqliteError} from '@rocicorp/zero-sqlite3';\nimport {AbortError} from '../../../../shared/src/abort-error.ts';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport {stringify} from '../../../../shared/src/bigint-json.ts';\nimport {must} from '../../../../shared/src/must.ts';\nimport {\n createLiteIndexStatement,\n createLiteTableStatement,\n liteColumnDef,\n} from '../../db/create.ts';\nimport {\n computeZqlSpecs,\n listIndexes,\n listTables,\n} from '../../db/lite-tables.ts';\nimport {\n mapPostgresToLite,\n mapPostgresToLiteColumn,\n mapPostgresToLiteIndex,\n} from '../../db/pg-to-lite.ts';\nimport type {LiteTableSpec} from '../../db/specs.ts';\nimport type {StatementRunner} from '../../db/statements.ts';\nimport {ColumnMetadataStore} from '../change-source/column-metadata.ts';\nimport type {LexiVersion} from '../../types/lexi-version.ts';\nimport {\n JSON_PARSED,\n liteRow,\n type JSONFormat,\n type LiteRow,\n type LiteRowKey,\n type LiteValueType,\n} from '../../types/lite.ts';\nimport {liteTableName} from '../../types/names.ts';\nimport {id} from '../../types/sql.ts';\nimport type {\n Change,\n ColumnAdd,\n ColumnDrop,\n ColumnUpdate,\n IndexCreate,\n IndexDrop,\n MessageCommit,\n MessageDelete,\n MessageInsert,\n MessageRelation,\n MessageTruncate,\n MessageUpdate,\n TableCreate,\n TableDrop,\n TableRename,\n} from '../change-source/protocol/current/data.ts';\nimport type {ChangeStreamData} from '../change-source/protocol/current/downstream.ts';\nimport type {ReplicatorMode} from './replicator.ts';\nimport {\n logDeleteOp,\n logResetOp,\n logSetOp,\n logTruncateOp,\n} from './schema/change-log.ts';\nimport {\n ZERO_VERSION_COLUMN_NAME,\n updateReplicationWatermark,\n} from './schema/replication-state.ts';\n\nexport type ChangeProcessorMode = ReplicatorMode | 'initial-sync';\n\nexport type CommitResult = {\n watermark: string;\n schemaUpdated: boolean;\n};\n\n/**\n * The ChangeProcessor partitions the stream of messages into transactions\n * by creating a {@link TransactionProcessor} when a transaction begins, and dispatching\n * messages to it until the commit is received.\n *\n * From https://www.postgresql.org/docs/current/protocol-logical-replication.html#PROTOCOL-LOGICAL-MESSAGES-FLOW :\n *\n * \"The logical replication protocol sends individual transactions one by one.\n * This means that all messages between a pair of Begin and Commit messages\n * belong to the same transaction.\"\n */\nexport class ChangeProcessor {\n readonly #db: StatementRunner;\n readonly #mode: ChangeProcessorMode;\n readonly #failService: (lc: LogContext, err: unknown) => void;\n\n // The TransactionProcessor lazily loads table specs into this Map,\n // and reloads them after a schema change. 
It is cached here to avoid\n // reading them from the DB on every transaction.\n readonly #tableSpecs = new Map<string, LiteTableSpec>();\n\n #currentTx: TransactionProcessor | null = null;\n\n #failure: Error | undefined;\n\n constructor(\n db: StatementRunner,\n mode: ChangeProcessorMode,\n failService: (lc: LogContext, err: unknown) => void,\n ) {\n this.#db = db;\n this.#mode = mode;\n this.#failService = failService;\n }\n\n #fail(lc: LogContext, err: unknown) {\n if (!this.#failure) {\n this.#currentTx?.abort(lc); // roll back any pending transaction.\n\n this.#failure = ensureError(err);\n\n if (!(err instanceof AbortError)) {\n // Propagate the failure up to the service.\n lc.error?.('Message Processing failed:', this.#failure);\n this.#failService(lc, this.#failure);\n }\n }\n }\n\n abort(lc: LogContext) {\n this.#fail(lc, new AbortError());\n }\n\n /** @return If a transaction was committed. */\n processMessage(\n lc: LogContext,\n downstream: ChangeStreamData,\n ): CommitResult | null {\n const [type, message] = downstream;\n if (this.#failure) {\n lc.debug?.(`Dropping ${message.tag}`);\n return null;\n }\n try {\n const watermark =\n type === 'begin'\n ? downstream[2].commitWatermark\n : type === 'commit'\n ? downstream[2].watermark\n : undefined;\n return this.#processMessage(lc, message, watermark);\n } catch (e) {\n this.#fail(lc, e);\n }\n return null;\n }\n\n #beginTransaction(\n lc: LogContext,\n commitVersion: string,\n jsonFormat: JSONFormat,\n ): TransactionProcessor {\n const start = Date.now();\n\n // litestream can technically hold the lock for an arbitrary amount of time\n // when checkpointing a large commit. Crashing on the busy-timeout in this\n // scenario will either produce a corrupt backup or otherwise prevent\n // replication from proceeding.\n //\n // Instead, retry the lock acquisition indefinitely. If this masks\n // an unknown deadlock situation, manual intervention will be necessary.\n for (let i = 0; ; i++) {\n try {\n return new TransactionProcessor(\n lc,\n this.#db,\n this.#mode,\n this.#tableSpecs,\n commitVersion,\n jsonFormat,\n );\n } catch (e) {\n if (e instanceof SqliteError && e.code === 'SQLITE_BUSY') {\n lc.warn?.(\n `SQLITE_BUSY for ${Date.now() - start} ms (attempt ${i + 1}). ` +\n `This is only expected if litestream is performing a large ` +\n `checkpoint.`,\n e,\n );\n continue;\n }\n throw e;\n }\n }\n }\n\n /** @return If a transaction was committed. */\n #processMessage(\n lc: LogContext,\n msg: Change,\n watermark: string | undefined,\n ): CommitResult | null {\n if (msg.tag === 'begin') {\n if (this.#currentTx) {\n throw new Error(`Already in a transaction ${stringify(msg)}`);\n }\n this.#currentTx = this.#beginTransaction(\n lc,\n must(watermark),\n msg.json ?? 
JSON_PARSED,\n );\n return null;\n }\n\n // For non-begin messages, there should be a #currentTx set.\n const tx = this.#currentTx;\n if (!tx) {\n throw new Error(\n `Received message outside of transaction: ${stringify(msg)}`,\n );\n }\n\n if (msg.tag === 'commit') {\n // Undef this.#currentTx to allow the assembly of the next transaction.\n this.#currentTx = null;\n\n assert(watermark);\n const schemaUpdated = tx.processCommit(msg, watermark);\n return {watermark, schemaUpdated};\n }\n\n if (msg.tag === 'rollback') {\n this.#currentTx?.abort(lc);\n this.#currentTx = null;\n return null;\n }\n\n switch (msg.tag) {\n case 'insert':\n tx.processInsert(msg);\n break;\n case 'update':\n tx.processUpdate(msg);\n break;\n case 'delete':\n tx.processDelete(msg);\n break;\n case 'truncate':\n tx.processTruncate(msg);\n break;\n case 'create-table':\n tx.processCreateTable(msg);\n break;\n case 'rename-table':\n tx.processRenameTable(msg);\n break;\n case 'add-column':\n tx.processAddColumn(msg);\n break;\n case 'update-column':\n tx.processUpdateColumn(msg);\n break;\n case 'drop-column':\n tx.processDropColumn(msg);\n break;\n case 'drop-table':\n tx.processDropTable(msg);\n break;\n case 'create-index':\n tx.processCreateIndex(msg);\n break;\n case 'drop-index':\n tx.processDropIndex(msg);\n break;\n default:\n unreachable(msg);\n }\n\n return null;\n }\n}\n\n/**\n * The {@link TransactionProcessor} handles the sequence of messages from\n * upstream, from `BEGIN` to `COMMIT` and executes the corresponding mutations\n * on the {@link postgres.TransactionSql} on the replica.\n *\n * When applying row contents to the replica, the `_0_version` column is added / updated,\n * and a corresponding entry in the `ChangeLog` is added. The version value is derived\n * from the watermark of the preceding transaction (stored as the `nextStateVersion` in the\n * `ReplicationState` table).\n *\n * Side note: For non-streaming Postgres transactions, the commitEndLsn (and thus\n * commit watermark) is available in the `begin` message, so it could theoretically\n * be used for the row version of changes within the transaction. However, the\n * commitEndLsn is not available in the streaming (in-progress) transaction\n * protocol, and may not be available for CDC streams of other upstream types.\n * Therefore, the zero replication protocol is designed to not require the commit\n * watermark when a transaction begins.\n *\n * Also of interest is the fact that all INSERT Messages are logically applied as\n * UPSERTs. See {@link processInsert} for the underlying motivation.\n */\nclass TransactionProcessor {\n readonly #lc: LogContext;\n readonly #startMs: number;\n readonly #db: StatementRunner;\n readonly #mode: ChangeProcessorMode;\n readonly #version: LexiVersion;\n readonly #tableSpecs: Map<string, LiteTableSpec>;\n readonly #jsonFormat: JSONFormat;\n\n #schemaChanged = false;\n\n constructor(\n lc: LogContext,\n db: StatementRunner,\n mode: ChangeProcessorMode,\n tableSpecs: Map<string, LiteTableSpec>,\n commitVersion: LexiVersion,\n jsonFormat: JSONFormat,\n ) {\n this.#startMs = Date.now();\n this.#mode = mode;\n this.#jsonFormat = jsonFormat;\n\n switch (mode) {\n case 'serving':\n // Although the Replicator / Incremental Syncer is the only writer of the replica,\n // a `BEGIN CONCURRENT` transaction is used to allow View Syncers to simulate\n // (i.e. 
and `ROLLBACK`) changes on historic snapshots of the database for the\n // purpose of IVM).\n //\n // This TransactionProcessor is the only logic that will actually\n // `COMMIT` any transactions to the replica.\n db.beginConcurrent();\n break;\n case 'backup':\n // For the backup-replicator (i.e. replication-manager), there are no View Syncers\n // and thus BEGIN CONCURRENT is not necessary. In fact, BEGIN CONCURRENT can cause\n // deadlocks with forced wal-checkpoints (which `litestream replicate` performs),\n // so it is important to use vanilla transactions in this configuration.\n db.beginImmediate();\n break;\n case 'initial-sync':\n // When the ChangeProcessor is used for initial-sync, the calling code\n // handles the transaction boundaries.\n break;\n default:\n unreachable();\n }\n this.#db = db;\n this.#version = commitVersion;\n this.#lc = lc.withContext('version', commitVersion);\n this.#tableSpecs = tableSpecs;\n\n if (this.#tableSpecs.size === 0) {\n this.#reloadTableSpecs();\n }\n }\n\n #reloadTableSpecs() {\n this.#tableSpecs.clear();\n // zqlSpecs include the primary key derived from unique indexes\n const zqlSpecs = computeZqlSpecs(this.#lc, this.#db.db);\n for (let spec of listTables(this.#db.db)) {\n if (!spec.primaryKey) {\n spec = {\n ...spec,\n primaryKey: [\n ...(zqlSpecs.get(spec.name)?.tableSpec.primaryKey ?? []),\n ],\n };\n }\n this.#tableSpecs.set(spec.name, spec);\n }\n }\n\n #tableSpec(name: string) {\n return must(this.#tableSpecs.get(name), `Unknown table ${name}`);\n }\n\n #getKey(\n {row, numCols}: {row: LiteRow; numCols: number},\n {relation}: {relation: MessageRelation},\n ): LiteRowKey {\n const keyColumns =\n relation.replicaIdentity !== 'full'\n ? relation.keyColumns // already a suitable key\n : this.#tableSpec(liteTableName(relation)).primaryKey;\n if (!keyColumns?.length) {\n throw new Error(\n `Cannot replicate table \"${relation.name}\" without a PRIMARY KEY or UNIQUE INDEX`,\n );\n }\n // For the common case (replica identity default), the row is already the\n // key for deletes and updates, in which case a new object can be avoided.\n if (numCols === keyColumns.length) {\n return row;\n }\n const key: Record<string, LiteValueType> = {};\n for (const col of keyColumns) {\n key[col] = row[col];\n }\n return key;\n }\n\n processInsert(insert: MessageInsert) {\n const table = liteTableName(insert.relation);\n const newRow = liteRow(\n insert.new,\n this.#tableSpec(table),\n this.#jsonFormat,\n );\n\n this.#upsert(table, {\n ...newRow.row,\n [ZERO_VERSION_COLUMN_NAME]: this.#version,\n });\n\n if (insert.relation.keyColumns.length === 0) {\n // INSERTs can be replicated for rows without a PRIMARY KEY or a\n // UNIQUE INDEX. These are written to the replica but not recorded\n // in the changeLog, because these rows cannot participate in IVM.\n //\n // (Once the table schema has been corrected to include a key, the\n // associated schema change will reset pipelines and data can be\n // loaded via hydration.)\n return;\n }\n const key = this.#getKey(newRow, insert);\n this.#logSetOp(table, key);\n }\n\n #upsert(table: string, row: LiteRow) {\n const columns = Object.keys(row).map(c => id(c));\n this.#db.run(\n `\n INSERT OR REPLACE INTO ${id(table)} (${columns.join(',')})\n VALUES (${Array.from({length: columns.length}).fill('?').join(',')})\n `,\n Object.values(row),\n );\n }\n\n // Updates by default are applied as UPDATE commands to support partial\n // row specifications from the change source. 
In particular, this is needed\n // to handle updates for which unchanged TOASTed values are not sent:\n //\n // https://www.postgresql.org/docs/current/protocol-logicalrep-message-formats.html#PROTOCOL-LOGICALREP-MESSAGE-FORMATS-TUPLEDATA\n //\n // However, in certain cases an UPDATE may be received for a row that\n // was not initially synced, such as when:\n // (1) an existing table is added to the app's publication, or\n // (2) a new sharding key is added to a shard during resharding.\n //\n // In order to facilitate \"resumptive\" replication, the logic falls back to\n // an INSERT if the update did not change any rows.\n // TODO: Figure out a solution for resumptive replication of rows\n // with TOASTed values.\n processUpdate(update: MessageUpdate) {\n const table = liteTableName(update.relation);\n const newRow = liteRow(\n update.new,\n this.#tableSpec(table),\n this.#jsonFormat,\n );\n const row = {...newRow.row, [ZERO_VERSION_COLUMN_NAME]: this.#version};\n\n // update.key is set with the old values if the key has changed.\n const oldKey = update.key\n ? this.#getKey(\n liteRow(update.key, this.#tableSpec(table), this.#jsonFormat),\n update,\n )\n : null;\n const newKey = this.#getKey(newRow, update);\n\n if (oldKey) {\n this.#logDeleteOp(table, oldKey);\n }\n this.#logSetOp(table, newKey);\n\n const currKey = oldKey ?? newKey;\n const conds = Object.keys(currKey).map(col => `${id(col)}=?`);\n const setExprs = Object.keys(row).map(col => `${id(col)}=?`);\n\n const {changes} = this.#db.run(\n `\n UPDATE ${id(table)}\n SET ${setExprs.join(',')}\n WHERE ${conds.join(' AND ')}\n `,\n [...Object.values(row), ...Object.values(currKey)],\n );\n\n // If the UPDATE did not affect any rows, perform an UPSERT of the\n // new row for resumptive replication.\n if (changes === 0) {\n this.#upsert(table, row);\n }\n }\n\n processDelete(del: MessageDelete) {\n const table = liteTableName(del.relation);\n const rowKey = this.#getKey(\n liteRow(del.key, this.#tableSpec(table), this.#jsonFormat),\n del,\n );\n\n this.#delete(table, rowKey);\n\n if (this.#mode === 'serving') {\n this.#logDeleteOp(table, rowKey);\n }\n }\n\n #delete(table: string, rowKey: LiteRowKey) {\n const conds = Object.keys(rowKey).map(col => `${id(col)}=?`);\n this.#db.run(\n `DELETE FROM ${id(table)} WHERE ${conds.join(' AND ')}`,\n Object.values(rowKey),\n );\n }\n\n processTruncate(truncate: MessageTruncate) {\n for (const relation of truncate.relations) {\n const table = liteTableName(relation);\n // Update replica data.\n this.#db.run(`DELETE FROM ${id(table)}`);\n\n // Update change log.\n this.#logTruncateOp(table);\n }\n }\n processCreateTable(create: TableCreate) {\n const table = mapPostgresToLite(create.spec);\n this.#db.db.exec(createLiteTableStatement(table));\n\n // Write to metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n for (const [colName, colSpec] of Object.entries(create.spec.columns)) {\n store.insert(table.name, colName, colSpec);\n }\n }\n\n this.#logResetOp(table.name);\n this.#lc.info?.(create.tag, table.name);\n }\n\n processRenameTable(rename: TableRename) {\n const oldName = liteTableName(rename.old);\n const newName = liteTableName(rename.new);\n this.#db.db.exec(`ALTER TABLE ${id(oldName)} RENAME TO ${id(newName)}`);\n\n // Rename in metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.renameTable(oldName, newName);\n }\n\n this.#bumpVersions(newName);\n this.#logResetOp(oldName);\n 
this.#lc.info?.(rename.tag, oldName, newName);\n }\n\n processAddColumn(msg: ColumnAdd) {\n const table = liteTableName(msg.table);\n const {name} = msg.column;\n const spec = mapPostgresToLiteColumn(table, msg.column);\n this.#db.db.exec(\n `ALTER TABLE ${id(table)} ADD ${id(name)} ${liteColumnDef(spec)}`,\n );\n\n // Write to metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.insert(table, name, msg.column.spec);\n }\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, msg.column);\n }\n\n processUpdateColumn(msg: ColumnUpdate) {\n const table = liteTableName(msg.table);\n let oldName = msg.old.name;\n const newName = msg.new.name;\n\n // update-column can ignore defaults because it does not change the values\n // in existing rows.\n //\n // https://www.postgresql.org/docs/current/sql-altertable.html#SQL-ALTERTABLE-DESC-SET-DROP-DEFAULT\n //\n // \"The new default value will only apply in subsequent INSERT or UPDATE\n // commands; it does not cause rows already in the table to change.\"\n //\n // This allows support for _changing_ column defaults to any expression,\n // since it does not affect what the replica needs to do.\n const oldSpec = mapPostgresToLiteColumn(table, msg.old, 'ignore-default');\n const newSpec = mapPostgresToLiteColumn(table, msg.new, 'ignore-default');\n\n // The only updates that are relevant are the column name and the data type.\n if (oldName === newName && oldSpec.dataType === newSpec.dataType) {\n this.#lc.info?.(msg.tag, 'no thing to update', oldSpec, newSpec);\n return;\n }\n // If the data type changes, we have to make a new column with the new data type\n // and copy the values over.\n if (oldSpec.dataType !== newSpec.dataType) {\n // Remember (and drop) the indexes that reference the column.\n const indexes = listIndexes(this.#db.db).filter(\n idx => idx.tableName === table && oldName in idx.columns,\n );\n const stmts = indexes.map(idx => `DROP INDEX IF EXISTS ${id(idx.name)};`);\n const tmpName = `tmp.${newName}`;\n stmts.push(`\n ALTER TABLE ${id(table)} ADD ${id(tmpName)} ${liteColumnDef(newSpec)};\n UPDATE ${id(table)} SET ${id(tmpName)} = ${id(oldName)};\n ALTER TABLE ${id(table)} DROP ${id(oldName)};\n `);\n for (const idx of indexes) {\n // Re-create the indexes to reference the new column.\n idx.columns[tmpName] = idx.columns[oldName];\n delete idx.columns[oldName];\n stmts.push(createLiteIndexStatement(idx));\n }\n this.#db.db.exec(stmts.join(''));\n oldName = tmpName;\n }\n if (oldName !== newName) {\n this.#db.db.exec(\n `ALTER TABLE ${id(table)} RENAME ${id(oldName)} TO ${id(newName)}`,\n );\n }\n\n // Update metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.update(table, msg.old.name, msg.new.name, msg.new.spec);\n }\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, msg.new);\n }\n\n processDropColumn(msg: ColumnDrop) {\n const table = liteTableName(msg.table);\n const {column} = msg;\n this.#db.db.exec(`ALTER TABLE ${id(table)} DROP ${id(column)}`);\n\n // Delete from metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.deleteColumn(table, column);\n }\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, column);\n }\n\n processDropTable(drop: TableDrop) {\n const name = liteTableName(drop.id);\n this.#db.db.exec(`DROP TABLE IF EXISTS ${id(name)}`);\n\n // Delete from metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) 
{\n store.deleteTable(name);\n }\n\n this.#logResetOp(name);\n this.#lc.info?.(drop.tag, name);\n }\n\n processCreateIndex(create: IndexCreate) {\n const index = mapPostgresToLiteIndex(create.spec);\n this.#db.db.exec(createLiteIndexStatement(index));\n\n // indexes affect tables visibility (e.g. sync-ability is gated on\n // having a unique index), so reset pipelines to refresh table schemas.\n this.#logResetOp(index.tableName);\n this.#lc.info?.(create.tag, index.name);\n }\n\n processDropIndex(drop: IndexDrop) {\n const name = liteTableName(drop.id);\n this.#db.db.exec(`DROP INDEX IF EXISTS ${id(name)}`);\n this.#lc.info?.(drop.tag, name);\n }\n\n #bumpVersions(table: string) {\n this.#db.run(\n `UPDATE ${id(table)} SET ${id(ZERO_VERSION_COLUMN_NAME)} = ?`,\n this.#version,\n );\n this.#logResetOp(table);\n }\n\n #logSetOp(table: string, key: LiteRowKey) {\n if (this.#mode === 'serving') {\n logSetOp(this.#db, this.#version, table, key);\n }\n }\n\n #logDeleteOp(table: string, key: LiteRowKey) {\n if (this.#mode === 'serving') {\n logDeleteOp(this.#db, this.#version, table, key);\n }\n }\n\n #logTruncateOp(table: string) {\n if (this.#mode === 'serving') {\n logTruncateOp(this.#db, this.#version, table);\n }\n }\n\n #logResetOp(table: string) {\n this.#schemaChanged = true;\n if (this.#mode === 'serving') {\n logResetOp(this.#db, this.#version, table);\n }\n this.#reloadTableSpecs();\n }\n\n /** @returns `true` if the schema was updated. */\n processCommit(commit: MessageCommit, watermark: string): boolean {\n if (watermark !== this.#version) {\n throw new Error(\n `'commit' version ${watermark} does not match 'begin' version ${\n this.#version\n }: ${stringify(commit)}`,\n );\n }\n updateReplicationWatermark(this.#db, watermark);\n\n if (this.#schemaChanged) {\n const start = Date.now();\n this.#db.db.pragma('optimize');\n this.#lc.info?.(\n `PRAGMA optimized after schema change (${Date.now() - start} ms)`,\n );\n }\n\n if (this.#mode !== 'initial-sync') {\n this.#db.commit();\n }\n\n const elapsedMs = Date.now() - this.#startMs;\n this.#lc.debug?.(`Committed tx@${this.#version} (${elapsedMs} ms)`);\n\n return this.#schemaChanged;\n }\n\n abort(lc: LogContext) {\n lc.info?.(`aborting transaction ${this.#version}`);\n this.#db.rollback();\n }\n}\n\nfunction ensureError(err: unknown): Error {\n if (err instanceof Error) {\n return err;\n }\n const error = new Error();\n error.cause = err;\n return 
error;\n}\n"],"names":[],"mappings":";;;;;;;;;;;;;;;AAmFO,MAAM,gBAAgB;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA,EAKA,kCAAkB,IAAA;AAAA,EAE3B,aAA0C;AAAA,EAE1C;AAAA,EAEA,YACE,IACA,MACA,aACA;AACA,SAAK,MAAM;AACX,SAAK,QAAQ;AACb,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,IAAgB,KAAc;AAClC,QAAI,CAAC,KAAK,UAAU;AAClB,WAAK,YAAY,MAAM,EAAE;AAEzB,WAAK,WAAW,YAAY,GAAG;AAE/B,UAAI,EAAE,eAAe,aAAa;AAEhC,WAAG,QAAQ,8BAA8B,KAAK,QAAQ;AACtD,aAAK,aAAa,IAAI,KAAK,QAAQ;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,IAAgB;AACpB,SAAK,MAAM,IAAI,IAAI,WAAA,CAAY;AAAA,EACjC;AAAA;AAAA,EAGA,eACE,IACA,YACqB;AACrB,UAAM,CAAC,MAAM,OAAO,IAAI;AACxB,QAAI,KAAK,UAAU;AACjB,SAAG,QAAQ,YAAY,QAAQ,GAAG,EAAE;AACpC,aAAO;AAAA,IACT;AACA,QAAI;AACF,YAAM,YACJ,SAAS,UACL,WAAW,CAAC,EAAE,kBACd,SAAS,WACP,WAAW,CAAC,EAAE,YACd;AACR,aAAO,KAAK,gBAAgB,IAAI,SAAS,SAAS;AAAA,IACpD,SAAS,GAAG;AACV,WAAK,MAAM,IAAI,CAAC;AAAA,IAClB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,kBACE,IACA,eACA,YACsB;AACtB,UAAM,QAAQ,KAAK,IAAA;AASnB,aAAS,IAAI,KAAK,KAAK;AACrB,UAAI;AACF,eAAO,IAAI;AAAA,UACT;AAAA,UACA,KAAK;AAAA,UACL,KAAK;AAAA,UACL,KAAK;AAAA,UACL;AAAA,UACA;AAAA,QAAA;AAAA,MAEJ,SAAS,GAAG;AACV,YAAI,aAAa,eAAe,EAAE,SAAS,eAAe;AACxD,aAAG;AAAA,YACD,mBAAmB,KAAK,IAAA,IAAQ,KAAK,gBAAgB,IAAI,CAAC;AAAA,YAG1D;AAAA,UAAA;AAEF;AAAA,QACF;AACA,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,gBACE,IACA,KACA,WACqB;AACrB,QAAI,IAAI,QAAQ,SAAS;AACvB,UAAI,KAAK,YAAY;AACnB,cAAM,IAAI,MAAM,4BAA4B,UAAU,GAAG,CAAC,EAAE;AAAA,MAC9D;AACA,WAAK,aAAa,KAAK;AAAA,QACrB;AAAA,QACA,KAAK,SAAS;AAAA,QACd,IAAI,QAAQ;AAAA,MAAA;AAEd,aAAO;AAAA,IACT;AAGA,UAAM,KAAK,KAAK;AAChB,QAAI,CAAC,IAAI;AACP,YAAM,IAAI;AAAA,QACR,4CAA4C,UAAU,GAAG,CAAC;AAAA,MAAA;AAAA,IAE9D;AAEA,QAAI,IAAI,QAAQ,UAAU;AAExB,WAAK,aAAa;AAElB,aAAO,SAAS;AAChB,YAAM,gBAAgB,GAAG,cAAc,KAAK,SAAS;AACrD,aAAO,EAAC,WAAW,cAAA;AAAA,IACrB;AAEA,QAAI,IAAI,QAAQ,YAAY;AAC1B,WAAK,YAAY,MAAM,EAAE;AACzB,WAAK,aAAa;AAClB,aAAO;AAAA,IACT;AAEA,YAAQ,IAAI,KAAA;AAAA,MACV,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,gBAAgB,GAAG;AACtB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF,KAAK;AACH,WAAG,oBAAoB,GAAG;AAC1B;AAAA,MACF,KAAK;AACH,WAAG,kBAAkB,GAAG;AACxB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF;AACE,oBAAe;AAAA,IAAA;AAGnB,WAAO;AAAA,EACT;AACF;AAuBA,MAAM,qBAAqB;AAAA,EAChB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,iBAAiB;AAAA,EAEjB,YACE,IACA,IACA,MACA,YACA,eACA,YACA;AACA,SAAK,WAAW,KAAK,IAAA;AACrB,SAAK,QAAQ;AACb,SAAK,cAAc;AAEnB,YAAQ,MAAA;AAAA,MACN,KAAK;AAQH,WAAG,gBAAA;AACH;AAAA,MACF,KAAK;AAKH,WAAG,eAAA;AACH;AAAA,MACF,KAAK;AAGH;AAAA,MACF;AACE,oBAAA;AAAA,IAAY;AAEhB,SAAK,MAAM;AACX,SAAK,WAAW;AAChB,SAAK,MAAM,GAAG,YAAY,WAAW,aAAa;AAClD,SAAK,cAAc;AAEnB,QAAI,KAAK,YAAY,SAAS,GAAG;AAC/B,WAAK,kBAAA;AAAA,IACP;AAAA,EACF;AAAA,EAEA,oBAAoB;AAClB,SAAK,YAAY,MAAA;AAEjB,UAAM,WAAW,gBAAgB,KAAK,KAAK,KAAK,IAAI,EAAE;AACtD,aAAS,QAAQ,WAAW,KAAK,IAAI,EAAE,GAAG;AACxC,UAAI,CAAC,KAAK,YAAY;AACpB,eAAO;AAAA,UACL,GAAG;AAAA,UACH,YAAY;AAAA,YACV,GAAI,SAAS,IAAI,KAAK,IAAI,GAAG,UAAU,cAAc,CAAA;AAAA,UAAC;AAAA,QACxD;AAAA,MAEJ;AACA,WAAK,YAAY,IAAI,KAAK,MAAM,IAAI;AAAA,IACtC;AAAA,EACF;AAAA,EAEA,WAAW,MAAc;AACvB,WAAO,KAAK,KAAK,YAAY,IAAI,IAAI,GAAG,iBAAiB,IAAI,EAAE;AAAA,EACjE;AAAA,EAEA,QACE,EAAC,KAAK,WACN,EAAC,YACW;AACZ,UAAM,aACJ,SAAS,oBAAoB,SACzB,SAAS,aACT,KAAK,WAAW,cAAc,QAAQ,CAAC,EAAE;AAC/C,QAAI,CAAC,YAAY,QAAQ;AACvB,YAAM,IAAI;AAAA,QACR,2BAA2B,SAAS,IAAI;AAAA,MAAA;AAAA,
IAE5C;AAGA,QAAI,YAAY,WAAW,QAAQ;AACjC,aAAO;AAAA,IACT;AACA,UAAM,MAAqC,CAAA;AAC3C,eAAW,OAAO,YAAY;AAC5B,UAAI,GAAG,IAAI,IAAI,GAAG;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,cAAc,QAAuB;AACnC,UAAM,QAAQ,cAAc,OAAO,QAAQ;AAC3C,UAAM,SAAS;AAAA,MACb,OAAO;AAAA,MACP,KAAK,WAAW,KAAK;AAAA,MACrB,KAAK;AAAA,IAAA;AAGP,SAAK,QAAQ,OAAO;AAAA,MAClB,GAAG,OAAO;AAAA,MACV,CAAC,wBAAwB,GAAG,KAAK;AAAA,IAAA,CAClC;AAED,QAAI,OAAO,SAAS,WAAW,WAAW,GAAG;AAQ3C;AAAA,IACF;AACA,UAAM,MAAM,KAAK,QAAQ,QAAQ,MAAM;AACvC,SAAK,UAAU,OAAO,GAAG;AAAA,EAC3B;AAAA,EAEA,QAAQ,OAAe,KAAc;AACnC,UAAM,UAAU,OAAO,KAAK,GAAG,EAAE,IAAI,CAAA,MAAK,GAAG,CAAC,CAAC;AAC/C,SAAK,IAAI;AAAA,MACP;AAAA,+BACyB,GAAG,KAAK,CAAC,KAAK,QAAQ,KAAK,GAAG,CAAC;AAAA,kBAC5C,MAAM,KAAK,EAAC,QAAQ,QAAQ,QAAO,EAAE,KAAK,GAAG,EAAE,KAAK,GAAG,CAAC;AAAA;AAAA,MAEpE,OAAO,OAAO,GAAG;AAAA,IAAA;AAAA,EAErB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,cAAc,QAAuB;AACnC,UAAM,QAAQ,cAAc,OAAO,QAAQ;AAC3C,UAAM,SAAS;AAAA,MACb,OAAO;AAAA,MACP,KAAK,WAAW,KAAK;AAAA,MACrB,KAAK;AAAA,IAAA;AAEP,UAAM,MAAM,EAAC,GAAG,OAAO,KAAK,CAAC,wBAAwB,GAAG,KAAK,SAAA;AAG7D,UAAM,SAAS,OAAO,MAClB,KAAK;AAAA,MACH,QAAQ,OAAO,KAAK,KAAK,WAAW,KAAK,GAAG,KAAK,WAAW;AAAA,MAC5D;AAAA,IAAA,IAEF;AACJ,UAAM,SAAS,KAAK,QAAQ,QAAQ,MAAM;AAE1C,QAAI,QAAQ;AACV,WAAK,aAAa,OAAO,MAAM;AAAA,IACjC;AACA,SAAK,UAAU,OAAO,MAAM;AAE5B,UAAM,UAAU,UAAU;AAC1B,UAAM,QAAQ,OAAO,KAAK,OAAO,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAC5D,UAAM,WAAW,OAAO,KAAK,GAAG,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAE3D,UAAM,EAAC,QAAA,IAAW,KAAK,IAAI;AAAA,MACzB;AAAA,eACS,GAAG,KAAK,CAAC;AAAA,cACV,SAAS,KAAK,GAAG,CAAC;AAAA,gBAChB,MAAM,KAAK,OAAO,CAAC;AAAA;AAAA,MAE7B,CAAC,GAAG,OAAO,OAAO,GAAG,GAAG,GAAG,OAAO,OAAO,OAAO,CAAC;AAAA,IAAA;AAKnD,QAAI,YAAY,GAAG;AACjB,WAAK,QAAQ,OAAO,GAAG;AAAA,IACzB;AAAA,EACF;AAAA,EAEA,cAAc,KAAoB;AAChC,UAAM,QAAQ,cAAc,IAAI,QAAQ;AACxC,UAAM,SAAS,KAAK;AAAA,MAClB,QAAQ,IAAI,KAAK,KAAK,WAAW,KAAK,GAAG,KAAK,WAAW;AAAA,MACzD;AAAA,IAAA;AAGF,SAAK,QAAQ,OAAO,MAAM;AAE1B,QAAI,KAAK,UAAU,WAAW;AAC5B,WAAK,aAAa,OAAO,MAAM;AAAA,IACjC;AAAA,EACF;AAAA,EAEA,QAAQ,OAAe,QAAoB;AACzC,UAAM,QAAQ,OAAO,KAAK,MAAM,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAC3D,SAAK,IAAI;AAAA,MACP,eAAe,GAAG,KAAK,CAAC,UAAU,MAAM,KAAK,OAAO,CAAC;AAAA,MACrD,OAAO,OAAO,MAAM;AAAA,IAAA;AAAA,EAExB;AAAA,EAEA,gBAAgB,UAA2B;AACzC,eAAW,YAAY,SAAS,WAAW;AACzC,YAAM,QAAQ,cAAc,QAAQ;AAEpC,WAAK,IAAI,IAAI,eAAe,GAAG,KAAK,CAAC,EAAE;AAGvC,WAAK,eAAe,KAAK;AAAA,IAC3B;AAAA,EACF;AAAA,EACA,mBAAmB,QAAqB;AACtC,UAAM,QAAQ,kBAAkB,OAAO,IAAI;AAC3C,SAAK,IAAI,GAAG,KAAK,yBAAyB,KAAK,CAAC;AAGhD,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,iBAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,OAAO,KAAK,OAAO,GAAG;AACpE,cAAM,OAAO,MAAM,MAAM,SAAS,OAAO;AAAA,MAC3C;AAAA,IACF;AAEA,SAAK,YAAY,MAAM,IAAI;AAC3B,SAAK,IAAI,OAAO,OAAO,KAAK,MAAM,IAAI;AAAA,EACxC;AAAA,EAEA,mBAAmB,QAAqB;AACtC,UAAM,UAAU,cAAc,OAAO,GAAG;AACxC,UAAM,UAAU,cAAc,OAAO,GAAG;AACxC,SAAK,IAAI,GAAG,KAAK,eAAe,GAAG,OAAO,CAAC,cAAc,GAAG,OAAO,CAAC,EAAE;AAGtE,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,YAAY,SAAS,OAAO;AAAA,IACpC;AAEA,SAAK,cAAc,OAAO;AAC1B,SAAK,YAAY,OAAO;AACxB,SAAK,IAAI,OAAO,OAAO,KAAK,SAAS,OAAO;AAAA,EAC9C;AAAA,EAEA,iBAAiB,KAAgB;AAC/B,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,UAAM,EAAC,SAAQ,IAAI;AACnB,UAAM,OAAO,wBAAwB,OAAO,IAAI,MAAM;AACtD,SAAK,IAAI,GAAG;AAAA,MACV,eAAe,GAAG,KAAK,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,cAAc,IAAI,CAAC;AAAA,IAAA;AAIjE,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,OAAO,OAAO,MAAM,IAAI,OAAO,IAAI;AAAA,IAC3C;AAEA,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,IAAI,MAAM;AAAA,EAC5C;AAAA,EAEA,oBAAoB,KAAmB;AACrC,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,QAAI,UAA
U,IAAI,IAAI;AACtB,UAAM,UAAU,IAAI,IAAI;AAYxB,UAAM,UAAU,wBAAwB,OAAO,IAAI,KAAK,gBAAgB;AACxE,UAAM,UAAU,wBAAwB,OAAO,IAAI,KAAK,gBAAgB;AAGxE,QAAI,YAAY,WAAW,QAAQ,aAAa,QAAQ,UAAU;AAChE,WAAK,IAAI,OAAO,IAAI,KAAK,sBAAsB,SAAS,OAAO;AAC/D;AAAA,IACF;AAGA,QAAI,QAAQ,aAAa,QAAQ,UAAU;AAEzC,YAAM,UAAU,YAAY,KAAK,IAAI,EAAE,EAAE;AAAA,QACvC,CAAA,QAAO,IAAI,cAAc,SAAS,WAAW,IAAI;AAAA,MAAA;AAEnD,YAAM,QAAQ,QAAQ,IAAI,CAAA,QAAO,wBAAwB,GAAG,IAAI,IAAI,CAAC,GAAG;AACxE,YAAM,UAAU,OAAO,OAAO;AAC9B,YAAM,KAAK;AAAA,sBACK,GAAG,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAC,IAAI,cAAc,OAAO,CAAC;AAAA,iBAC3D,GAAG,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAC,MAAM,GAAG,OAAO,CAAC;AAAA,sBACxC,GAAG,KAAK,CAAC,SAAS,GAAG,OAAO,CAAC;AAAA,SAC1C;AACH,iBAAW,OAAO,SAAS;AAEzB,YAAI,QAAQ,OAAO,IAAI,IAAI,QAAQ,OAAO;AAC1C,eAAO,IAAI,QAAQ,OAAO;AAC1B,cAAM,KAAK,yBAAyB,GAAG,CAAC;AAAA,MAC1C;AACA,WAAK,IAAI,GAAG,KAAK,MAAM,KAAK,EAAE,CAAC;AAC/B,gBAAU;AAAA,IACZ;AACA,QAAI,YAAY,SAAS;AACvB,WAAK,IAAI,GAAG;AAAA,QACV,eAAe,GAAG,KAAK,CAAC,WAAW,GAAG,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC;AAAA,MAAA;AAAA,IAEpE;AAGA,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,OAAO,OAAO,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,IAAI;AAAA,IAC9D;AAEA,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,IAAI,GAAG;AAAA,EACzC;AAAA,EAEA,kBAAkB,KAAiB;AACjC,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,UAAM,EAAC,WAAU;AACjB,SAAK,IAAI,GAAG,KAAK,eAAe,GAAG,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,EAAE;AAG9D,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,aAAa,OAAO,MAAM;AAAA,IAClC;AAEA,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,MAAM;AAAA,EACxC;AAAA,EAEA,iBAAiB,MAAiB;AAChC,UAAM,OAAO,cAAc,KAAK,EAAE;AAClC,SAAK,IAAI,GAAG,KAAK,wBAAwB,GAAG,IAAI,CAAC,EAAE;AAGnD,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,YAAY,IAAI;AAAA,IACxB;AAEA,SAAK,YAAY,IAAI;AACrB,SAAK,IAAI,OAAO,KAAK,KAAK,IAAI;AAAA,EAChC;AAAA,EAEA,mBAAmB,QAAqB;AACtC,UAAM,QAAQ,uBAAuB,OAAO,IAAI;AAChD,SAAK,IAAI,GAAG,KAAK,yBAAyB,KAAK,CAAC;AAIhD,SAAK,YAAY,MAAM,SAAS;AAChC,SAAK,IAAI,OAAO,OAAO,KAAK,MAAM,IAAI;AAAA,EACxC;AAAA,EAEA,iBAAiB,MAAiB;AAChC,UAAM,OAAO,cAAc,KAAK,EAAE;AAClC,SAAK,IAAI,GAAG,KAAK,wBAAwB,GAAG,IAAI,CAAC,EAAE;AACnD,SAAK,IAAI,OAAO,KAAK,KAAK,IAAI;AAAA,EAChC;AAAA,EAEA,cAAc,OAAe;AAC3B,SAAK,IAAI;AAAA,MACP,UAAU,GAAG,KAAK,CAAC,QAAQ,GAAG,wBAAwB,CAAC;AAAA,MACvD,KAAK;AAAA,IAAA;AAEP,SAAK,YAAY,KAAK;AAAA,EACxB;AAAA,EAEA,UAAU,OAAe,KAAiB;AACxC,QAAI,KAAK,UAAU,WAAW;AAC5B,eAAS,KAAK,KAAK,KAAK,UAAU,OAAO,GAAG;AAAA,IAC9C;AAAA,EACF;AAAA,EAEA,aAAa,OAAe,KAAiB;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,kBAAY,KAAK,KAAK,KAAK,UAAU,OAAO,GAAG;AAAA,IACjD;AAAA,EACF;AAAA,EAEA,eAAe,OAAe;AAC5B,QAAI,KAAK,UAAU,WAAW;AAC5B,oBAAc,KAAK,KAAK,KAAK,UAAU,KAAK;AAAA,IAC9C;AAAA,EACF;AAAA,EAEA,YAAY,OAAe;AACzB,SAAK,iBAAiB;AACtB,QAAI,KAAK,UAAU,WAAW;AAC5B,iBAAW,KAAK,KAAK,KAAK,UAAU,KAAK;AAAA,IAC3C;AACA,SAAK,kBAAA;AAAA,EACP;AAAA;AAAA,EAGA,cAAc,QAAuB,WAA4B;AAC/D,QAAI,cAAc,KAAK,UAAU;AAC/B,YAAM,IAAI;AAAA,QACR,oBAAoB,SAAS,mCAC3B,KAAK,QACP,KAAK,UAAU,MAAM,CAAC;AAAA,MAAA;AAAA,IAE1B;AACA,+BAA2B,KAAK,KAAK,SAAS;AAE9C,QAAI,KAAK,gBAAgB;AACvB,YAAM,QAAQ,KAAK,IAAA;AACnB,WAAK,IAAI,GAAG,OAAO,UAAU;AAC7B,WAAK,IAAI;AAAA,QACP,yCAAyC,KAAK,IAAA,IAAQ,KAAK;AAAA,MAAA;AAAA,IAE/D;AAEA,QAAI,KAAK,UAAU,gBAAgB;AACjC,WAAK,IAAI,OAAA;AAAA,IACX;AAEA,UAAM,YAAY,KAAK,IAAA,IAAQ,KAAK;AACpC,SAAK,IAAI,QAAQ,gBAAgB,KAAK,QAAQ,KAAK,SAAS,MAAM;AAElE,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,IAAgB;AACpB,OAAG,OAAO,wBAAwB,KAAK,QAAQ,EAAE;AACjD,SAAK,IAAI,SAAA;AAAA,EACX;AACF;AAEA,SAAS,YAAY,KAAqB;AACxC,MAAI,eAAe,OAAO;AACxB,WAAO;AAAA,EACT;AACA,QAAM,QAAQ,IAAI,MAAA;AAClB,QAAM,QAAQ;AACd,SAAO;AACT;"}
+ {"version":3,"file":"change-processor.js","sources":["../../../../../../zero-cache/src/services/replicator/change-processor.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {SqliteError} from '@rocicorp/zero-sqlite3';\nimport {AbortError} from '../../../../shared/src/abort-error.ts';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport {stringify} from '../../../../shared/src/bigint-json.ts';\nimport {must} from '../../../../shared/src/must.ts';\nimport {\n createLiteIndexStatement,\n createLiteTableStatement,\n liteColumnDef,\n} from '../../db/create.ts';\nimport {\n computeZqlSpecs,\n listIndexes,\n listTables,\n} from '../../db/lite-tables.ts';\nimport {\n mapPostgresToLite,\n mapPostgresToLiteColumn,\n mapPostgresToLiteIndex,\n} from '../../db/pg-to-lite.ts';\nimport type {LiteTableSpec} from '../../db/specs.ts';\nimport type {StatementRunner} from '../../db/statements.ts';\nimport type {LexiVersion} from '../../types/lexi-version.ts';\nimport {\n JSON_PARSED,\n liteRow,\n type JSONFormat,\n type LiteRow,\n type LiteRowKey,\n type LiteValueType,\n} from '../../types/lite.ts';\nimport {liteTableName} from '../../types/names.ts';\nimport {id} from '../../types/sql.ts';\nimport {ColumnMetadataStore} from '../change-source/column-metadata.ts';\nimport type {\n Change,\n ColumnAdd,\n ColumnDrop,\n ColumnUpdate,\n IndexCreate,\n IndexDrop,\n MessageCommit,\n MessageDelete,\n MessageInsert,\n MessageRelation,\n MessageTruncate,\n MessageUpdate,\n TableCreate,\n TableDrop,\n TableRename,\n} from '../change-source/protocol/current/data.ts';\nimport type {ChangeStreamData} from '../change-source/protocol/current/downstream.ts';\nimport type {ReplicatorMode} from './replicator.ts';\nimport {\n logDeleteOp,\n logResetOp,\n logSetOp,\n logTruncateOp,\n} from './schema/change-log.ts';\nimport {\n ZERO_VERSION_COLUMN_NAME,\n updateReplicationWatermark,\n} from './schema/replication-state.ts';\n\nexport type ChangeProcessorMode = ReplicatorMode | 'initial-sync';\n\nexport type CommitResult = {\n watermark: string;\n schemaUpdated: boolean;\n};\n\n/**\n * The ChangeProcessor partitions the stream of messages into transactions\n * by creating a {@link TransactionProcessor} when a transaction begins, and dispatching\n * messages to it until the commit is received.\n *\n * From https://www.postgresql.org/docs/current/protocol-logical-replication.html#PROTOCOL-LOGICAL-MESSAGES-FLOW :\n *\n * \"The logical replication protocol sends individual transactions one by one.\n * This means that all messages between a pair of Begin and Commit messages\n * belong to the same transaction.\"\n */\nexport class ChangeProcessor {\n readonly #db: StatementRunner;\n readonly #mode: ChangeProcessorMode;\n readonly #failService: (lc: LogContext, err: unknown) => void;\n\n // The TransactionProcessor lazily loads table specs into this Map,\n // and reloads them after a schema change. 
It is cached here to avoid\n // reading them from the DB on every transaction.\n readonly #tableSpecs = new Map<string, LiteTableSpec>();\n\n #currentTx: TransactionProcessor | null = null;\n\n #failure: Error | undefined;\n\n constructor(\n db: StatementRunner,\n mode: ChangeProcessorMode,\n failService: (lc: LogContext, err: unknown) => void,\n ) {\n this.#db = db;\n this.#mode = mode;\n this.#failService = failService;\n }\n\n #fail(lc: LogContext, err: unknown) {\n if (!this.#failure) {\n this.#currentTx?.abort(lc); // roll back any pending transaction.\n\n this.#failure = ensureError(err);\n\n if (!(err instanceof AbortError)) {\n // Propagate the failure up to the service.\n lc.error?.('Message Processing failed:', this.#failure);\n this.#failService(lc, this.#failure);\n }\n }\n }\n\n abort(lc: LogContext) {\n this.#fail(lc, new AbortError());\n }\n\n /** @return If a transaction was committed. */\n processMessage(\n lc: LogContext,\n downstream: ChangeStreamData,\n ): CommitResult | null {\n const [type, message] = downstream;\n if (this.#failure) {\n lc.debug?.(`Dropping ${message.tag}`);\n return null;\n }\n try {\n const watermark =\n type === 'begin'\n ? downstream[2].commitWatermark\n : type === 'commit'\n ? downstream[2].watermark\n : undefined;\n return this.#processMessage(lc, message, watermark);\n } catch (e) {\n this.#fail(lc, e);\n }\n return null;\n }\n\n #beginTransaction(\n lc: LogContext,\n commitVersion: string,\n jsonFormat: JSONFormat,\n ): TransactionProcessor {\n const start = Date.now();\n\n // litestream can technically hold the lock for an arbitrary amount of time\n // when checkpointing a large commit. Crashing on the busy-timeout in this\n // scenario will either produce a corrupt backup or otherwise prevent\n // replication from proceeding.\n //\n // Instead, retry the lock acquisition indefinitely. If this masks\n // an unknown deadlock situation, manual intervention will be necessary.\n for (let i = 0; ; i++) {\n try {\n return new TransactionProcessor(\n lc,\n this.#db,\n this.#mode,\n this.#tableSpecs,\n commitVersion,\n jsonFormat,\n );\n } catch (e) {\n if (e instanceof SqliteError && e.code === 'SQLITE_BUSY') {\n lc.warn?.(\n `SQLITE_BUSY for ${Date.now() - start} ms (attempt ${i + 1}). ` +\n `This is only expected if litestream is performing a large ` +\n `checkpoint.`,\n e,\n );\n continue;\n }\n throw e;\n }\n }\n }\n\n /** @return If a transaction was committed. */\n #processMessage(\n lc: LogContext,\n msg: Change,\n watermark: string | undefined,\n ): CommitResult | null {\n if (msg.tag === 'begin') {\n if (this.#currentTx) {\n throw new Error(`Already in a transaction ${stringify(msg)}`);\n }\n this.#currentTx = this.#beginTransaction(\n lc,\n must(watermark),\n msg.json ?? 
JSON_PARSED,\n );\n return null;\n }\n\n // For non-begin messages, there should be a #currentTx set.\n const tx = this.#currentTx;\n if (!tx) {\n throw new Error(\n `Received message outside of transaction: ${stringify(msg)}`,\n );\n }\n\n if (msg.tag === 'commit') {\n // Undef this.#currentTx to allow the assembly of the next transaction.\n this.#currentTx = null;\n\n assert(watermark);\n const schemaUpdated = tx.processCommit(msg, watermark);\n return {watermark, schemaUpdated};\n }\n\n if (msg.tag === 'rollback') {\n this.#currentTx?.abort(lc);\n this.#currentTx = null;\n return null;\n }\n\n switch (msg.tag) {\n case 'insert':\n tx.processInsert(msg);\n break;\n case 'update':\n tx.processUpdate(msg);\n break;\n case 'delete':\n tx.processDelete(msg);\n break;\n case 'truncate':\n tx.processTruncate(msg);\n break;\n case 'create-table':\n tx.processCreateTable(msg);\n break;\n case 'rename-table':\n tx.processRenameTable(msg);\n break;\n case 'add-column':\n tx.processAddColumn(msg);\n break;\n case 'update-column':\n tx.processUpdateColumn(msg);\n break;\n case 'drop-column':\n tx.processDropColumn(msg);\n break;\n case 'drop-table':\n tx.processDropTable(msg);\n break;\n case 'create-index':\n tx.processCreateIndex(msg);\n break;\n case 'drop-index':\n tx.processDropIndex(msg);\n break;\n default:\n unreachable(msg);\n }\n\n return null;\n }\n}\n\n/**\n * The {@link TransactionProcessor} handles the sequence of messages from\n * upstream, from `BEGIN` to `COMMIT` and executes the corresponding mutations\n * on the {@link postgres.TransactionSql} on the replica.\n *\n * When applying row contents to the replica, the `_0_version` column is added / updated,\n * and a corresponding entry in the `ChangeLog` is added. The version value is derived\n * from the watermark of the preceding transaction (stored as the `nextStateVersion` in the\n * `ReplicationState` table).\n *\n * Side note: For non-streaming Postgres transactions, the commitEndLsn (and thus\n * commit watermark) is available in the `begin` message, so it could theoretically\n * be used for the row version of changes within the transaction. However, the\n * commitEndLsn is not available in the streaming (in-progress) transaction\n * protocol, and may not be available for CDC streams of other upstream types.\n * Therefore, the zero replication protocol is designed to not require the commit\n * watermark when a transaction begins.\n *\n * Also of interest is the fact that all INSERT Messages are logically applied as\n * UPSERTs. See {@link processInsert} for the underlying motivation.\n */\nclass TransactionProcessor {\n readonly #lc: LogContext;\n readonly #startMs: number;\n readonly #db: StatementRunner;\n readonly #mode: ChangeProcessorMode;\n readonly #version: LexiVersion;\n readonly #tableSpecs: Map<string, LiteTableSpec>;\n readonly #jsonFormat: JSONFormat;\n\n #pos = 0;\n #schemaChanged = false;\n\n constructor(\n lc: LogContext,\n db: StatementRunner,\n mode: ChangeProcessorMode,\n tableSpecs: Map<string, LiteTableSpec>,\n commitVersion: LexiVersion,\n jsonFormat: JSONFormat,\n ) {\n this.#startMs = Date.now();\n this.#mode = mode;\n this.#jsonFormat = jsonFormat;\n\n switch (mode) {\n case 'serving':\n // Although the Replicator / Incremental Syncer is the only writer of the replica,\n // a `BEGIN CONCURRENT` transaction is used to allow View Syncers to simulate\n // (i.e. 
and `ROLLBACK`) changes on historic snapshots of the database for the\n // purpose of IVM).\n //\n // This TransactionProcessor is the only logic that will actually\n // `COMMIT` any transactions to the replica.\n db.beginConcurrent();\n break;\n case 'backup':\n // For the backup-replicator (i.e. replication-manager), there are no View Syncers\n // and thus BEGIN CONCURRENT is not necessary. In fact, BEGIN CONCURRENT can cause\n // deadlocks with forced wal-checkpoints (which `litestream replicate` performs),\n // so it is important to use vanilla transactions in this configuration.\n db.beginImmediate();\n break;\n case 'initial-sync':\n // When the ChangeProcessor is used for initial-sync, the calling code\n // handles the transaction boundaries.\n break;\n default:\n unreachable();\n }\n this.#db = db;\n this.#version = commitVersion;\n this.#lc = lc.withContext('version', commitVersion);\n this.#tableSpecs = tableSpecs;\n\n if (this.#tableSpecs.size === 0) {\n this.#reloadTableSpecs();\n }\n }\n\n #reloadTableSpecs() {\n this.#tableSpecs.clear();\n // zqlSpecs include the primary key derived from unique indexes\n const zqlSpecs = computeZqlSpecs(this.#lc, this.#db.db);\n for (let spec of listTables(this.#db.db)) {\n if (!spec.primaryKey) {\n spec = {\n ...spec,\n primaryKey: [\n ...(zqlSpecs.get(spec.name)?.tableSpec.primaryKey ?? []),\n ],\n };\n }\n this.#tableSpecs.set(spec.name, spec);\n }\n }\n\n #tableSpec(name: string) {\n return must(this.#tableSpecs.get(name), `Unknown table ${name}`);\n }\n\n #getKey(\n {row, numCols}: {row: LiteRow; numCols: number},\n {relation}: {relation: MessageRelation},\n ): LiteRowKey {\n const keyColumns =\n relation.replicaIdentity !== 'full'\n ? relation.keyColumns // already a suitable key\n : this.#tableSpec(liteTableName(relation)).primaryKey;\n if (!keyColumns?.length) {\n throw new Error(\n `Cannot replicate table \"${relation.name}\" without a PRIMARY KEY or UNIQUE INDEX`,\n );\n }\n // For the common case (replica identity default), the row is already the\n // key for deletes and updates, in which case a new object can be avoided.\n if (numCols === keyColumns.length) {\n return row;\n }\n const key: Record<string, LiteValueType> = {};\n for (const col of keyColumns) {\n key[col] = row[col];\n }\n return key;\n }\n\n processInsert(insert: MessageInsert) {\n const table = liteTableName(insert.relation);\n const newRow = liteRow(\n insert.new,\n this.#tableSpec(table),\n this.#jsonFormat,\n );\n\n this.#upsert(table, {\n ...newRow.row,\n [ZERO_VERSION_COLUMN_NAME]: this.#version,\n });\n\n if (insert.relation.keyColumns.length === 0) {\n // INSERTs can be replicated for rows without a PRIMARY KEY or a\n // UNIQUE INDEX. These are written to the replica but not recorded\n // in the changeLog, because these rows cannot participate in IVM.\n //\n // (Once the table schema has been corrected to include a key, the\n // associated schema change will reset pipelines and data can be\n // loaded via hydration.)\n return;\n }\n const key = this.#getKey(newRow, insert);\n this.#logSetOp(table, key);\n }\n\n #upsert(table: string, row: LiteRow) {\n const columns = Object.keys(row).map(c => id(c));\n this.#db.run(\n `\n INSERT OR REPLACE INTO ${id(table)} (${columns.join(',')})\n VALUES (${Array.from({length: columns.length}).fill('?').join(',')})\n `,\n Object.values(row),\n );\n }\n\n // Updates by default are applied as UPDATE commands to support partial\n // row specifications from the change source. 
In particular, this is needed\n // to handle updates for which unchanged TOASTed values are not sent:\n //\n // https://www.postgresql.org/docs/current/protocol-logicalrep-message-formats.html#PROTOCOL-LOGICALREP-MESSAGE-FORMATS-TUPLEDATA\n //\n // However, in certain cases an UPDATE may be received for a row that\n // was not initially synced, such as when:\n // (1) an existing table is added to the app's publication, or\n // (2) a new sharding key is added to a shard during resharding.\n //\n // In order to facilitate \"resumptive\" replication, the logic falls back to\n // an INSERT if the update did not change any rows.\n // TODO: Figure out a solution for resumptive replication of rows\n // with TOASTed values.\n processUpdate(update: MessageUpdate) {\n const table = liteTableName(update.relation);\n const newRow = liteRow(\n update.new,\n this.#tableSpec(table),\n this.#jsonFormat,\n );\n const row = {...newRow.row, [ZERO_VERSION_COLUMN_NAME]: this.#version};\n\n // update.key is set with the old values if the key has changed.\n const oldKey = update.key\n ? this.#getKey(\n liteRow(update.key, this.#tableSpec(table), this.#jsonFormat),\n update,\n )\n : null;\n const newKey = this.#getKey(newRow, update);\n\n if (oldKey) {\n this.#logDeleteOp(table, oldKey);\n }\n this.#logSetOp(table, newKey);\n\n const currKey = oldKey ?? newKey;\n const conds = Object.keys(currKey).map(col => `${id(col)}=?`);\n const setExprs = Object.keys(row).map(col => `${id(col)}=?`);\n\n const {changes} = this.#db.run(\n `\n UPDATE ${id(table)}\n SET ${setExprs.join(',')}\n WHERE ${conds.join(' AND ')}\n `,\n [...Object.values(row), ...Object.values(currKey)],\n );\n\n // If the UPDATE did not affect any rows, perform an UPSERT of the\n // new row for resumptive replication.\n if (changes === 0) {\n this.#upsert(table, row);\n }\n }\n\n processDelete(del: MessageDelete) {\n const table = liteTableName(del.relation);\n const rowKey = this.#getKey(\n liteRow(del.key, this.#tableSpec(table), this.#jsonFormat),\n del,\n );\n\n this.#delete(table, rowKey);\n\n if (this.#mode === 'serving') {\n this.#logDeleteOp(table, rowKey);\n }\n }\n\n #delete(table: string, rowKey: LiteRowKey) {\n const conds = Object.keys(rowKey).map(col => `${id(col)}=?`);\n this.#db.run(\n `DELETE FROM ${id(table)} WHERE ${conds.join(' AND ')}`,\n Object.values(rowKey),\n );\n }\n\n processTruncate(truncate: MessageTruncate) {\n for (const relation of truncate.relations) {\n const table = liteTableName(relation);\n // Update replica data.\n this.#db.run(`DELETE FROM ${id(table)}`);\n\n // Update change log.\n this.#logTruncateOp(table);\n }\n }\n processCreateTable(create: TableCreate) {\n const table = mapPostgresToLite(create.spec);\n this.#db.db.exec(createLiteTableStatement(table));\n\n // Write to metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n for (const [colName, colSpec] of Object.entries(create.spec.columns)) {\n store.insert(table.name, colName, colSpec);\n }\n }\n\n this.#logResetOp(table.name);\n this.#lc.info?.(create.tag, table.name);\n }\n\n processRenameTable(rename: TableRename) {\n const oldName = liteTableName(rename.old);\n const newName = liteTableName(rename.new);\n this.#db.db.exec(`ALTER TABLE ${id(oldName)} RENAME TO ${id(newName)}`);\n\n // Rename in metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.renameTable(oldName, newName);\n }\n\n this.#bumpVersions(newName);\n this.#logResetOp(oldName);\n 
this.#lc.info?.(rename.tag, oldName, newName);\n }\n\n processAddColumn(msg: ColumnAdd) {\n const table = liteTableName(msg.table);\n const {name} = msg.column;\n const spec = mapPostgresToLiteColumn(table, msg.column);\n this.#db.db.exec(\n `ALTER TABLE ${id(table)} ADD ${id(name)} ${liteColumnDef(spec)}`,\n );\n\n // Write to metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.insert(table, name, msg.column.spec);\n }\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, msg.column);\n }\n\n processUpdateColumn(msg: ColumnUpdate) {\n const table = liteTableName(msg.table);\n let oldName = msg.old.name;\n const newName = msg.new.name;\n\n // update-column can ignore defaults because it does not change the values\n // in existing rows.\n //\n // https://www.postgresql.org/docs/current/sql-altertable.html#SQL-ALTERTABLE-DESC-SET-DROP-DEFAULT\n //\n // \"The new default value will only apply in subsequent INSERT or UPDATE\n // commands; it does not cause rows already in the table to change.\"\n //\n // This allows support for _changing_ column defaults to any expression,\n // since it does not affect what the replica needs to do.\n const oldSpec = mapPostgresToLiteColumn(table, msg.old, 'ignore-default');\n const newSpec = mapPostgresToLiteColumn(table, msg.new, 'ignore-default');\n\n // The only updates that are relevant are the column name and the data type.\n if (oldName === newName && oldSpec.dataType === newSpec.dataType) {\n this.#lc.info?.(msg.tag, 'no thing to update', oldSpec, newSpec);\n return;\n }\n // If the data type changes, we have to make a new column with the new data type\n // and copy the values over.\n if (oldSpec.dataType !== newSpec.dataType) {\n // Remember (and drop) the indexes that reference the column.\n const indexes = listIndexes(this.#db.db).filter(\n idx => idx.tableName === table && oldName in idx.columns,\n );\n const stmts = indexes.map(idx => `DROP INDEX IF EXISTS ${id(idx.name)};`);\n const tmpName = `tmp.${newName}`;\n stmts.push(`\n ALTER TABLE ${id(table)} ADD ${id(tmpName)} ${liteColumnDef(newSpec)};\n UPDATE ${id(table)} SET ${id(tmpName)} = ${id(oldName)};\n ALTER TABLE ${id(table)} DROP ${id(oldName)};\n `);\n for (const idx of indexes) {\n // Re-create the indexes to reference the new column.\n idx.columns[tmpName] = idx.columns[oldName];\n delete idx.columns[oldName];\n stmts.push(createLiteIndexStatement(idx));\n }\n this.#db.db.exec(stmts.join(''));\n oldName = tmpName;\n }\n if (oldName !== newName) {\n this.#db.db.exec(\n `ALTER TABLE ${id(table)} RENAME ${id(oldName)} TO ${id(newName)}`,\n );\n }\n\n // Update metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.update(table, msg.old.name, msg.new.name, msg.new.spec);\n }\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, msg.new);\n }\n\n processDropColumn(msg: ColumnDrop) {\n const table = liteTableName(msg.table);\n const {column} = msg;\n this.#db.db.exec(`ALTER TABLE ${id(table)} DROP ${id(column)}`);\n\n // Delete from metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.deleteColumn(table, column);\n }\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, column);\n }\n\n processDropTable(drop: TableDrop) {\n const name = liteTableName(drop.id);\n this.#db.db.exec(`DROP TABLE IF EXISTS ${id(name)}`);\n\n // Delete from metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) 
{\n store.deleteTable(name);\n }\n\n this.#logResetOp(name);\n this.#lc.info?.(drop.tag, name);\n }\n\n processCreateIndex(create: IndexCreate) {\n const index = mapPostgresToLiteIndex(create.spec);\n this.#db.db.exec(createLiteIndexStatement(index));\n\n // indexes affect tables visibility (e.g. sync-ability is gated on\n // having a unique index), so reset pipelines to refresh table schemas.\n this.#logResetOp(index.tableName);\n this.#lc.info?.(create.tag, index.name);\n }\n\n processDropIndex(drop: IndexDrop) {\n const name = liteTableName(drop.id);\n this.#db.db.exec(`DROP INDEX IF EXISTS ${id(name)}`);\n this.#lc.info?.(drop.tag, name);\n }\n\n #bumpVersions(table: string) {\n this.#db.run(\n `UPDATE ${id(table)} SET ${id(ZERO_VERSION_COLUMN_NAME)} = ?`,\n this.#version,\n );\n this.#logResetOp(table);\n }\n\n #logSetOp(table: string, key: LiteRowKey) {\n if (this.#mode === 'serving') {\n logSetOp(this.#db, this.#version, this.#pos++, table, key);\n }\n }\n\n #logDeleteOp(table: string, key: LiteRowKey) {\n if (this.#mode === 'serving') {\n logDeleteOp(this.#db, this.#version, this.#pos++, table, key);\n }\n }\n\n #logTruncateOp(table: string) {\n if (this.#mode === 'serving') {\n logTruncateOp(this.#db, this.#version, table);\n }\n }\n\n #logResetOp(table: string) {\n this.#schemaChanged = true;\n if (this.#mode === 'serving') {\n logResetOp(this.#db, this.#version, table);\n }\n this.#reloadTableSpecs();\n }\n\n /** @returns `true` if the schema was updated. */\n processCommit(commit: MessageCommit, watermark: string): boolean {\n if (watermark !== this.#version) {\n throw new Error(\n `'commit' version ${watermark} does not match 'begin' version ${\n this.#version\n }: ${stringify(commit)}`,\n );\n }\n updateReplicationWatermark(this.#db, watermark);\n\n if (this.#schemaChanged) {\n const start = Date.now();\n this.#db.db.pragma('optimize');\n this.#lc.info?.(\n `PRAGMA optimized after schema change (${Date.now() - start} ms)`,\n );\n }\n\n if (this.#mode !== 'initial-sync') {\n this.#db.commit();\n }\n\n const elapsedMs = Date.now() - this.#startMs;\n this.#lc.debug?.(`Committed tx@${this.#version} (${elapsedMs} ms)`);\n\n return this.#schemaChanged;\n }\n\n abort(lc: LogContext) {\n lc.info?.(`aborting transaction ${this.#version}`);\n this.#db.rollback();\n }\n}\n\nfunction ensureError(err: unknown): Error {\n if (err instanceof Error) {\n return err;\n }\n const error = new Error();\n error.cause = err;\n return 
error;\n}\n"],"names":[],"mappings":";;;;;;;;;;;;;;;AAmFO,MAAM,gBAAgB;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA,EAKA,kCAAkB,IAAA;AAAA,EAE3B,aAA0C;AAAA,EAE1C;AAAA,EAEA,YACE,IACA,MACA,aACA;AACA,SAAK,MAAM;AACX,SAAK,QAAQ;AACb,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,IAAgB,KAAc;AAClC,QAAI,CAAC,KAAK,UAAU;AAClB,WAAK,YAAY,MAAM,EAAE;AAEzB,WAAK,WAAW,YAAY,GAAG;AAE/B,UAAI,EAAE,eAAe,aAAa;AAEhC,WAAG,QAAQ,8BAA8B,KAAK,QAAQ;AACtD,aAAK,aAAa,IAAI,KAAK,QAAQ;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,IAAgB;AACpB,SAAK,MAAM,IAAI,IAAI,WAAA,CAAY;AAAA,EACjC;AAAA;AAAA,EAGA,eACE,IACA,YACqB;AACrB,UAAM,CAAC,MAAM,OAAO,IAAI;AACxB,QAAI,KAAK,UAAU;AACjB,SAAG,QAAQ,YAAY,QAAQ,GAAG,EAAE;AACpC,aAAO;AAAA,IACT;AACA,QAAI;AACF,YAAM,YACJ,SAAS,UACL,WAAW,CAAC,EAAE,kBACd,SAAS,WACP,WAAW,CAAC,EAAE,YACd;AACR,aAAO,KAAK,gBAAgB,IAAI,SAAS,SAAS;AAAA,IACpD,SAAS,GAAG;AACV,WAAK,MAAM,IAAI,CAAC;AAAA,IAClB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,kBACE,IACA,eACA,YACsB;AACtB,UAAM,QAAQ,KAAK,IAAA;AASnB,aAAS,IAAI,KAAK,KAAK;AACrB,UAAI;AACF,eAAO,IAAI;AAAA,UACT;AAAA,UACA,KAAK;AAAA,UACL,KAAK;AAAA,UACL,KAAK;AAAA,UACL;AAAA,UACA;AAAA,QAAA;AAAA,MAEJ,SAAS,GAAG;AACV,YAAI,aAAa,eAAe,EAAE,SAAS,eAAe;AACxD,aAAG;AAAA,YACD,mBAAmB,KAAK,IAAA,IAAQ,KAAK,gBAAgB,IAAI,CAAC;AAAA,YAG1D;AAAA,UAAA;AAEF;AAAA,QACF;AACA,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,gBACE,IACA,KACA,WACqB;AACrB,QAAI,IAAI,QAAQ,SAAS;AACvB,UAAI,KAAK,YAAY;AACnB,cAAM,IAAI,MAAM,4BAA4B,UAAU,GAAG,CAAC,EAAE;AAAA,MAC9D;AACA,WAAK,aAAa,KAAK;AAAA,QACrB;AAAA,QACA,KAAK,SAAS;AAAA,QACd,IAAI,QAAQ;AAAA,MAAA;AAEd,aAAO;AAAA,IACT;AAGA,UAAM,KAAK,KAAK;AAChB,QAAI,CAAC,IAAI;AACP,YAAM,IAAI;AAAA,QACR,4CAA4C,UAAU,GAAG,CAAC;AAAA,MAAA;AAAA,IAE9D;AAEA,QAAI,IAAI,QAAQ,UAAU;AAExB,WAAK,aAAa;AAElB,aAAO,SAAS;AAChB,YAAM,gBAAgB,GAAG,cAAc,KAAK,SAAS;AACrD,aAAO,EAAC,WAAW,cAAA;AAAA,IACrB;AAEA,QAAI,IAAI,QAAQ,YAAY;AAC1B,WAAK,YAAY,MAAM,EAAE;AACzB,WAAK,aAAa;AAClB,aAAO;AAAA,IACT;AAEA,YAAQ,IAAI,KAAA;AAAA,MACV,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,gBAAgB,GAAG;AACtB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF,KAAK;AACH,WAAG,oBAAoB,GAAG;AAC1B;AAAA,MACF,KAAK;AACH,WAAG,kBAAkB,GAAG;AACxB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF;AACE,oBAAe;AAAA,IAAA;AAGnB,WAAO;AAAA,EACT;AACF;AAuBA,MAAM,qBAAqB;AAAA,EAChB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,OAAO;AAAA,EACP,iBAAiB;AAAA,EAEjB,YACE,IACA,IACA,MACA,YACA,eACA,YACA;AACA,SAAK,WAAW,KAAK,IAAA;AACrB,SAAK,QAAQ;AACb,SAAK,cAAc;AAEnB,YAAQ,MAAA;AAAA,MACN,KAAK;AAQH,WAAG,gBAAA;AACH;AAAA,MACF,KAAK;AAKH,WAAG,eAAA;AACH;AAAA,MACF,KAAK;AAGH;AAAA,MACF;AACE,oBAAA;AAAA,IAAY;AAEhB,SAAK,MAAM;AACX,SAAK,WAAW;AAChB,SAAK,MAAM,GAAG,YAAY,WAAW,aAAa;AAClD,SAAK,cAAc;AAEnB,QAAI,KAAK,YAAY,SAAS,GAAG;AAC/B,WAAK,kBAAA;AAAA,IACP;AAAA,EACF;AAAA,EAEA,oBAAoB;AAClB,SAAK,YAAY,MAAA;AAEjB,UAAM,WAAW,gBAAgB,KAAK,KAAK,KAAK,IAAI,EAAE;AACtD,aAAS,QAAQ,WAAW,KAAK,IAAI,EAAE,GAAG;AACxC,UAAI,CAAC,KAAK,YAAY;AACpB,eAAO;AAAA,UACL,GAAG;AAAA,UACH,YAAY;AAAA,YACV,GAAI,SAAS,IAAI,KAAK,IAAI,GAAG,UAAU,cAAc,CAAA;AAAA,UAAC;AAAA,QACxD;AAAA,MAEJ;AACA,WAAK,YAAY,IAAI,KAAK,MAAM,IAAI;AAAA,IACtC;AAAA,EACF;AAAA,EAEA,WAAW,MAAc;AACvB,WAAO,KAAK,KAAK,YAAY,IAAI,IAAI,GAAG,iBAAiB,IAAI,EAAE;AAAA,EACjE;AAAA,EAEA,QACE,EAAC,KAAK,WACN,EAAC,YACW;AACZ,UAAM,aACJ,SAAS,oBAAoB,SACzB,SAAS,aACT,KAAK,WAAW,cAAc,QAAQ,CAAC,EAAE;AAC/C,QAAI,CAAC,YAAY,QAAQ;AACvB,YAAM,IAAI;AAAA,QACR,2BAA2B,SAAS,IAAI;
AAAA,MAAA;AAAA,IAE5C;AAGA,QAAI,YAAY,WAAW,QAAQ;AACjC,aAAO;AAAA,IACT;AACA,UAAM,MAAqC,CAAA;AAC3C,eAAW,OAAO,YAAY;AAC5B,UAAI,GAAG,IAAI,IAAI,GAAG;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,cAAc,QAAuB;AACnC,UAAM,QAAQ,cAAc,OAAO,QAAQ;AAC3C,UAAM,SAAS;AAAA,MACb,OAAO;AAAA,MACP,KAAK,WAAW,KAAK;AAAA,MACrB,KAAK;AAAA,IAAA;AAGP,SAAK,QAAQ,OAAO;AAAA,MAClB,GAAG,OAAO;AAAA,MACV,CAAC,wBAAwB,GAAG,KAAK;AAAA,IAAA,CAClC;AAED,QAAI,OAAO,SAAS,WAAW,WAAW,GAAG;AAQ3C;AAAA,IACF;AACA,UAAM,MAAM,KAAK,QAAQ,QAAQ,MAAM;AACvC,SAAK,UAAU,OAAO,GAAG;AAAA,EAC3B;AAAA,EAEA,QAAQ,OAAe,KAAc;AACnC,UAAM,UAAU,OAAO,KAAK,GAAG,EAAE,IAAI,CAAA,MAAK,GAAG,CAAC,CAAC;AAC/C,SAAK,IAAI;AAAA,MACP;AAAA,+BACyB,GAAG,KAAK,CAAC,KAAK,QAAQ,KAAK,GAAG,CAAC;AAAA,kBAC5C,MAAM,KAAK,EAAC,QAAQ,QAAQ,QAAO,EAAE,KAAK,GAAG,EAAE,KAAK,GAAG,CAAC;AAAA;AAAA,MAEpE,OAAO,OAAO,GAAG;AAAA,IAAA;AAAA,EAErB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,cAAc,QAAuB;AACnC,UAAM,QAAQ,cAAc,OAAO,QAAQ;AAC3C,UAAM,SAAS;AAAA,MACb,OAAO;AAAA,MACP,KAAK,WAAW,KAAK;AAAA,MACrB,KAAK;AAAA,IAAA;AAEP,UAAM,MAAM,EAAC,GAAG,OAAO,KAAK,CAAC,wBAAwB,GAAG,KAAK,SAAA;AAG7D,UAAM,SAAS,OAAO,MAClB,KAAK;AAAA,MACH,QAAQ,OAAO,KAAK,KAAK,WAAW,KAAK,GAAG,KAAK,WAAW;AAAA,MAC5D;AAAA,IAAA,IAEF;AACJ,UAAM,SAAS,KAAK,QAAQ,QAAQ,MAAM;AAE1C,QAAI,QAAQ;AACV,WAAK,aAAa,OAAO,MAAM;AAAA,IACjC;AACA,SAAK,UAAU,OAAO,MAAM;AAE5B,UAAM,UAAU,UAAU;AAC1B,UAAM,QAAQ,OAAO,KAAK,OAAO,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAC5D,UAAM,WAAW,OAAO,KAAK,GAAG,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAE3D,UAAM,EAAC,QAAA,IAAW,KAAK,IAAI;AAAA,MACzB;AAAA,eACS,GAAG,KAAK,CAAC;AAAA,cACV,SAAS,KAAK,GAAG,CAAC;AAAA,gBAChB,MAAM,KAAK,OAAO,CAAC;AAAA;AAAA,MAE7B,CAAC,GAAG,OAAO,OAAO,GAAG,GAAG,GAAG,OAAO,OAAO,OAAO,CAAC;AAAA,IAAA;AAKnD,QAAI,YAAY,GAAG;AACjB,WAAK,QAAQ,OAAO,GAAG;AAAA,IACzB;AAAA,EACF;AAAA,EAEA,cAAc,KAAoB;AAChC,UAAM,QAAQ,cAAc,IAAI,QAAQ;AACxC,UAAM,SAAS,KAAK;AAAA,MAClB,QAAQ,IAAI,KAAK,KAAK,WAAW,KAAK,GAAG,KAAK,WAAW;AAAA,MACzD;AAAA,IAAA;AAGF,SAAK,QAAQ,OAAO,MAAM;AAE1B,QAAI,KAAK,UAAU,WAAW;AAC5B,WAAK,aAAa,OAAO,MAAM;AAAA,IACjC;AAAA,EACF;AAAA,EAEA,QAAQ,OAAe,QAAoB;AACzC,UAAM,QAAQ,OAAO,KAAK,MAAM,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAC3D,SAAK,IAAI;AAAA,MACP,eAAe,GAAG,KAAK,CAAC,UAAU,MAAM,KAAK,OAAO,CAAC;AAAA,MACrD,OAAO,OAAO,MAAM;AAAA,IAAA;AAAA,EAExB;AAAA,EAEA,gBAAgB,UAA2B;AACzC,eAAW,YAAY,SAAS,WAAW;AACzC,YAAM,QAAQ,cAAc,QAAQ;AAEpC,WAAK,IAAI,IAAI,eAAe,GAAG,KAAK,CAAC,EAAE;AAGvC,WAAK,eAAe,KAAK;AAAA,IAC3B;AAAA,EACF;AAAA,EACA,mBAAmB,QAAqB;AACtC,UAAM,QAAQ,kBAAkB,OAAO,IAAI;AAC3C,SAAK,IAAI,GAAG,KAAK,yBAAyB,KAAK,CAAC;AAGhD,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,iBAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,OAAO,KAAK,OAAO,GAAG;AACpE,cAAM,OAAO,MAAM,MAAM,SAAS,OAAO;AAAA,MAC3C;AAAA,IACF;AAEA,SAAK,YAAY,MAAM,IAAI;AAC3B,SAAK,IAAI,OAAO,OAAO,KAAK,MAAM,IAAI;AAAA,EACxC;AAAA,EAEA,mBAAmB,QAAqB;AACtC,UAAM,UAAU,cAAc,OAAO,GAAG;AACxC,UAAM,UAAU,cAAc,OAAO,GAAG;AACxC,SAAK,IAAI,GAAG,KAAK,eAAe,GAAG,OAAO,CAAC,cAAc,GAAG,OAAO,CAAC,EAAE;AAGtE,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,YAAY,SAAS,OAAO;AAAA,IACpC;AAEA,SAAK,cAAc,OAAO;AAC1B,SAAK,YAAY,OAAO;AACxB,SAAK,IAAI,OAAO,OAAO,KAAK,SAAS,OAAO;AAAA,EAC9C;AAAA,EAEA,iBAAiB,KAAgB;AAC/B,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,UAAM,EAAC,SAAQ,IAAI;AACnB,UAAM,OAAO,wBAAwB,OAAO,IAAI,MAAM;AACtD,SAAK,IAAI,GAAG;AAAA,MACV,eAAe,GAAG,KAAK,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,cAAc,IAAI,CAAC;AAAA,IAAA;AAIjE,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,OAAO,OAAO,MAAM,IAAI,OAAO,IAAI;AAAA,IAC3C;AAEA,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,IAAI,MAAM;AAAA,EAC5C;AAAA,EAEA,oBAAoB,KAAmB;AACrC,UAAM,QAAQ,cAAc,IAAI,KAAK
;AACrC,QAAI,UAAU,IAAI,IAAI;AACtB,UAAM,UAAU,IAAI,IAAI;AAYxB,UAAM,UAAU,wBAAwB,OAAO,IAAI,KAAK,gBAAgB;AACxE,UAAM,UAAU,wBAAwB,OAAO,IAAI,KAAK,gBAAgB;AAGxE,QAAI,YAAY,WAAW,QAAQ,aAAa,QAAQ,UAAU;AAChE,WAAK,IAAI,OAAO,IAAI,KAAK,sBAAsB,SAAS,OAAO;AAC/D;AAAA,IACF;AAGA,QAAI,QAAQ,aAAa,QAAQ,UAAU;AAEzC,YAAM,UAAU,YAAY,KAAK,IAAI,EAAE,EAAE;AAAA,QACvC,CAAA,QAAO,IAAI,cAAc,SAAS,WAAW,IAAI;AAAA,MAAA;AAEnD,YAAM,QAAQ,QAAQ,IAAI,CAAA,QAAO,wBAAwB,GAAG,IAAI,IAAI,CAAC,GAAG;AACxE,YAAM,UAAU,OAAO,OAAO;AAC9B,YAAM,KAAK;AAAA,sBACK,GAAG,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAC,IAAI,cAAc,OAAO,CAAC;AAAA,iBAC3D,GAAG,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAC,MAAM,GAAG,OAAO,CAAC;AAAA,sBACxC,GAAG,KAAK,CAAC,SAAS,GAAG,OAAO,CAAC;AAAA,SAC1C;AACH,iBAAW,OAAO,SAAS;AAEzB,YAAI,QAAQ,OAAO,IAAI,IAAI,QAAQ,OAAO;AAC1C,eAAO,IAAI,QAAQ,OAAO;AAC1B,cAAM,KAAK,yBAAyB,GAAG,CAAC;AAAA,MAC1C;AACA,WAAK,IAAI,GAAG,KAAK,MAAM,KAAK,EAAE,CAAC;AAC/B,gBAAU;AAAA,IACZ;AACA,QAAI,YAAY,SAAS;AACvB,WAAK,IAAI,GAAG;AAAA,QACV,eAAe,GAAG,KAAK,CAAC,WAAW,GAAG,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC;AAAA,MAAA;AAAA,IAEpE;AAGA,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,OAAO,OAAO,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,IAAI;AAAA,IAC9D;AAEA,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,IAAI,GAAG;AAAA,EACzC;AAAA,EAEA,kBAAkB,KAAiB;AACjC,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,UAAM,EAAC,WAAU;AACjB,SAAK,IAAI,GAAG,KAAK,eAAe,GAAG,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,EAAE;AAG9D,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,aAAa,OAAO,MAAM;AAAA,IAClC;AAEA,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,MAAM;AAAA,EACxC;AAAA,EAEA,iBAAiB,MAAiB;AAChC,UAAM,OAAO,cAAc,KAAK,EAAE;AAClC,SAAK,IAAI,GAAG,KAAK,wBAAwB,GAAG,IAAI,CAAC,EAAE;AAGnD,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,YAAY,IAAI;AAAA,IACxB;AAEA,SAAK,YAAY,IAAI;AACrB,SAAK,IAAI,OAAO,KAAK,KAAK,IAAI;AAAA,EAChC;AAAA,EAEA,mBAAmB,QAAqB;AACtC,UAAM,QAAQ,uBAAuB,OAAO,IAAI;AAChD,SAAK,IAAI,GAAG,KAAK,yBAAyB,KAAK,CAAC;AAIhD,SAAK,YAAY,MAAM,SAAS;AAChC,SAAK,IAAI,OAAO,OAAO,KAAK,MAAM,IAAI;AAAA,EACxC;AAAA,EAEA,iBAAiB,MAAiB;AAChC,UAAM,OAAO,cAAc,KAAK,EAAE;AAClC,SAAK,IAAI,GAAG,KAAK,wBAAwB,GAAG,IAAI,CAAC,EAAE;AACnD,SAAK,IAAI,OAAO,KAAK,KAAK,IAAI;AAAA,EAChC;AAAA,EAEA,cAAc,OAAe;AAC3B,SAAK,IAAI;AAAA,MACP,UAAU,GAAG,KAAK,CAAC,QAAQ,GAAG,wBAAwB,CAAC;AAAA,MACvD,KAAK;AAAA,IAAA;AAEP,SAAK,YAAY,KAAK;AAAA,EACxB;AAAA,EAEA,UAAU,OAAe,KAAiB;AACxC,QAAI,KAAK,UAAU,WAAW;AAC5B,eAAS,KAAK,KAAK,KAAK,UAAU,KAAK,QAAQ,OAAO,GAAG;AAAA,IAC3D;AAAA,EACF;AAAA,EAEA,aAAa,OAAe,KAAiB;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,kBAAY,KAAK,KAAK,KAAK,UAAU,KAAK,QAAQ,OAAO,GAAG;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,eAAe,OAAe;AAC5B,QAAI,KAAK,UAAU,WAAW;AAC5B,oBAAc,KAAK,KAAK,KAAK,UAAU,KAAK;AAAA,IAC9C;AAAA,EACF;AAAA,EAEA,YAAY,OAAe;AACzB,SAAK,iBAAiB;AACtB,QAAI,KAAK,UAAU,WAAW;AAC5B,iBAAW,KAAK,KAAK,KAAK,UAAU,KAAK;AAAA,IAC3C;AACA,SAAK,kBAAA;AAAA,EACP;AAAA;AAAA,EAGA,cAAc,QAAuB,WAA4B;AAC/D,QAAI,cAAc,KAAK,UAAU;AAC/B,YAAM,IAAI;AAAA,QACR,oBAAoB,SAAS,mCAC3B,KAAK,QACP,KAAK,UAAU,MAAM,CAAC;AAAA,MAAA;AAAA,IAE1B;AACA,+BAA2B,KAAK,KAAK,SAAS;AAE9C,QAAI,KAAK,gBAAgB;AACvB,YAAM,QAAQ,KAAK,IAAA;AACnB,WAAK,IAAI,GAAG,OAAO,UAAU;AAC7B,WAAK,IAAI;AAAA,QACP,yCAAyC,KAAK,IAAA,IAAQ,KAAK;AAAA,MAAA;AAAA,IAE/D;AAEA,QAAI,KAAK,UAAU,gBAAgB;AACjC,WAAK,IAAI,OAAA;AAAA,IACX;AAEA,UAAM,YAAY,KAAK,IAAA,IAAQ,KAAK;AACpC,SAAK,IAAI,QAAQ,gBAAgB,KAAK,QAAQ,KAAK,SAAS,MAAM;AAElE,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,IAAgB;AACpB,OAAG,OAAO,wBAAwB,KAAK,QAAQ,EAAE;AACjD,SAAK,IAAI,SAAA;AAAA,EACX;AACF;AAEA,SAAS,YAAY,KAAqB;AACxC,MAAI,eAAe,OAAO;AACxB,WAAO;AAAA,EACT;AACA,QAAM,QAAQ,IAAI,MAAA;AAClB,QAAM,QAAQ;AACd,SAAO;AACT;"}
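One functional change is visible in the new `change-processor.js.map` source above: `TransactionProcessor` now keeps a `#pos` counter, initialized to 0 per transaction, and passes `this.#pos++` into `logSetOp` and `logDeleteOp`. Below is a minimal sketch of that pattern, using standalone stand-in types and an in-memory sink rather than the package's real `StatementRunner`; every name in the sketch is illustrative, not the package's API.

```ts
// Sketch only: standalone stand-ins for the package's internal types.
type LiteRowKey = Record<string, string | number | null>;

type ChangeLogEntry = {
  stateVersion: string; // commit watermark (LexiVersion)
  pos: number;          // order of the write within the transaction
  table: string;
  op: 's' | 'd';        // set or delete
  rowKey: LiteRowKey;
};

// Hypothetical in-memory sink standing in for logSetOp / logDeleteOp writes.
const changeLog: ChangeLogEntry[] = [];
const append = (entry: ChangeLogEntry) => changeLog.push(entry);

class TxSketch {
  readonly #version: string;
  #pos = 0; // starts at 0 for every transaction, mirroring TransactionProcessor

  constructor(version: string) {
    this.#version = version;
  }

  logSet(table: string, key: LiteRowKey) {
    append({stateVersion: this.#version, pos: this.#pos++, table, op: 's', rowKey: key});
  }

  logDelete(table: string, key: LiteRowKey) {
    append({stateVersion: this.#version, pos: this.#pos++, table, op: 'd', rowKey: key});
  }
}

// Two writes in the same transaction get pos 0 and 1 under one stateVersion.
const tx = new TxSketch('07');
tx.logSet('issue', {id: 'a1'});
tx.logDelete('comment', {id: 'c9'});
console.log(changeLog.map(e => `${e.stateVersion}/${e.pos} ${e.op} ${e.table}`));
// => [ '07/0 s issue', '07/1 d comment' ]
```

The reading that `pos` exists to give row-level change-log entries a stable order within a single commit version is an inference; the diff itself only shows the counter being threaded through to the change-log writers.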
@@ -40,13 +40,14 @@ export declare const RESET_OP = "r";
  export declare const changeLogEntrySchema: v.Type<{
      rowKey: Readonly<Record<string, import("../../../../../shared/src/bigint-json.ts").JSONValue>> | null;
      stateVersion: string;
+     pos: number;
      table: string;
      op: "d" | "r" | "s" | "t";
  }>;
  export type ChangeLogEntry = v.Infer<typeof changeLogEntrySchema>;
  export declare function initChangeLog(db: Database): void;
- export declare function logSetOp(db: StatementRunner, version: LexiVersion, table: string, row: LiteRowKey): string;
- export declare function logDeleteOp(db: StatementRunner, version: LexiVersion, table: string, row: LiteRowKey): string;
+ export declare function logSetOp(db: StatementRunner, version: LexiVersion, pos: number, table: string, row: LiteRowKey): string;
+ export declare function logDeleteOp(db: StatementRunner, version: LexiVersion, pos: number, table: string, row: LiteRowKey): string;
  export declare function logTruncateOp(db: StatementRunner, version: LexiVersion, table: string): void;
  export declare function logResetOp(db: StatementRunner, version: LexiVersion, table: string): void;
  //# sourceMappingURL=change-log.d.ts.map
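The declaration diff above adds a `pos: number` field to `changeLogEntrySchema` and a `pos` parameter to `logSetOp`/`logDeleteOp`, while `logTruncateOp` and `logResetOp` keep their old signatures. Assuming `pos` orders the entries emitted within one transaction, a consumer could sort change-log rows by `(stateVersion, pos)`; the sketch below uses a local type that mirrors the declared shape rather than the package's real schema module.

```ts
// Local copy of the shape declared by changeLogEntrySchema (for the sketch only).
type ChangeLogEntry = {
  rowKey: Readonly<Record<string, unknown>> | null;
  stateVersion: string;
  pos: number;
  table: string;
  op: 'd' | 'r' | 's' | 't';
};

// LexiVersions are designed to sort lexicographically, so a plain string
// comparison on stateVersion suffices; pos breaks ties within a version.
function compareEntries(a: ChangeLogEntry, b: ChangeLogEntry): number {
  if (a.stateVersion !== b.stateVersion) {
    return a.stateVersion < b.stateVersion ? -1 : 1;
  }
  return a.pos - b.pos;
}

const entries: ChangeLogEntry[] = [
  {rowKey: {id: 'c9'}, stateVersion: '07', pos: 1, table: 'comment', op: 'd'},
  {rowKey: {id: 'a1'}, stateVersion: '07', pos: 0, table: 'issue', op: 's'},
];
entries.sort(compareEntries);
console.log(entries.map(e => `${e.stateVersion}/${e.pos} ${e.op} ${e.table}`));
// => [ '07/0 s issue', '07/1 d comment' ]
```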
@@ -1 +1 @@
- {"version":3,"file":"change-log.d.ts","sourceRoot":"","sources":["../../../../../../../zero-cache/src/services/replicator/schema/change-log.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,CAAC,MAAM,qCAAqC,CAAC;AACzD,OAAO,KAAK,EAAC,QAAQ,EAAC,MAAM,iCAAiC,CAAC;AAC9D,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,2BAA2B,CAAC;AAC/D,OAAO,KAAK,EAAC,WAAW,EAAC,MAAM,gCAAgC,CAAC;AAChE,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,wBAAwB,CAAC;AAGvD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AAEH,eAAO,MAAM,MAAM,MAAM,CAAC;AAC1B,eAAO,MAAM,MAAM,MAAM,CAAC;AAC1B,eAAO,MAAM,WAAW,MAAM,CAAC;AAC/B,eAAO,MAAM,QAAQ,MAAM,CAAC;AA2B5B,eAAO,MAAM,oBAAoB;;;;;EAW5B,CAAC;AAEN,MAAM,MAAM,cAAc,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,oBAAoB,CAAC,CAAC;AAElE,wBAAgB,aAAa,CAAC,EAAE,EAAE,QAAQ,QAEzC;AAED,wBAAgB,QAAQ,CACtB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,KAAK,EAAE,MAAM,EACb,GAAG,EAAE,UAAU,GACd,MAAM,CAER;AAED,wBAAgB,WAAW,CACzB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,KAAK,EAAE,MAAM,EACb,GAAG,EAAE,UAAU,GACd,MAAM,CAER;AAoBD,wBAAgB,aAAa,CAC3B,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,KAAK,EAAE,MAAM,QAGd;AAED,wBAAgB,UAAU,CACxB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,KAAK,EAAE,MAAM,QAGd"}
+ {"version":3,"file":"change-log.d.ts","sourceRoot":"","sources":["../../../../../../../zero-cache/src/services/replicator/schema/change-log.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,CAAC,MAAM,qCAAqC,CAAC;AACzD,OAAO,KAAK,EAAC,QAAQ,EAAC,MAAM,iCAAiC,CAAC;AAC9D,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,2BAA2B,CAAC;AAC/D,OAAO,KAAK,EAAC,WAAW,EAAC,MAAM,gCAAgC,CAAC;AAChE,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,wBAAwB,CAAC;AAGvD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AAEH,eAAO,MAAM,MAAM,MAAM,CAAC;AAC1B,eAAO,MAAM,MAAM,MAAM,CAAC;AAC1B,eAAO,MAAM,WAAW,MAAM,CAAC;AAC/B,eAAO,MAAM,QAAQ,MAAM,CAAC;AAgC5B,eAAO,MAAM,oBAAoB;;;;;;EAe5B,CAAC;AAEN,MAAM,MAAM,cAAc,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,oBAAoB,CAAC,CAAC;AAElE,wBAAgB,aAAa,CAAC,EAAE,EAAE,QAAQ,QAEzC;AAED,wBAAgB,QAAQ,CACtB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,GAAG,EAAE,MAAM,EACX,KAAK,EAAE,MAAM,EACb,GAAG,EAAE,UAAU,GACd,MAAM,CAER;AAED,wBAAgB,WAAW,CACzB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,GAAG,EAAE,MAAM,EACX,KAAK,EAAE,MAAM,EACb,GAAG,EAAE,UAAU,GACd,MAAM,CAER;AAqBD,wBAAgB,aAAa,CAC3B,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,KAAK,EAAE,MAAM,QAGd;AAED,wBAAgB,UAAU,CACxB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,KAAK,EAAE,MAAM,QAGd"}