@rocicorp/zero 0.25.10-canary.9 → 0.25.11-canary.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/out/replicache/src/persist/refresh.d.ts.map +1 -1
- package/out/replicache/src/persist/refresh.js +0 -8
- package/out/replicache/src/persist/refresh.js.map +1 -1
- package/out/replicache/src/process-scheduler.d.ts +23 -0
- package/out/replicache/src/process-scheduler.d.ts.map +1 -1
- package/out/replicache/src/process-scheduler.js +50 -1
- package/out/replicache/src/process-scheduler.js.map +1 -1
- package/out/replicache/src/replicache-impl.d.ts +8 -0
- package/out/replicache/src/replicache-impl.d.ts.map +1 -1
- package/out/replicache/src/replicache-impl.js +10 -1
- package/out/replicache/src/replicache-impl.js.map +1 -1
- package/out/zero/package.json.js +1 -1
- package/out/zero-cache/src/custom/fetch.d.ts +3 -3
- package/out/zero-cache/src/custom/fetch.d.ts.map +1 -1
- package/out/zero-cache/src/custom/fetch.js +116 -76
- package/out/zero-cache/src/custom/fetch.js.map +1 -1
- package/out/zero-cache/src/custom-queries/transform-query.d.ts.map +1 -1
- package/out/zero-cache/src/custom-queries/transform-query.js +0 -1
- package/out/zero-cache/src/custom-queries/transform-query.js.map +1 -1
- package/out/zero-cache/src/server/anonymous-otel-start.d.ts.map +1 -1
- package/out/zero-cache/src/server/anonymous-otel-start.js +1 -0
- package/out/zero-cache/src/server/anonymous-otel-start.js.map +1 -1
- package/out/zero-cache/src/server/change-streamer.d.ts.map +1 -1
- package/out/zero-cache/src/server/change-streamer.js +6 -1
- package/out/zero-cache/src/server/change-streamer.js.map +1 -1
- package/out/zero-cache/src/server/inspector-delegate.d.ts.map +1 -1
- package/out/zero-cache/src/server/inspector-delegate.js +2 -2
- package/out/zero-cache/src/server/inspector-delegate.js.map +1 -1
- package/out/zero-cache/src/server/priority-op.d.ts +8 -0
- package/out/zero-cache/src/server/priority-op.d.ts.map +1 -0
- package/out/zero-cache/src/server/priority-op.js +29 -0
- package/out/zero-cache/src/server/priority-op.js.map +1 -0
- package/out/zero-cache/src/server/syncer.d.ts.map +1 -1
- package/out/zero-cache/src/server/syncer.js +9 -2
- package/out/zero-cache/src/server/syncer.js.map +1 -1
- package/out/zero-cache/src/services/analyze.js +1 -1
- package/out/zero-cache/src/services/analyze.js.map +1 -1
- package/out/zero-cache/src/services/change-source/replica-schema.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-source/replica-schema.js +5 -1
- package/out/zero-cache/src/services/change-source/replica-schema.js.map +1 -1
- package/out/zero-cache/src/services/change-streamer/backup-monitor.d.ts +1 -1
- package/out/zero-cache/src/services/change-streamer/backup-monitor.d.ts.map +1 -1
- package/out/zero-cache/src/services/change-streamer/backup-monitor.js +10 -6
- package/out/zero-cache/src/services/change-streamer/backup-monitor.js.map +1 -1
- package/out/zero-cache/src/services/change-streamer/change-streamer-http.js +2 -2
- package/out/zero-cache/src/services/change-streamer/change-streamer-http.js.map +1 -1
- package/out/zero-cache/src/services/mutagen/pusher.d.ts.map +1 -1
- package/out/zero-cache/src/services/mutagen/pusher.js +1 -3
- package/out/zero-cache/src/services/mutagen/pusher.js.map +1 -1
- package/out/zero-cache/src/services/replicator/replication-status.d.ts +2 -1
- package/out/zero-cache/src/services/replicator/replication-status.d.ts.map +1 -1
- package/out/zero-cache/src/services/replicator/replication-status.js +7 -3
- package/out/zero-cache/src/services/replicator/replication-status.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/cvr-store.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/cvr-store.js +60 -22
- package/out/zero-cache/src/services/view-syncer/cvr-store.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/cvr.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/cvr.js +2 -0
- package/out/zero-cache/src/services/view-syncer/cvr.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts +1 -1
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.js +2 -2
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/row-record-cache.d.ts +1 -1
- package/out/zero-cache/src/services/view-syncer/row-record-cache.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/row-record-cache.js +22 -11
- package/out/zero-cache/src/services/view-syncer/row-record-cache.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/view-syncer.d.ts +2 -1
- package/out/zero-cache/src/services/view-syncer/view-syncer.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/view-syncer.js +80 -52
- package/out/zero-cache/src/services/view-syncer/view-syncer.js.map +1 -1
- package/out/zero-cache/src/types/error-with-level.d.ts +1 -1
- package/out/zero-cache/src/types/error-with-level.d.ts.map +1 -1
- package/out/zero-cache/src/types/error-with-level.js +1 -1
- package/out/zero-cache/src/types/error-with-level.js.map +1 -1
- package/out/zero-client/src/client/connection-manager.d.ts +3 -0
- package/out/zero-client/src/client/connection-manager.d.ts.map +1 -1
- package/out/zero-client/src/client/connection-manager.js +10 -3
- package/out/zero-client/src/client/connection-manager.js.map +1 -1
- package/out/zero-client/src/client/error.d.ts +5 -1
- package/out/zero-client/src/client/error.d.ts.map +1 -1
- package/out/zero-client/src/client/error.js +3 -3
- package/out/zero-client/src/client/error.js.map +1 -1
- package/out/zero-client/src/client/options.d.ts +1 -1
- package/out/zero-client/src/client/options.js.map +1 -1
- package/out/zero-client/src/client/version.js +1 -1
- package/out/zero-client/src/client/zero.d.ts +2 -1
- package/out/zero-client/src/client/zero.d.ts.map +1 -1
- package/out/zero-client/src/client/zero.js +29 -3
- package/out/zero-client/src/client/zero.js.map +1 -1
- package/package.json +2 -2

package/out/zero-cache/src/services/mutagen/pusher.js.map
@@ -1 +1 @@
-
{"version":3,"file":"pusher.js","sources":["../../../../../../zero-cache/src/services/mutagen/pusher.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {groupBy} from '../../../../shared/src/arrays.ts';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport {getErrorMessage} from '../../../../shared/src/error.ts';\nimport {must} from '../../../../shared/src/must.ts';\nimport {Queue} from '../../../../shared/src/queue.ts';\nimport type {Downstream} from '../../../../zero-protocol/src/down.ts';\nimport {ErrorKind} from '../../../../zero-protocol/src/error-kind.ts';\nimport {ErrorOrigin} from '../../../../zero-protocol/src/error-origin.ts';\nimport {ErrorReason} from '../../../../zero-protocol/src/error-reason.ts';\nimport {\n isProtocolError,\n type PushFailedBody,\n} from '../../../../zero-protocol/src/error.ts';\nimport {\n pushResponseSchema,\n type MutationID,\n type PushBody,\n type PushResponse,\n} from '../../../../zero-protocol/src/push.ts';\nimport {type ZeroConfig} from '../../config/zero-config.ts';\nimport {compileUrlPattern, fetchFromAPIServer} from '../../custom/fetch.ts';\nimport {getOrCreateCounter} from '../../observability/metrics.ts';\nimport {recordMutation} from '../../server/anonymous-otel-start.ts';\nimport {ProtocolErrorWithLevel} from '../../types/error-with-level.ts';\nimport type {PostgresDB} from '../../types/pg.ts';\nimport {upstreamSchema} from '../../types/shards.ts';\nimport type {Source} from '../../types/streams.ts';\nimport {Subscription} from '../../types/subscription.ts';\nimport type {HandlerResult, StreamResult} from '../../workers/connection.ts';\nimport type {RefCountedService, Service} from '../service.ts';\n\nexport interface Pusher extends RefCountedService {\n readonly pushURL: string | undefined;\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n userPushHeaders: Record<string, string> | undefined,\n ): Source<Downstream>;\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n ): HandlerResult;\n ackMutationResponses(upToID: MutationID): Promise<void>;\n}\n\ntype Config = Pick<ZeroConfig, 'app' | 'shard'>;\n\n/**\n * Receives push messages from zero-client and forwards\n * them the the user's API server.\n *\n * If the user's API server is taking too long to process\n * the push, the PusherService will add the push to a queue\n * and send pushes in bulk the next time the user's API server\n * is available.\n *\n * - One PusherService exists per client group.\n * - Mutations for a given client are always sent in-order\n * - Mutations for different clients in the same group may be interleaved\n */\nexport class PusherService implements Service, Pusher {\n readonly id: string;\n readonly #pusher: PushWorker;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #pushConfig: ZeroConfig['push'] & {url: string[]};\n readonly #upstream: PostgresDB;\n readonly #config: Config;\n #stopped: Promise<void> | undefined;\n #refCount = 0;\n #isStopped = false;\n\n constructor(\n upstream: PostgresDB,\n appConfig: Config,\n pushConfig: ZeroConfig['push'] & {url: string[]},\n lc: LogContext,\n clientGroupID: string,\n ) {\n this.#config = appConfig;\n this.#upstream = upstream;\n this.#queue = new Queue();\n this.#pusher = new PushWorker(\n appConfig,\n lc,\n pushConfig.url,\n pushConfig.apiKey,\n this.#queue,\n );\n this.id = clientGroupID;\n this.#pushConfig = pushConfig;\n }\n\n get pushURL(): string 
| undefined {\n return this.#pusher.pushURL[0];\n }\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n userPushHeaders: Record<string, string> | undefined,\n ) {\n return this.#pusher.initConnection(\n clientID,\n wsID,\n userPushURL,\n userPushHeaders,\n );\n }\n\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n ): Exclude<HandlerResult, StreamResult> {\n if (!this.#pushConfig.forwardCookies) {\n httpCookie = undefined; // remove cookies if not forwarded\n }\n this.#queue.enqueue({push, auth, clientID, httpCookie});\n\n return {\n type: 'ok',\n };\n }\n\n async ackMutationResponses(upToID: MutationID) {\n // delete the relevant rows from the `mutations` table\n const sql = this.#upstream;\n await sql`DELETE FROM ${sql(\n upstreamSchema({\n appID: this.#config.app.id,\n shardNum: this.#config.shard.num,\n }),\n )}.mutations WHERE \"clientGroupID\" = ${this.id} AND \"clientID\" = ${upToID.clientID} AND \"mutationID\" <= ${upToID.id}`;\n }\n\n ref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n ++this.#refCount;\n }\n\n unref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n --this.#refCount;\n if (this.#refCount <= 0) {\n void this.stop();\n }\n }\n\n hasRefs(): boolean {\n return this.#refCount > 0;\n }\n\n run(): Promise<void> {\n this.#stopped = this.#pusher.run();\n return this.#stopped;\n }\n\n stop(): Promise<void> {\n if (this.#isStopped) {\n return must(this.#stopped, 'Stop was called before `run`');\n }\n this.#isStopped = true;\n this.#queue.enqueue('stop');\n return must(this.#stopped, 'Stop was called before `run`');\n }\n}\n\ntype PusherEntry = {\n push: PushBody;\n auth: string | undefined;\n httpCookie: string | undefined;\n clientID: string;\n};\ntype PusherEntryOrStop = PusherEntry | 'stop';\n\n/**\n * Awaits items in the queue then drains and sends them all\n * to the user's API server.\n */\nclass PushWorker {\n readonly #pushURLs: string[];\n readonly #pushURLPatterns: URLPattern[];\n readonly #apiKey: string | undefined;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #lc: LogContext;\n readonly #config: Config;\n readonly #clients: Map<\n string,\n {\n wsID: string;\n downstream: Subscription<Downstream>;\n }\n >;\n #userPushURL?: string | undefined;\n #userPushHeaders?: Record<string, string> | undefined;\n\n readonly #customMutations = getOrCreateCounter(\n 'mutation',\n 'custom',\n 'Number of custom mutations processed',\n );\n readonly #pushes = getOrCreateCounter(\n 'mutation',\n 'pushes',\n 'Number of pushes processed by the pusher',\n );\n\n constructor(\n config: Config,\n lc: LogContext,\n pushURL: string[],\n apiKey: string | undefined,\n queue: Queue<PusherEntryOrStop>,\n ) {\n this.#pushURLs = pushURL;\n this.#lc = lc.withContext('component', 'pusher');\n this.#pushURLPatterns = pushURL.map(compileUrlPattern);\n this.#apiKey = apiKey;\n this.#queue = queue;\n this.#config = config;\n this.#clients = new Map();\n }\n\n get pushURL() {\n return this.#pushURLs;\n }\n\n /**\n * Returns a new downstream stream if the clientID,wsID pair has not been seen before.\n * If a clientID already exists with a different wsID, that client's downstream is cancelled.\n */\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n userPushHeaders: Record<string, string> | undefined,\n ) {\n const existing = this.#clients.get(clientID);\n if (existing && existing.wsID === wsID) {\n // 
already initialized for this socket\n throw new Error('Connection was already initialized');\n }\n\n // client is back on a new connection\n if (existing) {\n existing.downstream.cancel();\n }\n\n // Handle client group level URL parameters\n if (this.#userPushURL === undefined) {\n // First client in the group - store its URL and headers\n this.#userPushURL = userPushURL;\n this.#userPushHeaders = userPushHeaders;\n } else {\n // Validate that subsequent clients have compatible parameters\n if (this.#userPushURL !== userPushURL) {\n this.#lc.warn?.(\n 'Client provided different mutate parameters than client group',\n {\n clientID,\n clientURL: userPushURL,\n clientGroupURL: this.#userPushURL,\n },\n );\n }\n }\n\n const downstream = Subscription.create<Downstream>({\n cleanup: () => {\n this.#clients.delete(clientID);\n },\n });\n this.#clients.set(clientID, {wsID, downstream});\n return downstream;\n }\n\n async run() {\n for (;;) {\n const task = await this.#queue.dequeue();\n const rest = this.#queue.drain();\n const [pushes, terminate] = combinePushes([task, ...rest]);\n for (const push of pushes) {\n const response = await this.#processPush(push);\n await this.#fanOutResponses(response);\n }\n\n if (terminate) {\n break;\n }\n }\n }\n\n /**\n * 1. If the entire `push` fails, we send the error to relevant clients.\n * 2. If the push succeeds, we look for any mutation failure that should cause the connection to terminate\n * and terminate the connection for those clients.\n */\n #fanOutResponses(response: PushResponse) {\n const connectionTerminations: (() => void)[] = [];\n\n // if the entire push failed, send that to the client.\n if ('kind' in response || 'error' in response) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a push error.',\n response,\n );\n const groupedMutationIDs = groupBy(\n response.mutationIDs ?? [],\n m => m.clientID,\n );\n for (const [clientID, mutationIDs] of groupedMutationIDs) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n // We do not resolve mutations on the client if the push fails\n // as those mutations will be retried.\n if ('error' in response) {\n // This error code path will eventually be removed when we\n // no longer support the legacy push error format.\n const pushFailedBody: PushFailedBody =\n response.error === 'http'\n ? {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.HTTP,\n status: response.status,\n bodyPreview: response.details,\n mutationIDs,\n message: `Fetch from API server returned non-OK status ${response.status}`,\n }\n : response.error === 'unsupportedPushVersion'\n ? {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.UnsupportedPushVersion,\n mutationIDs,\n message: `Unsupported push version`,\n }\n : {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.Internal,\n mutationIDs,\n message:\n response.error === 'zeroPusher'\n ? response.details\n : response.error === 'unsupportedSchemaVersion'\n ? 
'Unsupported schema version'\n : 'An unknown error occurred while pushing to the API server',\n };\n\n this.#failDownstream(client.downstream, pushFailedBody);\n } else if ('kind' in response) {\n this.#failDownstream(client.downstream, response);\n } else {\n unreachable(response);\n }\n }\n } else {\n // Look for mutations results that should cause us to terminate the connection\n const groupedMutations = groupBy(response.mutations, m => m.id.clientID);\n for (const [clientID, mutations] of groupedMutations) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n let failure: PushFailedBody | undefined;\n let i = 0;\n for (; i < mutations.length; i++) {\n const m = mutations[i];\n if ('error' in m.result) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a mutation error.',\n m.result,\n );\n }\n // This error code path will eventually be removed,\n // keeping this for backwards compatibility, but the server\n // should now return a PushFailedBody with the mutationIDs\n if ('error' in m.result && m.result.error === 'oooMutation') {\n failure = {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.OutOfOrderMutation,\n message: 'mutation was out of order',\n details: m.result.details,\n mutationIDs: mutations.map(m => ({\n clientID: m.id.clientID,\n id: m.id.id,\n })),\n };\n break;\n }\n }\n\n if (failure && i < mutations.length - 1) {\n this.#lc.warn?.(\n 'push-response contains mutations after a mutation which should fatal the connection',\n );\n }\n\n if (failure) {\n connectionTerminations.push(() =>\n this.#failDownstream(client.downstream, failure),\n );\n }\n }\n }\n\n connectionTerminations.forEach(cb => cb());\n }\n\n async #processPush(entry: PusherEntry): Promise<PushResponse> {\n this.#customMutations.add(entry.push.mutations.length, {\n clientGroupID: entry.push.clientGroupID,\n });\n this.#pushes.add(1, {\n clientGroupID: entry.push.clientGroupID,\n });\n\n // Record custom mutations for telemetry\n recordMutation('custom', entry.push.mutations.length);\n\n const url =\n this.#userPushURL ??\n must(this.#pushURLs[0], 'ZERO_MUTATE_URL is not set');\n\n this.#lc.debug?.(\n 'pushing to',\n url,\n 'with',\n entry.push.mutations.length,\n 'mutations',\n );\n\n let mutationIDs: MutationID[] = [];\n\n try {\n mutationIDs = entry.push.mutations.map(m => ({\n id: m.id,\n clientID: m.clientID,\n }));\n\n return await fetchFromAPIServer(\n pushResponseSchema,\n 'push',\n this.#lc,\n url,\n url === this.#userPushURL,\n this.#pushURLPatterns,\n {\n appID: this.#config.app.id,\n shardNum: this.#config.shard.num,\n },\n {\n apiKey: this.#apiKey,\n customHeaders: this.#userPushHeaders,\n token: entry.auth,\n cookie: entry.httpCookie,\n },\n entry.push,\n );\n } catch (e) {\n if (isProtocolError(e) && e.errorBody.kind === ErrorKind.PushFailed) {\n return {\n ...e.errorBody,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n\n return {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.Internal,\n message: `Failed to push: ${getErrorMessage(e)}`,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n }\n\n #failDownstream(\n downstream: Subscription<Downstream>,\n errorBody: PushFailedBody,\n ): void {\n const logLevel = errorBody.origin === ErrorOrigin.Server ? 
'warn' : 'error';\n downstream.fail(new ProtocolErrorWithLevel(errorBody, logLevel));\n }\n}\n\n/**\n * Pushes for different clientIDs could theoretically be interleaved.\n *\n * In order to do efficient batching to the user's API server,\n * we collect all pushes for the same clientID into a single push.\n */\nexport function combinePushes(\n entries: readonly (PusherEntryOrStop | undefined)[],\n): [PusherEntry[], boolean] {\n const pushesByClientID = new Map<string, PusherEntry[]>();\n\n function collect() {\n const ret: PusherEntry[] = [];\n for (const entries of pushesByClientID.values()) {\n const composite: PusherEntry = {\n ...entries[0],\n push: {\n ...entries[0].push,\n mutations: [],\n },\n };\n ret.push(composite);\n for (const entry of entries) {\n assertAreCompatiblePushes(composite, entry);\n composite.push.mutations.push(...entry.push.mutations);\n }\n }\n return ret;\n }\n\n for (const entry of entries) {\n if (entry === 'stop' || entry === undefined) {\n return [collect(), true];\n }\n\n const {clientID} = entry;\n const existing = pushesByClientID.get(clientID);\n if (existing) {\n existing.push(entry);\n } else {\n pushesByClientID.set(clientID, [entry]);\n }\n }\n\n return [collect(), false] as const;\n}\n\n// These invariants should always be true for a given clientID.\n// If they are not, we have a bug in the code somewhere.\nfunction assertAreCompatiblePushes(left: PusherEntry, right: PusherEntry) {\n assert(\n left.clientID === right.clientID,\n 'clientID must be the same for all pushes',\n );\n assert(\n left.auth === right.auth,\n 'auth must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.schemaVersion === right.push.schemaVersion,\n 'schemaVersion must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.pushVersion === right.push.pushVersion,\n 'pushVersion must be the same for all pushes with the same clientID',\n );\n assert(\n left.httpCookie === right.httpCookie,\n 'httpCookie must be the same for all pushes with the same clientID',\n 
);\n}\n"],"names":["ErrorKind.PushFailed","ErrorOrigin.ZeroCache","ErrorReason.HTTP","ErrorOrigin.Server","ErrorReason.UnsupportedPushVersion","ErrorReason.Internal","ErrorReason.OutOfOrderMutation","m","entries"],"mappings":";;;;;;;;;;;;;;;;;AAiEO,MAAM,cAAyC;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT;AAAA,EACA,YAAY;AAAA,EACZ,aAAa;AAAA,EAEb,YACE,UACA,WACA,YACA,IACA,eACA;AACA,SAAK,UAAU;AACf,SAAK,YAAY;AACjB,SAAK,SAAS,IAAI,MAAA;AAClB,SAAK,UAAU,IAAI;AAAA,MACjB;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,WAAW;AAAA,MACX,KAAK;AAAA,IAAA;AAEP,SAAK,KAAK;AACV,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,IAAI,UAA8B;AAChC,WAAO,KAAK,QAAQ,QAAQ,CAAC;AAAA,EAC/B;AAAA,EAEA,eACE,UACA,MACA,aACA,iBACA;AACA,WAAO,KAAK,QAAQ;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAAA,EAEA,YACE,UACA,MACA,MACA,YACsC;AACtC,QAAI,CAAC,KAAK,YAAY,gBAAgB;AACpC,mBAAa;AAAA,IACf;AACA,SAAK,OAAO,QAAQ,EAAC,MAAM,MAAM,UAAU,YAAW;AAEtD,WAAO;AAAA,MACL,MAAM;AAAA,IAAA;AAAA,EAEV;AAAA,EAEA,MAAM,qBAAqB,QAAoB;AAE7C,UAAM,MAAM,KAAK;AACjB,UAAM,kBAAkB;AAAA,MACtB,eAAe;AAAA,QACb,OAAO,KAAK,QAAQ,IAAI;AAAA,QACxB,UAAU,KAAK,QAAQ,MAAM;AAAA,MAAA,CAC9B;AAAA,IAAA,CACF,sCAAsC,KAAK,EAAE,qBAAqB,OAAO,QAAQ,wBAAwB,OAAO,EAAE;AAAA,EACrH;AAAA,EAEA,MAAM;AACJ,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AAAA,EACT;AAAA,EAEA,QAAQ;AACN,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AACP,QAAI,KAAK,aAAa,GAAG;AACvB,WAAK,KAAK,KAAA;AAAA,IACZ;AAAA,EACF;AAAA,EAEA,UAAmB;AACjB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,MAAqB;AACnB,SAAK,WAAW,KAAK,QAAQ,IAAA;AAC7B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAAsB;AACpB,QAAI,KAAK,YAAY;AACnB,aAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,IAC3D;AACA,SAAK,aAAa;AAClB,SAAK,OAAO,QAAQ,MAAM;AAC1B,WAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,EAC3D;AACF;AAcA,MAAM,WAAW;AAAA,EACN;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAOT;AAAA,EACA;AAAA,EAES,mBAAmB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAEO,UAAU;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAGF,YACE,QACA,IACA,SACA,QACA,OACA;AACA,SAAK,YAAY;AACjB,SAAK,MAAM,GAAG,YAAY,aAAa,QAAQ;AAC/C,SAAK,mBAAmB,QAAQ,IAAI,iBAAiB;AACrD,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,UAAU;AACf,SAAK,+BAAe,IAAA;AAAA,EACtB;AAAA,EAEA,IAAI,UAAU;AACZ,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,eACE,UACA,MACA,aACA,iBACA;AACA,UAAM,WAAW,KAAK,SAAS,IAAI,QAAQ;AAC3C,QAAI,YAAY,SAAS,SAAS,MAAM;AAEtC,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAGA,QAAI,UAAU;AACZ,eAAS,WAAW,OAAA;AAAA,IACtB;AAGA,QAAI,KAAK,iBAAiB,QAAW;AAEnC,WAAK,eAAe;AACpB,WAAK,mBAAmB;AAAA,IAC1B,OAAO;AAEL,UAAI,KAAK,iBAAiB,aAAa;AACrC,aAAK,IAAI;AAAA,UACP;AAAA,UACA;AAAA,YACE;AAAA,YACA,WAAW;AAAA,YACX,gBAAgB,KAAK;AAAA,UAAA;AAAA,QACvB;AAAA,MAEJ;AAAA,IACF;AAEA,UAAM,aAAa,aAAa,OAAmB;AAAA,MACjD,SAAS,MAAM;AACb,aAAK,SAAS,OAAO,QAAQ;AAAA,MAC/B;AAAA,IAAA,CACD;AACD,SAAK,SAAS,IAAI,UAAU,EAAC,MAAM,YAAW;AAC9C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,MAAM;AACV,eAAS;AACP,YAAM,OAAO,MAAM,KAAK,OAAO,QAAA;AAC/B,YAAM,OAAO,KAAK,OAAO,MAAA;AACzB,YAAM,CAAC,QAAQ,SAAS,IAAI,cAAc,CAAC,MAAM,GAAG,IAAI,CAAC;AACzD,iBAAW,QAAQ,QAAQ;AACzB,cAAM,WAAW,MAAM,KAAK,aAAa,IAAI;AAC7C,cAAM,KAAK,iBAAiB,QAAQ;AAAA,MACtC;AAEA,UAAI,WAAW;AACb;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,UAAwB;AACvC,UAAM,yBAAyC,CAAA;AAG/C,QAAI,UAAU,YAAY,WAAW,UAAU;AAC7C,WAAK,IAAI;AAAA,QACP;AAAA,QACA;AAAA,MAAA;AAEF,YAAM,qBAAqB;AAAA,QACzB,SAAS,eAAe,CAAA;AAAA,QACxB,OAAK,EAAE;AAAA,MAAA;AAET,iBAAW,CAAC,UAAU,WAAW,KAAK,oBAAoB;AACxD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAIA,YAAI,WAAW,UAAU;AAGvB,gBAAM,iBACJ,SAAS,UAAU,SACf;AAAA,YACE,MAAMA;AAAAA,YACN,QAAQC;AAAAA,YACR,QAAQC;AAAAA,YACR,QAAQ,SAAS;AAAA,YACjB,aAAa,SAAS;AAAA,YACtB;AAAA,YACA,SAA
S,gDAAgD,SAAS,MAAM;AAAA,UAAA,IAE1E,SAAS,UAAU,2BACjB;AAAA,YACE,MAAMF;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQC;AAAAA,YACR;AAAA,YACA,SAAS;AAAA,UAAA,IAEX;AAAA,YACE,MAAMJ;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQE;AAAAA,YACR;AAAA,YACA,SACE,SAAS,UAAU,eACf,SAAS,UACT,SAAS,UAAU,6BACjB,+BACA;AAAA,UAAA;AAGlB,eAAK,gBAAgB,OAAO,YAAY,cAAc;AAAA,QACxD,WAAW,UAAU,UAAU;AAC7B,eAAK,gBAAgB,OAAO,YAAY,QAAQ;AAAA,QAClD,OAAO;AACL,sBAAoB;AAAA,QACtB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,mBAAmB,QAAQ,SAAS,WAAW,CAAA,MAAK,EAAE,GAAG,QAAQ;AACvE,iBAAW,CAAC,UAAU,SAAS,KAAK,kBAAkB;AACpD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAEA,YAAI;AACJ,YAAI,IAAI;AACR,eAAO,IAAI,UAAU,QAAQ,KAAK;AAChC,gBAAM,IAAI,UAAU,CAAC;AACrB,cAAI,WAAW,EAAE,QAAQ;AACvB,iBAAK,IAAI;AAAA,cACP;AAAA,cACA,EAAE;AAAA,YAAA;AAAA,UAEN;AAIA,cAAI,WAAW,EAAE,UAAU,EAAE,OAAO,UAAU,eAAe;AAC3D,sBAAU;AAAA,cACR,MAAML;AAAAA,cACN,QAAQG;AAAAA,cACR,QAAQG;AAAAA,cACR,SAAS;AAAA,cACT,SAAS,EAAE,OAAO;AAAA,cAClB,aAAa,UAAU,IAAI,CAAAC,QAAM;AAAA,gBAC/B,UAAUA,GAAE,GAAG;AAAA,gBACf,IAAIA,GAAE,GAAG;AAAA,cAAA,EACT;AAAA,YAAA;AAEJ;AAAA,UACF;AAAA,QACF;AAEA,YAAI,WAAW,IAAI,UAAU,SAAS,GAAG;AACvC,eAAK,IAAI;AAAA,YACP;AAAA,UAAA;AAAA,QAEJ;AAEA,YAAI,SAAS;AACX,iCAAuB;AAAA,YAAK,MAC1B,KAAK,gBAAgB,OAAO,YAAY,OAAO;AAAA,UAAA;AAAA,QAEnD;AAAA,MACF;AAAA,IACF;AAEA,2BAAuB,QAAQ,CAAA,OAAM,GAAA,CAAI;AAAA,EAC3C;AAAA,EAEA,MAAM,aAAa,OAA2C;AAC5D,SAAK,iBAAiB,IAAI,MAAM,KAAK,UAAU,QAAQ;AAAA,MACrD,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AACD,SAAK,QAAQ,IAAI,GAAG;AAAA,MAClB,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AAGD,mBAAe,UAAU,MAAM,KAAK,UAAU,MAAM;AAEpD,UAAM,MACJ,KAAK,gBACL,KAAK,KAAK,UAAU,CAAC,GAAG,4BAA4B;AAEtD,SAAK,IAAI;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,MACrB;AAAA,IAAA;AAGF,QAAI,cAA4B,CAAA;AAEhC,QAAI;AACF,oBAAc,MAAM,KAAK,UAAU,IAAI,CAAA,OAAM;AAAA,QAC3C,IAAI,EAAE;AAAA,QACN,UAAU,EAAE;AAAA,MAAA,EACZ;AAEF,aAAO,MAAM;AAAA,QACX;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA,QAAQ,KAAK;AAAA,QACb,KAAK;AAAA,QACL;AAAA,UACE,OAAO,KAAK,QAAQ,IAAI;AAAA,UACxB,UAAU,KAAK,QAAQ,MAAM;AAAA,QAAA;AAAA,QAE/B;AAAA,UACE,QAAQ,KAAK;AAAA,UACb,eAAe,KAAK;AAAA,UACpB,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,QAAA;AAAA,QAEhB,MAAM;AAAA,MAAA;AAAA,IAEV,SAAS,GAAG;AACV,UAAI,gBAAgB,CAAC,KAAK,EAAE,UAAU,SAASP,YAAsB;AACnE,eAAO;AAAA,UACL,GAAG,EAAE;AAAA,UACL;AAAA,QAAA;AAAA,MAEJ;AAEA,aAAO;AAAA,QACL,MAAMA;AAAAA,QACN,QAAQC;AAAAA,QACR,QAAQI;AAAAA,QACR,SAAS,mBAAmB,gBAAgB,CAAC,CAAC;AAAA,QAC9C;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF;AAAA,EAEA,gBACE,YACA,WACM;AACN,UAAM,WAAW,UAAU,WAAWF,SAAqB,SAAS;AACpE,eAAW,KAAK,IAAI,uBAAuB,WAAW,QAAQ,CAAC;AAAA,EACjE;AACF;AAQO,SAAS,cACd,SAC0B;AAC1B,QAAM,uCAAuB,IAAA;AAE7B,WAAS,UAAU;AACjB,UAAM,MAAqB,CAAA;AAC3B,eAAWK,YAAW,iBAAiB,UAAU;AAC/C,YAAM,YAAyB;AAAA,QAC7B,GAAGA,SAAQ,CAAC;AAAA,QACZ,MAAM;AAAA,UACJ,GAAGA,SAAQ,CAAC,EAAE;AAAA,UACd,WAAW,CAAA;AAAA,QAAC;AAAA,MACd;AAEF,UAAI,KAAK,SAAS;AAClB,iBAAW,SAASA,UAAS;AAC3B,kCAA0B,WAAW,KAAK;AAC1C,kBAAU,KAAK,UAAU,KAAK,GAAG,MAAM,KAAK,SAAS;AAAA,MACvD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,aAAW,SAAS,SAAS;AAC3B,QAAI,UAAU,UAAU,UAAU,QAAW;AAC3C,aAAO,CAAC,QAAA,GAAW,IAAI;AAAA,IACzB;AAEA,UAAM,EAAC,aAAY;AACnB,UAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK;AAAA,IACrB,OAAO;AACL,uBAAiB,IAAI,UAAU,CAAC,KAAK,CAAC;AAAA,IACxC;AAAA,EACF;AAEA,SAAO,CAAC,QAAA,GAAW,KAAK;AAC1B;AAIA,SAAS,0BAA0B,MAAmB,OAAoB;AACxE;AAAA,IACE,KAAK,aAAa,MAAM;AAAA,IACxB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,SAAS,MAAM;AAAA,IACpB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,kBAAkB,MAAM,KAAK;AAAA,IACvC;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,gBAAgB,MAAM,KAAK;AAAA,IACrC;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,eAAe,MAAM;AAAA,IAC1B;AAAA,EAAA;AAEJ;"}
+
{"version":3,"file":"pusher.js","sources":["../../../../../../zero-cache/src/services/mutagen/pusher.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {groupBy} from '../../../../shared/src/arrays.ts';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport {getErrorMessage} from '../../../../shared/src/error.ts';\nimport {must} from '../../../../shared/src/must.ts';\nimport {Queue} from '../../../../shared/src/queue.ts';\nimport type {Downstream} from '../../../../zero-protocol/src/down.ts';\nimport {ErrorKind} from '../../../../zero-protocol/src/error-kind.ts';\nimport {ErrorOrigin} from '../../../../zero-protocol/src/error-origin.ts';\nimport {ErrorReason} from '../../../../zero-protocol/src/error-reason.ts';\nimport {\n isProtocolError,\n type PushFailedBody,\n} from '../../../../zero-protocol/src/error.ts';\nimport {\n pushResponseSchema,\n type MutationID,\n type PushBody,\n type PushResponse,\n} from '../../../../zero-protocol/src/push.ts';\nimport {type ZeroConfig} from '../../config/zero-config.ts';\nimport {compileUrlPattern, fetchFromAPIServer} from '../../custom/fetch.ts';\nimport {getOrCreateCounter} from '../../observability/metrics.ts';\nimport {recordMutation} from '../../server/anonymous-otel-start.ts';\nimport {ProtocolErrorWithLevel} from '../../types/error-with-level.ts';\nimport type {PostgresDB} from '../../types/pg.ts';\nimport {upstreamSchema} from '../../types/shards.ts';\nimport type {Source} from '../../types/streams.ts';\nimport {Subscription} from '../../types/subscription.ts';\nimport type {HandlerResult, StreamResult} from '../../workers/connection.ts';\nimport type {RefCountedService, Service} from '../service.ts';\n\nexport interface Pusher extends RefCountedService {\n readonly pushURL: string | undefined;\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n userPushHeaders: Record<string, string> | undefined,\n ): Source<Downstream>;\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n ): HandlerResult;\n ackMutationResponses(upToID: MutationID): Promise<void>;\n}\n\ntype Config = Pick<ZeroConfig, 'app' | 'shard'>;\n\n/**\n * Receives push messages from zero-client and forwards\n * them the the user's API server.\n *\n * If the user's API server is taking too long to process\n * the push, the PusherService will add the push to a queue\n * and send pushes in bulk the next time the user's API server\n * is available.\n *\n * - One PusherService exists per client group.\n * - Mutations for a given client are always sent in-order\n * - Mutations for different clients in the same group may be interleaved\n */\nexport class PusherService implements Service, Pusher {\n readonly id: string;\n readonly #pusher: PushWorker;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #pushConfig: ZeroConfig['push'] & {url: string[]};\n readonly #upstream: PostgresDB;\n readonly #config: Config;\n #stopped: Promise<void> | undefined;\n #refCount = 0;\n #isStopped = false;\n\n constructor(\n upstream: PostgresDB,\n appConfig: Config,\n pushConfig: ZeroConfig['push'] & {url: string[]},\n lc: LogContext,\n clientGroupID: string,\n ) {\n this.#config = appConfig;\n this.#upstream = upstream;\n this.#queue = new Queue();\n this.#pusher = new PushWorker(\n appConfig,\n lc,\n pushConfig.url,\n pushConfig.apiKey,\n this.#queue,\n );\n this.id = clientGroupID;\n this.#pushConfig = pushConfig;\n }\n\n get pushURL(): string 
| undefined {\n return this.#pusher.pushURL[0];\n }\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n userPushHeaders: Record<string, string> | undefined,\n ) {\n return this.#pusher.initConnection(\n clientID,\n wsID,\n userPushURL,\n userPushHeaders,\n );\n }\n\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n ): Exclude<HandlerResult, StreamResult> {\n if (!this.#pushConfig.forwardCookies) {\n httpCookie = undefined; // remove cookies if not forwarded\n }\n this.#queue.enqueue({push, auth, clientID, httpCookie});\n\n return {\n type: 'ok',\n };\n }\n\n async ackMutationResponses(upToID: MutationID) {\n // delete the relevant rows from the `mutations` table\n const sql = this.#upstream;\n await sql`DELETE FROM ${sql(\n upstreamSchema({\n appID: this.#config.app.id,\n shardNum: this.#config.shard.num,\n }),\n )}.mutations WHERE \"clientGroupID\" = ${this.id} AND \"clientID\" = ${upToID.clientID} AND \"mutationID\" <= ${upToID.id}`;\n }\n\n ref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n ++this.#refCount;\n }\n\n unref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n --this.#refCount;\n if (this.#refCount <= 0) {\n void this.stop();\n }\n }\n\n hasRefs(): boolean {\n return this.#refCount > 0;\n }\n\n run(): Promise<void> {\n this.#stopped = this.#pusher.run();\n return this.#stopped;\n }\n\n stop(): Promise<void> {\n if (this.#isStopped) {\n return must(this.#stopped, 'Stop was called before `run`');\n }\n this.#isStopped = true;\n this.#queue.enqueue('stop');\n return must(this.#stopped, 'Stop was called before `run`');\n }\n}\n\ntype PusherEntry = {\n push: PushBody;\n auth: string | undefined;\n httpCookie: string | undefined;\n clientID: string;\n};\ntype PusherEntryOrStop = PusherEntry | 'stop';\n\n/**\n * Awaits items in the queue then drains and sends them all\n * to the user's API server.\n */\nclass PushWorker {\n readonly #pushURLs: string[];\n readonly #pushURLPatterns: URLPattern[];\n readonly #apiKey: string | undefined;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #lc: LogContext;\n readonly #config: Config;\n readonly #clients: Map<\n string,\n {\n wsID: string;\n downstream: Subscription<Downstream>;\n }\n >;\n #userPushURL?: string | undefined;\n #userPushHeaders?: Record<string, string> | undefined;\n\n readonly #customMutations = getOrCreateCounter(\n 'mutation',\n 'custom',\n 'Number of custom mutations processed',\n );\n readonly #pushes = getOrCreateCounter(\n 'mutation',\n 'pushes',\n 'Number of pushes processed by the pusher',\n );\n\n constructor(\n config: Config,\n lc: LogContext,\n pushURL: string[],\n apiKey: string | undefined,\n queue: Queue<PusherEntryOrStop>,\n ) {\n this.#pushURLs = pushURL;\n this.#lc = lc.withContext('component', 'pusher');\n this.#pushURLPatterns = pushURL.map(compileUrlPattern);\n this.#apiKey = apiKey;\n this.#queue = queue;\n this.#config = config;\n this.#clients = new Map();\n }\n\n get pushURL() {\n return this.#pushURLs;\n }\n\n /**\n * Returns a new downstream stream if the clientID,wsID pair has not been seen before.\n * If a clientID already exists with a different wsID, that client's downstream is cancelled.\n */\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n userPushHeaders: Record<string, string> | undefined,\n ) {\n const existing = this.#clients.get(clientID);\n if (existing && existing.wsID === wsID) {\n // 
already initialized for this socket\n throw new Error('Connection was already initialized');\n }\n\n // client is back on a new connection\n if (existing) {\n existing.downstream.cancel();\n }\n\n // Handle client group level URL parameters\n if (this.#userPushURL === undefined) {\n // First client in the group - store its URL and headers\n this.#userPushURL = userPushURL;\n this.#userPushHeaders = userPushHeaders;\n } else {\n // Validate that subsequent clients have compatible parameters\n if (this.#userPushURL !== userPushURL) {\n this.#lc.warn?.(\n 'Client provided different mutate parameters than client group',\n {\n clientID,\n clientURL: userPushURL,\n clientGroupURL: this.#userPushURL,\n },\n );\n }\n }\n\n const downstream = Subscription.create<Downstream>({\n cleanup: () => {\n this.#clients.delete(clientID);\n },\n });\n this.#clients.set(clientID, {wsID, downstream});\n return downstream;\n }\n\n async run() {\n for (;;) {\n const task = await this.#queue.dequeue();\n const rest = this.#queue.drain();\n const [pushes, terminate] = combinePushes([task, ...rest]);\n for (const push of pushes) {\n const response = await this.#processPush(push);\n await this.#fanOutResponses(response);\n }\n\n if (terminate) {\n break;\n }\n }\n }\n\n /**\n * 1. If the entire `push` fails, we send the error to relevant clients.\n * 2. If the push succeeds, we look for any mutation failure that should cause the connection to terminate\n * and terminate the connection for those clients.\n */\n #fanOutResponses(response: PushResponse) {\n const connectionTerminations: (() => void)[] = [];\n\n // if the entire push failed, send that to the client.\n if ('kind' in response || 'error' in response) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a push error.',\n response,\n );\n const groupedMutationIDs = groupBy(\n response.mutationIDs ?? [],\n m => m.clientID,\n );\n for (const [clientID, mutationIDs] of groupedMutationIDs) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n // We do not resolve mutations on the client if the push fails\n // as those mutations will be retried.\n if ('error' in response) {\n // This error code path will eventually be removed when we\n // no longer support the legacy push error format.\n const pushFailedBody: PushFailedBody =\n response.error === 'http'\n ? {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.HTTP,\n status: response.status,\n bodyPreview: response.details,\n mutationIDs,\n message: `Fetch from API server returned non-OK status ${response.status}`,\n }\n : response.error === 'unsupportedPushVersion'\n ? {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.UnsupportedPushVersion,\n mutationIDs,\n message: `Unsupported push version`,\n }\n : {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.Internal,\n mutationIDs,\n message:\n response.error === 'zeroPusher'\n ? response.details\n : response.error === 'unsupportedSchemaVersion'\n ? 
'Unsupported schema version'\n : 'An unknown error occurred while pushing to the API server',\n };\n\n this.#failDownstream(client.downstream, pushFailedBody);\n } else if ('kind' in response) {\n this.#failDownstream(client.downstream, response);\n } else {\n unreachable(response);\n }\n }\n } else {\n // Look for mutations results that should cause us to terminate the connection\n const groupedMutations = groupBy(response.mutations, m => m.id.clientID);\n for (const [clientID, mutations] of groupedMutations) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n let failure: PushFailedBody | undefined;\n let i = 0;\n for (; i < mutations.length; i++) {\n const m = mutations[i];\n if ('error' in m.result) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a mutation error.',\n m.result,\n );\n }\n // This error code path will eventually be removed,\n // keeping this for backwards compatibility, but the server\n // should now return a PushFailedBody with the mutationIDs\n if ('error' in m.result && m.result.error === 'oooMutation') {\n failure = {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.OutOfOrderMutation,\n message: 'mutation was out of order',\n details: m.result.details,\n mutationIDs: mutations.map(m => ({\n clientID: m.id.clientID,\n id: m.id.id,\n })),\n };\n break;\n }\n }\n\n if (failure && i < mutations.length - 1) {\n this.#lc.warn?.(\n 'push-response contains mutations after a mutation which should fatal the connection',\n );\n }\n\n if (failure) {\n connectionTerminations.push(() =>\n this.#failDownstream(client.downstream, failure),\n );\n }\n }\n }\n\n connectionTerminations.forEach(cb => cb());\n }\n\n async #processPush(entry: PusherEntry): Promise<PushResponse> {\n this.#customMutations.add(entry.push.mutations.length, {\n clientGroupID: entry.push.clientGroupID,\n });\n this.#pushes.add(1, {\n clientGroupID: entry.push.clientGroupID,\n });\n\n // Record custom mutations for telemetry\n recordMutation('custom', entry.push.mutations.length);\n\n const url =\n this.#userPushURL ??\n must(this.#pushURLs[0], 'ZERO_MUTATE_URL is not set');\n\n this.#lc.debug?.(\n 'pushing to',\n url,\n 'with',\n entry.push.mutations.length,\n 'mutations',\n );\n\n let mutationIDs: MutationID[] = [];\n\n try {\n mutationIDs = entry.push.mutations.map(m => ({\n id: m.id,\n clientID: m.clientID,\n }));\n\n return await fetchFromAPIServer(\n pushResponseSchema,\n 'push',\n this.#lc,\n url,\n this.#pushURLPatterns,\n {\n appID: this.#config.app.id,\n shardNum: this.#config.shard.num,\n },\n {\n apiKey: this.#apiKey,\n customHeaders: this.#userPushHeaders,\n token: entry.auth,\n cookie: entry.httpCookie,\n },\n entry.push,\n );\n } catch (e) {\n if (isProtocolError(e) && e.errorBody.kind === ErrorKind.PushFailed) {\n return {\n ...e.errorBody,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n\n return {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.Internal,\n message: `Failed to push: ${getErrorMessage(e)}`,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n }\n\n #failDownstream(\n downstream: Subscription<Downstream>,\n errorBody: PushFailedBody,\n ): void {\n downstream.fail(new ProtocolErrorWithLevel(errorBody, 'warn'));\n }\n}\n\n/**\n * Pushes for different clientIDs could theoretically be interleaved.\n *\n * In order to do efficient batching to the user's API server,\n * we collect all pushes for the same clientID into a single push.\n */\nexport 
function combinePushes(\n entries: readonly (PusherEntryOrStop | undefined)[],\n): [PusherEntry[], boolean] {\n const pushesByClientID = new Map<string, PusherEntry[]>();\n\n function collect() {\n const ret: PusherEntry[] = [];\n for (const entries of pushesByClientID.values()) {\n const composite: PusherEntry = {\n ...entries[0],\n push: {\n ...entries[0].push,\n mutations: [],\n },\n };\n ret.push(composite);\n for (const entry of entries) {\n assertAreCompatiblePushes(composite, entry);\n composite.push.mutations.push(...entry.push.mutations);\n }\n }\n return ret;\n }\n\n for (const entry of entries) {\n if (entry === 'stop' || entry === undefined) {\n return [collect(), true];\n }\n\n const {clientID} = entry;\n const existing = pushesByClientID.get(clientID);\n if (existing) {\n existing.push(entry);\n } else {\n pushesByClientID.set(clientID, [entry]);\n }\n }\n\n return [collect(), false] as const;\n}\n\n// These invariants should always be true for a given clientID.\n// If they are not, we have a bug in the code somewhere.\nfunction assertAreCompatiblePushes(left: PusherEntry, right: PusherEntry) {\n assert(\n left.clientID === right.clientID,\n 'clientID must be the same for all pushes',\n );\n assert(\n left.auth === right.auth,\n 'auth must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.schemaVersion === right.push.schemaVersion,\n 'schemaVersion must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.pushVersion === right.push.pushVersion,\n 'pushVersion must be the same for all pushes with the same clientID',\n );\n assert(\n left.httpCookie === right.httpCookie,\n 'httpCookie must be the same for all pushes with the same clientID',\n );\n}\n"],"names":["ErrorKind.PushFailed","ErrorOrigin.ZeroCache","ErrorReason.HTTP","ErrorOrigin.Server","ErrorReason.UnsupportedPushVersion","ErrorReason.Internal","ErrorReason.OutOfOrderMutation","m","entries"],"mappings":";;;;;;;;;;;;;;;;;AAiEO,MAAM,cAAyC;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT;AAAA,EACA,YAAY;AAAA,EACZ,aAAa;AAAA,EAEb,YACE,UACA,WACA,YACA,IACA,eACA;AACA,SAAK,UAAU;AACf,SAAK,YAAY;AACjB,SAAK,SAAS,IAAI,MAAA;AAClB,SAAK,UAAU,IAAI;AAAA,MACjB;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,WAAW;AAAA,MACX,KAAK;AAAA,IAAA;AAEP,SAAK,KAAK;AACV,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,IAAI,UAA8B;AAChC,WAAO,KAAK,QAAQ,QAAQ,CAAC;AAAA,EAC/B;AAAA,EAEA,eACE,UACA,MACA,aACA,iBACA;AACA,WAAO,KAAK,QAAQ;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAAA,EAEA,YACE,UACA,MACA,MACA,YACsC;AACtC,QAAI,CAAC,KAAK,YAAY,gBAAgB;AACpC,mBAAa;AAAA,IACf;AACA,SAAK,OAAO,QAAQ,EAAC,MAAM,MAAM,UAAU,YAAW;AAEtD,WAAO;AAAA,MACL,MAAM;AAAA,IAAA;AAAA,EAEV;AAAA,EAEA,MAAM,qBAAqB,QAAoB;AAE7C,UAAM,MAAM,KAAK;AACjB,UAAM,kBAAkB;AAAA,MACtB,eAAe;AAAA,QACb,OAAO,KAAK,QAAQ,IAAI;AAAA,QACxB,UAAU,KAAK,QAAQ,MAAM;AAAA,MAAA,CAC9B;AAAA,IAAA,CACF,sCAAsC,KAAK,EAAE,qBAAqB,OAAO,QAAQ,wBAAwB,OAAO,EAAE;AAAA,EACrH;AAAA,EAEA,MAAM;AACJ,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AAAA,EACT;AAAA,EAEA,QAAQ;AACN,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AACP,QAAI,KAAK,aAAa,GAAG;AACvB,WAAK,KAAK,KAAA;AAAA,IACZ;AAAA,EACF;AAAA,EAEA,UAAmB;AACjB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,MAAqB;AACnB,SAAK,WAAW,KAAK,QAAQ,IAAA;AAC7B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAAsB;AACpB,QAAI,KAAK,YAAY;AACnB,aAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,IAC3D;AACA,SAAK,aAAa;AAClB,SAAK,OAAO,QAAQ,MAAM;AAC1B,WAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,EAC3D;AACF;AAcA,MAAM,WAAW;AAAA,EACN;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAOT;AAAA,EACA;AAAA,EAES,mBAAmB;AAAA,IA
C1B;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAEO,UAAU;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAGF,YACE,QACA,IACA,SACA,QACA,OACA;AACA,SAAK,YAAY;AACjB,SAAK,MAAM,GAAG,YAAY,aAAa,QAAQ;AAC/C,SAAK,mBAAmB,QAAQ,IAAI,iBAAiB;AACrD,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,UAAU;AACf,SAAK,+BAAe,IAAA;AAAA,EACtB;AAAA,EAEA,IAAI,UAAU;AACZ,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,eACE,UACA,MACA,aACA,iBACA;AACA,UAAM,WAAW,KAAK,SAAS,IAAI,QAAQ;AAC3C,QAAI,YAAY,SAAS,SAAS,MAAM;AAEtC,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAGA,QAAI,UAAU;AACZ,eAAS,WAAW,OAAA;AAAA,IACtB;AAGA,QAAI,KAAK,iBAAiB,QAAW;AAEnC,WAAK,eAAe;AACpB,WAAK,mBAAmB;AAAA,IAC1B,OAAO;AAEL,UAAI,KAAK,iBAAiB,aAAa;AACrC,aAAK,IAAI;AAAA,UACP;AAAA,UACA;AAAA,YACE;AAAA,YACA,WAAW;AAAA,YACX,gBAAgB,KAAK;AAAA,UAAA;AAAA,QACvB;AAAA,MAEJ;AAAA,IACF;AAEA,UAAM,aAAa,aAAa,OAAmB;AAAA,MACjD,SAAS,MAAM;AACb,aAAK,SAAS,OAAO,QAAQ;AAAA,MAC/B;AAAA,IAAA,CACD;AACD,SAAK,SAAS,IAAI,UAAU,EAAC,MAAM,YAAW;AAC9C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,MAAM;AACV,eAAS;AACP,YAAM,OAAO,MAAM,KAAK,OAAO,QAAA;AAC/B,YAAM,OAAO,KAAK,OAAO,MAAA;AACzB,YAAM,CAAC,QAAQ,SAAS,IAAI,cAAc,CAAC,MAAM,GAAG,IAAI,CAAC;AACzD,iBAAW,QAAQ,QAAQ;AACzB,cAAM,WAAW,MAAM,KAAK,aAAa,IAAI;AAC7C,cAAM,KAAK,iBAAiB,QAAQ;AAAA,MACtC;AAEA,UAAI,WAAW;AACb;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,UAAwB;AACvC,UAAM,yBAAyC,CAAA;AAG/C,QAAI,UAAU,YAAY,WAAW,UAAU;AAC7C,WAAK,IAAI;AAAA,QACP;AAAA,QACA;AAAA,MAAA;AAEF,YAAM,qBAAqB;AAAA,QACzB,SAAS,eAAe,CAAA;AAAA,QACxB,OAAK,EAAE;AAAA,MAAA;AAET,iBAAW,CAAC,UAAU,WAAW,KAAK,oBAAoB;AACxD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAIA,YAAI,WAAW,UAAU;AAGvB,gBAAM,iBACJ,SAAS,UAAU,SACf;AAAA,YACE,MAAMA;AAAAA,YACN,QAAQC;AAAAA,YACR,QAAQC;AAAAA,YACR,QAAQ,SAAS;AAAA,YACjB,aAAa,SAAS;AAAA,YACtB;AAAA,YACA,SAAS,gDAAgD,SAAS,MAAM;AAAA,UAAA,IAE1E,SAAS,UAAU,2BACjB;AAAA,YACE,MAAMF;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQC;AAAAA,YACR;AAAA,YACA,SAAS;AAAA,UAAA,IAEX;AAAA,YACE,MAAMJ;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQE;AAAAA,YACR;AAAA,YACA,SACE,SAAS,UAAU,eACf,SAAS,UACT,SAAS,UAAU,6BACjB,+BACA;AAAA,UAAA;AAGlB,eAAK,gBAAgB,OAAO,YAAY,cAAc;AAAA,QACxD,WAAW,UAAU,UAAU;AAC7B,eAAK,gBAAgB,OAAO,YAAY,QAAQ;AAAA,QAClD,OAAO;AACL,sBAAoB;AAAA,QACtB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,mBAAmB,QAAQ,SAAS,WAAW,CAAA,MAAK,EAAE,GAAG,QAAQ;AACvE,iBAAW,CAAC,UAAU,SAAS,KAAK,kBAAkB;AACpD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAEA,YAAI;AACJ,YAAI,IAAI;AACR,eAAO,IAAI,UAAU,QAAQ,KAAK;AAChC,gBAAM,IAAI,UAAU,CAAC;AACrB,cAAI,WAAW,EAAE,QAAQ;AACvB,iBAAK,IAAI;AAAA,cACP;AAAA,cACA,EAAE;AAAA,YAAA;AAAA,UAEN;AAIA,cAAI,WAAW,EAAE,UAAU,EAAE,OAAO,UAAU,eAAe;AAC3D,sBAAU;AAAA,cACR,MAAML;AAAAA,cACN,QAAQG;AAAAA,cACR,QAAQG;AAAAA,cACR,SAAS;AAAA,cACT,SAAS,EAAE,OAAO;AAAA,cAClB,aAAa,UAAU,IAAI,CAAAC,QAAM;AAAA,gBAC/B,UAAUA,GAAE,GAAG;AAAA,gBACf,IAAIA,GAAE,GAAG;AAAA,cAAA,EACT;AAAA,YAAA;AAEJ;AAAA,UACF;AAAA,QACF;AAEA,YAAI,WAAW,IAAI,UAAU,SAAS,GAAG;AACvC,eAAK,IAAI;AAAA,YACP;AAAA,UAAA;AAAA,QAEJ;AAEA,YAAI,SAAS;AACX,iCAAuB;AAAA,YAAK,MAC1B,KAAK,gBAAgB,OAAO,YAAY,OAAO;AAAA,UAAA;AAAA,QAEnD;AAAA,MACF;AAAA,IACF;AAEA,2BAAuB,QAAQ,CAAA,OAAM,GAAA,CAAI;AAAA,EAC3C;AAAA,EAEA,MAAM,aAAa,OAA2C;AAC5D,SAAK,iBAAiB,IAAI,MAAM,KAAK,UAAU,QAAQ;AAAA,MACrD,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AACD,SAAK,QAAQ,IAAI,GAAG;AAAA,MAClB,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AAGD,mBAAe,UAAU,MAAM,KAAK,UAAU,MAAM;AAEpD,UAAM,MACJ,KAAK,gBACL,KAAK,KAAK,UAAU,CAAC,GAAG,4BAA4B;AAEtD,SAAK,IAAI;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,MACrB;AAAA,IAAA;AAGF,QAAI,cAA4B,CAAA;AAEhC,QAAI;AACF,oBAAc,MAAM,KAAK,UAAU,IAAI,CAAA,OAAM;AAAA,QAC3C,IAAI,EAAE;AAAA,QACN,UAAU,EAAE;AAAA,MAAA,EACZ;AAEF,aAAO,MAAM;AAAA,QA
CX;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA,KAAK;AAAA,QACL;AAAA,UACE,OAAO,KAAK,QAAQ,IAAI;AAAA,UACxB,UAAU,KAAK,QAAQ,MAAM;AAAA,QAAA;AAAA,QAE/B;AAAA,UACE,QAAQ,KAAK;AAAA,UACb,eAAe,KAAK;AAAA,UACpB,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,QAAA;AAAA,QAEhB,MAAM;AAAA,MAAA;AAAA,IAEV,SAAS,GAAG;AACV,UAAI,gBAAgB,CAAC,KAAK,EAAE,UAAU,SAASP,YAAsB;AACnE,eAAO;AAAA,UACL,GAAG,EAAE;AAAA,UACL;AAAA,QAAA;AAAA,MAEJ;AAEA,aAAO;AAAA,QACL,MAAMA;AAAAA,QACN,QAAQC;AAAAA,QACR,QAAQI;AAAAA,QACR,SAAS,mBAAmB,gBAAgB,CAAC,CAAC;AAAA,QAC9C;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF;AAAA,EAEA,gBACE,YACA,WACM;AACN,eAAW,KAAK,IAAI,uBAAuB,WAAW,MAAM,CAAC;AAAA,EAC/D;AACF;AAQO,SAAS,cACd,SAC0B;AAC1B,QAAM,uCAAuB,IAAA;AAE7B,WAAS,UAAU;AACjB,UAAM,MAAqB,CAAA;AAC3B,eAAWG,YAAW,iBAAiB,UAAU;AAC/C,YAAM,YAAyB;AAAA,QAC7B,GAAGA,SAAQ,CAAC;AAAA,QACZ,MAAM;AAAA,UACJ,GAAGA,SAAQ,CAAC,EAAE;AAAA,UACd,WAAW,CAAA;AAAA,QAAC;AAAA,MACd;AAEF,UAAI,KAAK,SAAS;AAClB,iBAAW,SAASA,UAAS;AAC3B,kCAA0B,WAAW,KAAK;AAC1C,kBAAU,KAAK,UAAU,KAAK,GAAG,MAAM,KAAK,SAAS;AAAA,MACvD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,aAAW,SAAS,SAAS;AAC3B,QAAI,UAAU,UAAU,UAAU,QAAW;AAC3C,aAAO,CAAC,QAAA,GAAW,IAAI;AAAA,IACzB;AAEA,UAAM,EAAC,aAAY;AACnB,UAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK;AAAA,IACrB,OAAO;AACL,uBAAiB,IAAI,UAAU,CAAC,KAAK,CAAC;AAAA,IACxC;AAAA,EACF;AAEA,SAAO,CAAC,QAAA,GAAW,KAAK;AAC1B;AAIA,SAAS,0BAA0B,MAAmB,OAAoB;AACxE;AAAA,IACE,KAAK,aAAa,MAAM;AAAA,IACxB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,SAAS,MAAM;AAAA,IACpB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,kBAAkB,MAAM,KAAK;AAAA,IACvC;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,gBAAgB,MAAM,KAAK;AAAA,IACrC;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,eAAe,MAAM;AAAA,IAC1B;AAAA,EAAA;AAEJ;"}

package/out/zero-cache/src/services/replicator/replication-status.d.ts
@@ -10,5 +10,6 @@ export declare class ReplicationStatusPublisher
     stop(): this;
 }
 export declare function publishReplicationError(lc: LogContext, stage: ReplicationStage, description: string, errorDetails?: JSONObject, now?: Date): Promise<void>;
-export declare function
+export declare function replicationStatusError(lc: LogContext, stage: ReplicationStage, e: unknown, now?: Date): ReplicationStatusEvent;
+export declare function replicationStatusEvent(lc: LogContext, db: Database | null, stage: ReplicationStage, status: Status, description?: string, now?: Date): ReplicationStatusEvent;
 //# sourceMappingURL=replication-status.d.ts.map

package/out/zero-cache/src/services/replicator/replication-status.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"replication-status.d.ts","sourceRoot":"","sources":["../../../../../../zero-cache/src/services/replicator/replication-status.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAEjD,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,qCAAqC,CAAC;AACpE,OAAO,KAAK,EAGV,gBAAgB,EAChB,sBAAsB,EACtB,MAAM,EACP,MAAM,uCAAuC,CAAC;AAC/C,OAAO,KAAK,EAAC,QAAQ,EAAC,MAAM,8BAA8B,CAAC;AAY3D,qBAAa,0BAA0B;;gBAIzB,EAAE,EAAE,QAAQ;IAIxB,OAAO,CACL,EAAE,EAAE,UAAU,EACd,KAAK,EAAE,gBAAgB,EACvB,WAAW,CAAC,EAAE,MAAM,EACpB,QAAQ,SAAI,GACX,IAAI;IAgBD,oBAAoB,CACxB,EAAE,EAAE,UAAU,EACd,KAAK,EAAE,gBAAgB,EACvB,CAAC,EAAE,OAAO,GACT,OAAO,CAAC,KAAK,CAAC;IAcjB,IAAI,IAAI,IAAI;CAIb;AAED,wBAAsB,uBAAuB,CAC3C,EAAE,EAAE,UAAU,EACd,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,YAAY,CAAC,EAAE,UAAU,EACzB,GAAG,OAAa,iBAYjB;AAGD,wBAAgB,sBAAsB,CACpC,EAAE,EAAE,UAAU,EACd,EAAE,EAAE,QAAQ,
+
{"version":3,"file":"replication-status.d.ts","sourceRoot":"","sources":["../../../../../../zero-cache/src/services/replicator/replication-status.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAEjD,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,qCAAqC,CAAC;AACpE,OAAO,KAAK,EAGV,gBAAgB,EAChB,sBAAsB,EACtB,MAAM,EACP,MAAM,uCAAuC,CAAC;AAC/C,OAAO,KAAK,EAAC,QAAQ,EAAC,MAAM,8BAA8B,CAAC;AAY3D,qBAAa,0BAA0B;;gBAIzB,EAAE,EAAE,QAAQ;IAIxB,OAAO,CACL,EAAE,EAAE,UAAU,EACd,KAAK,EAAE,gBAAgB,EACvB,WAAW,CAAC,EAAE,MAAM,EACpB,QAAQ,SAAI,GACX,IAAI;IAgBD,oBAAoB,CACxB,EAAE,EAAE,UAAU,EACd,KAAK,EAAE,gBAAgB,EACvB,CAAC,EAAE,OAAO,GACT,OAAO,CAAC,KAAK,CAAC;IAcjB,IAAI,IAAI,IAAI;CAIb;AAED,wBAAsB,uBAAuB,CAC3C,EAAE,EAAE,UAAU,EACd,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,YAAY,CAAC,EAAE,UAAU,EACzB,GAAG,OAAa,iBAYjB;AAED,wBAAgB,sBAAsB,CACpC,EAAE,EAAE,UAAU,EACd,KAAK,EAAE,gBAAgB,EACvB,CAAC,EAAE,OAAO,EACV,GAAG,OAAa,0BAGjB;AAGD,wBAAgB,sBAAsB,CACpC,EAAE,EAAE,UAAU,EACd,EAAE,EAAE,QAAQ,GAAG,IAAI,EACnB,KAAK,EAAE,gBAAgB,EACvB,MAAM,EAAE,MAAM,EACd,WAAW,CAAC,EAAE,MAAM,EACpB,GAAG,OAAa,GACf,sBAAsB,CA+BxB"}

package/out/zero-cache/src/services/replicator/replication-status.js
@@ -52,6 +52,9 @@ async function publishReplicationError(lc, stage, description, errorDetails, now
   };
   await publishCriticalEvent(lc, event);
 }
+function replicationStatusError(lc, stage, e, now = /* @__PURE__ */ new Date()) {
+  return replicationStatusEvent(lc, null, stage, "ERROR", String(e), now);
+}
 function replicationStatusEvent(lc, db, stage, status, description, now = /* @__PURE__ */ new Date()) {
   try {
     return {
@@ -62,9 +65,9 @@ function replicationStatusEvent(lc, db, stage, status, description, now = /* @__
       description,
       time: now.toISOString(),
       state: {
-        tables: getReplicatedTables(db),
-        indexes: getReplicatedIndexes(db),
-        replicaSize: getReplicaSize(db)
+        tables: db ? getReplicatedTables(db) : [],
+        indexes: db ? getReplicatedIndexes(db) : [],
+        replicaSize: db ? getReplicaSize(db) : void 0
       }
     };
   } catch (e) {
@@ -119,6 +122,7 @@ function getReplicaSize(db) {
 export {
   ReplicationStatusPublisher,
   publishReplicationError,
+  replicationStatusError,
   replicationStatusEvent
 };
 //# sourceMappingURL=replication-status.js.map
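
The replication-status hunks above boil down to a new `replicationStatusError` export plus a `Database | null` parameter on `replicationStatusEvent`. Below is a minimal sketch of how the changed surface might be called, based only on the declarations and compiled output shown in this diff; the `statusEvents` wrapper, its placement next to `replication-status.ts`, and the `'replica ready'` description are illustrative assumptions, not part of the package.

```ts
import type {LogContext} from '@rocicorp/logger';
import type {ReplicationStage} from '../../../../zero-events/src/status.ts';
import type {Database} from '../../../../zqlite/src/db.ts';
import {
  replicationStatusError,
  replicationStatusEvent,
} from './replication-status.ts';

// Hypothetical caller illustrating the two call shapes touched by this diff.
export function statusEvents(
  lc: LogContext,
  stage: ReplicationStage,
  db: Database | null,
  e: unknown,
) {
  // With a live replica handle, state carries tables/indexes/replicaSize.
  const ok = replicationStatusEvent(lc, db, stage, 'OK', 'replica ready');

  // New export: an ERROR event built without a Database handle. Per the
  // compiled hunk above it delegates to replicationStatusEvent(lc, null, ...),
  // so state.tables/state.indexes fall back to [] and replicaSize to undefined.
  const err = replicationStatusError(lc, stage, e);

  return {ok, err};
}
```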

package/out/zero-cache/src/services/replicator/replication-status.js.map
@@ -1 +1 @@
-
{"version":3,"file":"replication-status.js","sources":["../../../../../../zero-cache/src/services/replicator/replication-status.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {createSilentLogContext} from '../../../../shared/src/logging-test-utils.ts';\nimport type {JSONObject} from '../../../../zero-events/src/json.ts';\nimport type {\n ReplicatedIndex,\n ReplicatedTable,\n ReplicationStage,\n ReplicationStatusEvent,\n Status,\n} from '../../../../zero-events/src/status.ts';\nimport type {Database} from '../../../../zqlite/src/db.ts';\nimport {computeZqlSpecs, listIndexes} from '../../db/lite-tables.ts';\nimport type {LiteTableSpec} from '../../db/specs.ts';\nimport {\n makeErrorDetails,\n publishCriticalEvent,\n publishEvent,\n} from '../../observability/events.ts';\n\nconst byKeys = (a: [string, unknown], b: [string, unknown]) =>\n a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0;\n\nexport class ReplicationStatusPublisher {\n readonly #db: Database;\n #timer: NodeJS.Timeout | undefined;\n\n constructor(db: Database) {\n this.#db = db;\n }\n\n publish(\n lc: LogContext,\n stage: ReplicationStage,\n description?: string,\n interval = 0,\n ): this {\n this.stop();\n publishEvent(\n lc,\n replicationStatusEvent(lc, this.#db, stage, 'OK', description),\n );\n\n if (interval) {\n this.#timer = setInterval(\n () => this.publish(lc, stage, description, interval),\n interval,\n );\n }\n return this;\n }\n\n async publishAndThrowError(\n lc: LogContext,\n stage: ReplicationStage,\n e: unknown,\n ): Promise<never> {\n this.stop();\n const event = replicationStatusEvent(\n lc,\n this.#db,\n stage,\n 'ERROR',\n String(e),\n );\n event.errorDetails = makeErrorDetails(e);\n await publishCriticalEvent(lc, event);\n throw e;\n }\n\n stop(): this {\n clearInterval(this.#timer);\n return this;\n }\n}\n\nexport async function publishReplicationError(\n lc: LogContext,\n stage: ReplicationStage,\n description: string,\n errorDetails?: JSONObject,\n now = new Date(),\n) {\n const event: ReplicationStatusEvent = {\n type: 'zero/events/status/replication/v1',\n component: 'replication',\n status: 'ERROR',\n stage,\n description,\n errorDetails,\n time: now.toISOString(),\n };\n await publishCriticalEvent(lc, event);\n}\n\n// Exported for testing.\nexport function replicationStatusEvent(\n lc: LogContext,\n db: Database,\n stage: ReplicationStage,\n status: Status,\n description?: string,\n now = new Date(),\n): ReplicationStatusEvent {\n try {\n return {\n type: 'zero/events/status/replication/v1',\n component: 'replication',\n status,\n stage,\n description,\n time: now.toISOString(),\n state: {\n tables: getReplicatedTables(db),\n indexes: getReplicatedIndexes(db),\n replicaSize: getReplicaSize(db),\n },\n };\n } catch (e) {\n lc.warn?.(`Unable to create full ReplicationStatusEvent`, e);\n return {\n type: 'zero/events/status/replication/v1',\n component: 'replication',\n status,\n stage,\n description,\n time: now.toISOString(),\n state: {\n tables: [],\n indexes: [],\n replicaSize: 0,\n },\n };\n }\n}\n\nfunction getReplicatedTables(db: Database): ReplicatedTable[] {\n const fullTables = new Map<string, LiteTableSpec>();\n const clientSchema = computeZqlSpecs(\n createSilentLogContext(), // avoid logging warnings about indexes\n db,\n new Map(),\n fullTables,\n );\n\n return [...fullTables.entries()].sort(byKeys).map(([table, spec]) => ({\n table,\n columns: Object.entries(spec.columns)\n .sort(byKeys)\n .map(([column, spec]) => ({\n column,\n upstreamType: 
spec.dataType.split('|')[0],\n clientType: clientSchema.get(table)?.zqlSpec[column]?.type ?? null,\n })),\n }));\n}\n\nfunction getReplicatedIndexes(db: Database): ReplicatedIndex[] {\n return listIndexes(db).map(({tableName: table, columns, unique}) => ({\n table,\n unique,\n columns: Object.entries(columns)\n .sort(byKeys)\n .map(([column, dir]) => ({column, dir})),\n }));\n}\n\nfunction getReplicaSize(db: Database) {\n const [{page_count: pageCount}] = db.pragma<{page_count: number}>(\n 'page_count',\n );\n const [{page_size: pageSize}] = db.pragma<{page_size: number}>('page_size');\n return pageCount * pageSize;\n}\n"],"names":["spec"],"mappings":";;;AAmBA,MAAM,SAAS,CAAC,GAAsB,MACpC,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI;AAEhC,MAAM,2BAA2B;AAAA,EAC7B;AAAA,EACT;AAAA,EAEA,YAAY,IAAc;AACxB,SAAK,MAAM;AAAA,EACb;AAAA,EAEA,QACE,IACA,OACA,aACA,WAAW,GACL;AACN,SAAK,KAAA;AACL;AAAA,MACE;AAAA,MACA,uBAAuB,IAAI,KAAK,KAAK,OAAO,MAAM,WAAW;AAAA,IAAA;AAG/D,QAAI,UAAU;AACZ,WAAK,SAAS;AAAA,QACZ,MAAM,KAAK,QAAQ,IAAI,OAAO,aAAa,QAAQ;AAAA,QACnD;AAAA,MAAA;AAAA,IAEJ;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,qBACJ,IACA,OACA,GACgB;AAChB,SAAK,KAAA;AACL,UAAM,QAAQ;AAAA,MACZ;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA,OAAO,CAAC;AAAA,IAAA;AAEV,UAAM,eAAe,iBAAiB,CAAC;AACvC,UAAM,qBAAqB,IAAI,KAAK;AACpC,UAAM;AAAA,EACR;AAAA,EAEA,OAAa;AACX,kBAAc,KAAK,MAAM;AACzB,WAAO;AAAA,EACT;AACF;AAEA,eAAsB,wBACpB,IACA,OACA,aACA,cACA,MAAM,oBAAI,QACV;AACA,QAAM,QAAgC;AAAA,IACpC,MAAM;AAAA,IACN,WAAW;AAAA,IACX,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,IAAI,YAAA;AAAA,EAAY;AAExB,QAAM,qBAAqB,IAAI,KAAK;AACtC;AAGO,SAAS,uBACd,IACA,IACA,OACA,QACA,aACA,MAAM,oBAAI,QACc;AACxB,MAAI;AACF,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,IAAI,YAAA;AAAA,MACV,OAAO;AAAA,QACL,QAAQ,oBAAoB,EAAE;AAAA,
|
|
1
|
+
{"version":3,"file":"replication-status.js","sources":["../../../../../../zero-cache/src/services/replicator/replication-status.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {createSilentLogContext} from '../../../../shared/src/logging-test-utils.ts';\nimport type {JSONObject} from '../../../../zero-events/src/json.ts';\nimport type {\n ReplicatedIndex,\n ReplicatedTable,\n ReplicationStage,\n ReplicationStatusEvent,\n Status,\n} from '../../../../zero-events/src/status.ts';\nimport type {Database} from '../../../../zqlite/src/db.ts';\nimport {computeZqlSpecs, listIndexes} from '../../db/lite-tables.ts';\nimport type {LiteTableSpec} from '../../db/specs.ts';\nimport {\n makeErrorDetails,\n publishCriticalEvent,\n publishEvent,\n} from '../../observability/events.ts';\n\nconst byKeys = (a: [string, unknown], b: [string, unknown]) =>\n a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0;\n\nexport class ReplicationStatusPublisher {\n readonly #db: Database;\n #timer: NodeJS.Timeout | undefined;\n\n constructor(db: Database) {\n this.#db = db;\n }\n\n publish(\n lc: LogContext,\n stage: ReplicationStage,\n description?: string,\n interval = 0,\n ): this {\n this.stop();\n publishEvent(\n lc,\n replicationStatusEvent(lc, this.#db, stage, 'OK', description),\n );\n\n if (interval) {\n this.#timer = setInterval(\n () => this.publish(lc, stage, description, interval),\n interval,\n );\n }\n return this;\n }\n\n async publishAndThrowError(\n lc: LogContext,\n stage: ReplicationStage,\n e: unknown,\n ): Promise<never> {\n this.stop();\n const event = replicationStatusEvent(\n lc,\n this.#db,\n stage,\n 'ERROR',\n String(e),\n );\n event.errorDetails = makeErrorDetails(e);\n await publishCriticalEvent(lc, event);\n throw e;\n }\n\n stop(): this {\n clearInterval(this.#timer);\n return this;\n }\n}\n\nexport async function publishReplicationError(\n lc: LogContext,\n stage: ReplicationStage,\n description: string,\n errorDetails?: JSONObject,\n now = new Date(),\n) {\n const event: ReplicationStatusEvent = {\n type: 'zero/events/status/replication/v1',\n component: 'replication',\n status: 'ERROR',\n stage,\n description,\n errorDetails,\n time: now.toISOString(),\n };\n await publishCriticalEvent(lc, event);\n}\n\nexport function replicationStatusError(\n lc: LogContext,\n stage: ReplicationStage,\n e: unknown,\n now = new Date(),\n) {\n return replicationStatusEvent(lc, null, stage, 'ERROR', String(e), now);\n}\n\n// Exported for testing.\nexport function replicationStatusEvent(\n lc: LogContext,\n db: Database | null,\n stage: ReplicationStage,\n status: Status,\n description?: string,\n now = new Date(),\n): ReplicationStatusEvent {\n try {\n return {\n type: 'zero/events/status/replication/v1',\n component: 'replication',\n status,\n stage,\n description,\n time: now.toISOString(),\n state: {\n tables: db ? getReplicatedTables(db) : [],\n indexes: db ? getReplicatedIndexes(db) : [],\n replicaSize: db ? 
getReplicaSize(db) : undefined,\n },\n };\n } catch (e) {\n lc.warn?.(`Unable to create full ReplicationStatusEvent`, e);\n return {\n type: 'zero/events/status/replication/v1',\n component: 'replication',\n status,\n stage,\n description,\n time: now.toISOString(),\n state: {\n tables: [],\n indexes: [],\n replicaSize: 0,\n },\n };\n }\n}\n\nfunction getReplicatedTables(db: Database): ReplicatedTable[] {\n const fullTables = new Map<string, LiteTableSpec>();\n const clientSchema = computeZqlSpecs(\n createSilentLogContext(), // avoid logging warnings about indexes\n db,\n new Map(),\n fullTables,\n );\n\n return [...fullTables.entries()].sort(byKeys).map(([table, spec]) => ({\n table,\n columns: Object.entries(spec.columns)\n .sort(byKeys)\n .map(([column, spec]) => ({\n column,\n upstreamType: spec.dataType.split('|')[0],\n clientType: clientSchema.get(table)?.zqlSpec[column]?.type ?? null,\n })),\n }));\n}\n\nfunction getReplicatedIndexes(db: Database): ReplicatedIndex[] {\n return listIndexes(db).map(({tableName: table, columns, unique}) => ({\n table,\n unique,\n columns: Object.entries(columns)\n .sort(byKeys)\n .map(([column, dir]) => ({column, dir})),\n }));\n}\n\nfunction getReplicaSize(db: Database) {\n const [{page_count: pageCount}] = db.pragma<{page_count: number}>(\n 'page_count',\n );\n const [{page_size: pageSize}] = db.pragma<{page_size: number}>('page_size');\n return pageCount * pageSize;\n}\n"],"names":["spec"],"mappings":";;;AAmBA,MAAM,SAAS,CAAC,GAAsB,MACpC,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI;AAEhC,MAAM,2BAA2B;AAAA,EAC7B;AAAA,EACT;AAAA,EAEA,YAAY,IAAc;AACxB,SAAK,MAAM;AAAA,EACb;AAAA,EAEA,QACE,IACA,OACA,aACA,WAAW,GACL;AACN,SAAK,KAAA;AACL;AAAA,MACE;AAAA,MACA,uBAAuB,IAAI,KAAK,KAAK,OAAO,MAAM,WAAW;AAAA,IAAA;AAG/D,QAAI,UAAU;AACZ,WAAK,SAAS;AAAA,QACZ,MAAM,KAAK,QAAQ,IAAI,OAAO,aAAa,QAAQ;AAAA,QACnD;AAAA,MAAA;AAAA,IAEJ;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,qBACJ,IACA,OACA,GACgB;AAChB,SAAK,KAAA;AACL,UAAM,QAAQ;AAAA,MACZ;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA,OAAO,CAAC;AAAA,IAAA;AAEV,UAAM,eAAe,iBAAiB,CAAC;AACvC,UAAM,qBAAqB,IAAI,KAAK;AACpC,UAAM;AAAA,EACR;AAAA,EAEA,OAAa;AACX,kBAAc,KAAK,MAAM;AACzB,WAAO;AAAA,EACT;AACF;AAEA,eAAsB,wBACpB,IACA,OACA,aACA,cACA,MAAM,oBAAI,QACV;AACA,QAAM,QAAgC;AAAA,IACpC,MAAM;AAAA,IACN,WAAW;AAAA,IACX,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM,IAAI,YAAA;AAAA,EAAY;AAExB,QAAM,qBAAqB,IAAI,KAAK;AACtC;AAEO,SAAS,uBACd,IACA,OACA,GACA,MAAM,oBAAI,QACV;AACA,SAAO,uBAAuB,IAAI,MAAM,OAAO,SAAS,OAAO,CAAC,GAAG,GAAG;AACxE;AAGO,SAAS,uBACd,IACA,IACA,OACA,QACA,aACA,MAAM,oBAAI,QACc;AACxB,MAAI;AACF,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,IAAI,YAAA;AAAA,MACV,OAAO;AAAA,QACL,QAAQ,KAAK,oBAAoB,EAAE,IAAI,CAAA;AAAA,QACvC,SAAS,KAAK,qBAAqB,EAAE,IAAI,CAAA;AAAA,QACzC,aAAa,KAAK,eAAe,EAAE,IAAI;AAAA,MAAA;AAAA,IACzC;AAAA,EAEJ,SAAS,GAAG;AACV,OAAG,OAAO,gDAAgD,CAAC;AAC3D,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,IAAI,YAAA;AAAA,MACV,OAAO;AAAA,QACL,QAAQ,CAAA;AAAA,QACR,SAAS,CAAA;AAAA,QACT,aAAa;AAAA,MAAA;AAAA,IACf;AAAA,EAEJ;AACF;AAEA,SAAS,oBAAoB,IAAiC;AAC5D,QAAM,iCAAiB,IAAA;AACvB,QAAM,eAAe;AAAA,IACnB,uBAAA;AAAA;AAAA,IACA;AAAA,wBACI,IAAA;AAAA,IACJ;AAAA,EAAA;AAGF,SAAO,CAAC,GAAG,WAAW,QAAA,CAAS,EAAE,KAAK,MAAM,EAAE,IAAI,CAAC,CAAC,OAAO,IAAI,OAAO;AAAA,IACpE;AAAA,IACA,SAAS,OAAO,QAAQ,KAAK,OAAO,EACjC,KAAK,MAAM,EACX,IAAI,CAAC,CAAC,QAAQA,KAAI,OAAO;AAAA,MACxB;AAAA,MACA,cAAcA,MAAK,SAAS,MAAM,GAAG,EAAE,CAAC;AAAA,MACxC,YAAY,aAAa,IAAI,KAAK,GAAG,QAAQ,MAAM,GAAG,QAAQ;AAAA,IAAA,EAC9D;AAAA,EAAA,EACJ;AACJ;AAEA,SA
AS,qBAAqB,IAAiC;AAC7D,SAAO,YAAY,EAAE,EAAE,IAAI,CAAC,EAAC,WAAW,OAAO,SAAS,cAAa;AAAA,IACnE;AAAA,IACA;AAAA,IACA,SAAS,OAAO,QAAQ,OAAO,EAC5B,KAAK,MAAM,EACX,IAAI,CAAC,CAAC,QAAQ,GAAG,OAAO,EAAC,QAAQ,MAAK;AAAA,EAAA,EACzC;AACJ;AAEA,SAAS,eAAe,IAAc;AACpC,QAAM,CAAC,EAAC,YAAY,UAAA,CAAU,IAAI,GAAG;AAAA,IACnC;AAAA,EAAA;AAEF,QAAM,CAAC,EAAC,WAAW,SAAA,CAAS,IAAI,GAAG,OAA4B,WAAW;AAC1E,SAAO,YAAY;AACrB;"}
|
|
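The `sourcesContent` embedded in this updated map carries the substantive change to `replication-status.ts`: a new `replicationStatusError` helper, and `replicationStatusEvent` now accepting `Database | null` so an ERROR event can be built before a replica handle exists (tables and indexes fall back to `[]`, `replicaSize` to `undefined`). A minimal, self-contained sketch of that fallback, using local stand-in types rather than the Zero-internal `LogContext`, `ReplicationStage`, and `Database` imports:

```ts
// Stand-in types; the real code uses Database, ReplicationStage, and
// ReplicationStatusEvent from the zero-cache / zero-events packages.
type Db = {sizeBytes(): number};
type Stage = string;
type StatusEvent = {
  status: 'OK' | 'ERROR';
  stage: Stage;
  description?: string;
  state: {tables: unknown[]; indexes: unknown[]; replicaSize: number | undefined};
};

function statusEvent(
  db: Db | null,
  stage: Stage,
  status: 'OK' | 'ERROR',
  description?: string,
): StatusEvent {
  return {
    status,
    stage,
    description,
    state: {
      // Mirrors `db ? getReplicatedTables(db) : []` (and friends) in the
      // updated source: with no replica handle, report empty state.
      tables: db ? [/* replicated tables */] : [],
      indexes: db ? [/* replicated indexes */] : [],
      replicaSize: db ? db.sizeBytes() : undefined,
    },
  };
}

// The new replicationStatusError helper is essentially this specialization:
const statusError = (stage: Stage, e: unknown) =>
  statusEvent(null, stage, 'ERROR', String(e));

console.log(statusError('initial-sync', new Error('upstream unreachable')));
```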
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"cvr-store.d.ts","sourceRoot":"","sources":["../../../../../../zero-cache/src/services/view-syncer/cvr-store.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAiBjD,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,+CAA+C,CAAC;AAKnF,OAAO,EAAC,sBAAsB,EAAC,MAAM,iCAAiC,CAAC;AACvE,OAAO,KAAK,EAAC,UAAU,EAAE,mBAAmB,EAAC,MAAM,mBAAmB,CAAC;AAEvE,OAAO,EAAY,KAAK,OAAO,EAAiB,MAAM,uBAAuB,CAAC;AAC9E,OAAO,KAAK,EAAQ,cAAc,EAAC,MAAM,qBAAqB,CAAC;AAC/D,OAAO,KAAK,EAAC,GAAG,EAAE,WAAW,EAAC,MAAM,UAAU,CAAC;AAE/C,OAAO,EAKL,KAAK,OAAO,EACb,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAEL,KAAK,YAAY,EAGjB,KAAK,UAAU,EAGf,KAAK,kBAAkB,EACvB,KAAK,UAAU,EACf,KAAK,WAAW,EAEhB,KAAK,KAAK,EACV,KAAK,SAAS,EAGf,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,KAAK,QAAQ,EAGd,MAAM,gBAAgB,CAAC;AAExB,MAAM,MAAM,aAAa,GAAG;IAC1B,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,MAAM,CAAC;CACpB,CAAC;
|
|
1
|
+
{"version":3,"file":"cvr-store.d.ts","sourceRoot":"","sources":["../../../../../../zero-cache/src/services/view-syncer/cvr-store.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAiBjD,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,+CAA+C,CAAC;AAKnF,OAAO,EAAC,sBAAsB,EAAC,MAAM,iCAAiC,CAAC;AACvE,OAAO,KAAK,EAAC,UAAU,EAAE,mBAAmB,EAAC,MAAM,mBAAmB,CAAC;AAEvE,OAAO,EAAY,KAAK,OAAO,EAAiB,MAAM,uBAAuB,CAAC;AAC9E,OAAO,KAAK,EAAQ,cAAc,EAAC,MAAM,qBAAqB,CAAC;AAC/D,OAAO,KAAK,EAAC,GAAG,EAAE,WAAW,EAAC,MAAM,UAAU,CAAC;AAE/C,OAAO,EAKL,KAAK,OAAO,EACb,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAEL,KAAK,YAAY,EAGjB,KAAK,UAAU,EAGf,KAAK,kBAAkB,EACvB,KAAK,UAAU,EACf,KAAK,WAAW,EAEhB,KAAK,KAAK,EACV,KAAK,SAAS,EAGf,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,KAAK,QAAQ,EAGd,MAAM,gBAAgB,CAAC;AAExB,MAAM,MAAM,aAAa,GAAG;IAC1B,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,MAAM,CAAC;CACpB,CAAC;AA0DF,qBAAa,QAAQ;;gBA4BjB,EAAE,EAAE,UAAU,EACd,KAAK,EAAE,UAAU,EAKjB,UAAU,EAAE,UAAU,GAAG,SAAS,EAClC,KAAK,EAAE,OAAO,EACd,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,EACb,WAAW,EAAE,CAAC,CAAC,EAAE,OAAO,KAAK,IAAI,EACjC,qBAAqB,SAA2B,EAChD,eAAe,SAAoB,EACnC,yBAAyB,SAAM,EAAE,qBAAqB;IACtD,YAAY,oBAAa;IA0B3B,IAAI,CAAC,EAAE,EAAE,UAAU,EAAE,eAAe,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC;IAgN3D,aAAa,IAAI,OAAO,CAAC,WAAW,CAAC,KAAK,EAAE,SAAS,CAAC,CAAC;IAIvD,YAAY,CAAC,GAAG,EAAE,SAAS,GAAG,IAAI;IAIlC;;;;;;;OAOG;IACH,YAAY,CAAC,EAAE,EAAE,KAAK,GAAG,IAAI;IAI7B;;;;OAIG;IACH,YAAY,CAAC,GAAG,GAAG,EAAE,KAAK,EAAE;IAM5B;;;;OAIG;IACG,cAAc,CAAC,QAAQ,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAO3E;;;;;OAKG;IACG,WAAW,IAAI,OAAO,CAAC,QAAQ,GAAG,SAAS,CAAC;IAYlD,WAAW,CAAC,EACV,OAAO,EACP,cAAc,EACd,UAAU,EACV,YAAY,EACZ,SAAS,EACT,QAAQ,GACT,EAAE,IAAI,CACL,WAAW,EACT,SAAS,GACT,gBAAgB,GAChB,YAAY,GACZ,cAAc,GACd,WAAW,GACX,UAAU,CACb,GAAG,IAAI;IAsBR,kBAAkB,CAAC,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,GAAG,IAAI;IAarE,QAAQ,CAAC,KAAK,EAAE,WAAW,GAAG,IAAI;IA4ClC,WAAW,CAAC,KAAK,EAAE,WAAW;IA2B9B,YAAY,CAAC,MAAM,EAAE,YAAY,GAAG,IAAI;IAYxC,YAAY,CAAC,QAAQ,EAAE,MAAM;IAgB7B,eAAe,CACb,UAAU,EAAE,UAAU,EACtB,KAAK,EAAE;QAAC,EAAE,EAAE,MAAM,CAAA;KAAC,EACnB,MAAM,EAAE;QAAC,EAAE,EAAE,MAAM,CAAA;KAAC,EACpB,OAAO,EAAE,OAAO,EAChB,aAAa,EAAE,QAAQ,GAAG,SAAS,EACnC,GAAG,EAAE,MAAM,GACV,IAAI;IA+CP,iBAAiB,CACf,EAAE,EAAE,UAAU,EACd,YAAY,EAAE,kBAAkB,EAChC,OAAO,EAAE,WAAW,EACpB,OAAO,EAAE,UAAU,EACnB,kBAAkB,GAAE,MAAM,EAAO,GAChC,cAAc,CAAC,OAAO,EAAE,EAAE,IAAI,EAAE,SAAS,CAAC;IAUvC,oBAAoB,CACxB,EAAE,EAAE,UAAU,EACd,YAAY,EAAE,kBAAkB,EAChC,OAAO,EAAE,WAAW,EACpB,OAAO,EAAE,UAAU,GAClB,OAAO,CAAC,cAAc,EAAE,CAAC;IA6N5B,IAAI,QAAQ,IAAI,MAAM,CAErB;IAEK,KAAK,CACT,EAAE,EAAE,UAAU,EACd,sBAAsB,EAAE,UAAU,EAClC,GAAG,EAAE,WAAW,EAChB,eAAe,EAAE,MAAM,GACtB,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC;IA+BhC,iBAAiB,IAAI,OAAO;IAI5B,qDAAqD;IACrD,OAAO,CAAC,EAAE,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;IAIhC,cAAc,CAClB,EAAE,EAAE,UAAU,EACd,QAAQ,EAAE,QAAQ,EAClB,QAAQ,CAAC,EAAE,MAAM,GAChB,OAAO,CAAC,eAAe,EAAE,CAAC;CAsC9B;AAED;;;;GAIG;AACH,wBAAsB,YAAY,CAChC,EAAE,EAAE,mBAAmB,EACvB,MAAM,EAAE,MAAM,EACd,aAAa,EAAE,MAAM,EACrB,sBAAsB,EAAE,UAAU,GACjC,OAAO,CAAC,IAAI,CAAC,CAUf;AAED,qBAAa,mBAAoB,SAAQ,sBAAsB;gBACjD,OAAO,EAAE,MAAM;CAU5B;AAED,qBAAa,+BAAgC,SAAQ,sBAAsB;IACzE,QAAQ,CAAC,IAAI,qCAAqC;gBAEtC,eAAe,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM;CAU3D;AAED,qBAAa,cAAe,SAAQ,sBAAsB;IACxD,QAAQ,CAAC,IAAI,oBAAoB;gBAG/B,KAAK,EAAE,MAAM,GAAG,IAAI,EACpB,SAAS,EAAE,MAAM,GAAG,IAAI,EACxB,eAAe,EAAE,MAAM;CAe1B;AAED,qBAAa,wBAAyB,SAAQ,sBAAsB;IAClE,QAAQ,CAAC,IAAI,8BAA8B;gBAE/B,KAAK,EAAE,OAAO;CAW3B;AAED,qBAAa,sBAAuB,SAAQ,KAAK;IAC/C,QAAQ,CAAC,IAAI,4
BAA4B;IACzC,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;gBAExB,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,IAAI;CAK3D"}
|
|
@@ -23,6 +23,7 @@ import "pg-format";
|
|
|
23
23
|
import "../../../../shared/src/bigint-json.js";
|
|
24
24
|
import { EMPTY_CVR_VERSION, versionFromString, versionString, queryRecordToQueryRow, cmpVersions } from "./schema/types.js";
|
|
25
25
|
import { ttlClockFromNumber, ttlClockAsNumber } from "./ttl-clock.js";
|
|
26
|
+
let flushCounter = 0;
|
|
26
27
|
const tracer = trace.getTracer("cvr-store", version);
|
|
27
28
|
function asQuery(row) {
|
|
28
29
|
const maybeVersion = (s) => s === null ? void 0 : versionFromString(s);
|
|
@@ -138,28 +139,30 @@ class CVRStore {
|
|
|
138
139
|
clientSchema: null,
|
|
139
140
|
profileID: null
|
|
140
141
|
};
|
|
141
|
-
const [instance, clientsRows, queryRows, desiresRows] = await this.#db.begin(READONLY, (tx) =>
|
|
142
|
-
|
|
142
|
+
const [instance, clientsRows, queryRows, desiresRows] = await this.#db.begin(READONLY, (tx) => {
|
|
143
|
+
lc.debug?.(`CVR tx started after ${Date.now() - start} ms`);
|
|
144
|
+
return [
|
|
145
|
+
tx`SELECT cvr."version",
|
|
143
146
|
"lastActive",
|
|
144
147
|
"ttlClock",
|
|
145
|
-
"replicaVersion",
|
|
146
|
-
"owner",
|
|
148
|
+
"replicaVersion",
|
|
149
|
+
"owner",
|
|
147
150
|
"grantedAt",
|
|
148
|
-
"clientSchema",
|
|
151
|
+
"clientSchema",
|
|
149
152
|
"profileID",
|
|
150
153
|
"deleted",
|
|
151
154
|
rows."version" as "rowsVersion"
|
|
152
155
|
FROM ${this.#cvr("instances")} AS cvr
|
|
153
|
-
LEFT JOIN ${this.#cvr("rowsVersion")} AS rows
|
|
156
|
+
LEFT JOIN ${this.#cvr("rowsVersion")} AS rows
|
|
154
157
|
ON cvr."clientGroupID" = rows."clientGroupID"
|
|
155
158
|
WHERE cvr."clientGroupID" = ${id}`,
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
+
tx`SELECT "clientID" FROM ${this.#cvr(
|
|
160
|
+
"clients"
|
|
161
|
+
)}
|
|
159
162
|
WHERE "clientGroupID" = ${id}`,
|
|
160
|
-
|
|
163
|
+
tx`SELECT * FROM ${this.#cvr("queries")}
|
|
161
164
|
WHERE "clientGroupID" = ${id} AND deleted IS DISTINCT FROM true`,
|
|
162
|
-
|
|
165
|
+
tx`SELECT
|
|
163
166
|
"clientGroupID",
|
|
164
167
|
"clientID",
|
|
165
168
|
"queryHash",
|
|
@@ -169,7 +172,11 @@ class CVRStore {
|
|
|
169
172
|
"inactivatedAtMs" AS "inactivatedAt"
|
|
170
173
|
FROM ${this.#cvr("desires")}
|
|
171
174
|
WHERE "clientGroupID" = ${id}`
|
|
172
|
-
|
|
175
|
+
];
|
|
176
|
+
});
|
|
177
|
+
lc.debug?.(
|
|
178
|
+
`CVR tx completed after ${Date.now() - start} ms (${clientsRows.length} clients, ${queryRows.length} queries, ${desiresRows.length} desires)`
|
|
179
|
+
);
|
|
173
180
|
if (instance.length === 0) {
|
|
174
181
|
this.putInstance({
|
|
175
182
|
version: cvr.version,
|
|
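The two hunks above rewrap the four CVR `SELECT`s so the `#db.begin(READONLY, …)` callback has a block body: it can then log how long the read-only transaction took to start, and the caller logs completion time plus row counts once it resolves. A hedged, self-contained sketch of the pattern; `Log`, `beginReadonly`, and the two `select*` helpers are stand-ins, not the postgres.js or Zero APIs:

```ts
// `Log` stands in for @rocicorp/logger's LogContext (only debug?.() is used);
// `beginReadonly` stands in for this.#db.begin(READONLY, body).
type Log = {debug?: (msg: string) => void};

async function beginReadonly<T>(body: (tx: object) => Promise<T>): Promise<T> {
  return body({});
}

async function loadWithTiming(lc: Log) {
  const start = Date.now();
  const [clients, queries] = await beginReadonly(tx => {
    // Block body (instead of a bare array expression) purely so the wait for
    // the transaction can be measured before the SELECTs are issued.
    lc.debug?.(`CVR tx started after ${Date.now() - start} ms`);
    return Promise.all([selectClients(tx), selectQueries(tx)]);
  });
  lc.debug?.(
    `CVR tx completed after ${Date.now() - start} ms ` +
      `(${clients.length} clients, ${queries.length} queries)`,
  );
}

// Hypothetical stand-ins for the tagged-template queries in the hunk.
const selectClients = async (_tx: object) => ['client-a'];
const selectQueries = async (_tx: object) => [] as string[];

void loadWithTiming({debug: console.debug});
```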
@@ -521,7 +528,9 @@ class CVRStore {
|
|
|
521
528
|
reader.setDone();
|
|
522
529
|
}
|
|
523
530
|
}
|
|
524
|
-
async #checkVersionAndOwnership(tx, expectedCurrentVersion, lastConnectTime) {
|
|
531
|
+
async #checkVersionAndOwnership(lc, tx, expectedCurrentVersion, lastConnectTime) {
|
|
532
|
+
const start = Date.now();
|
|
533
|
+
lc.debug?.("checking cvr version and ownership");
|
|
525
534
|
const expected = versionString(expectedCurrentVersion);
|
|
526
535
|
const result = await tx`SELECT "version", "owner", "grantedAt" FROM ${this.#cvr("instances")}
|
|
527
536
|
WHERE "clientGroupID" = ${this.#id}
|
|
@@ -531,6 +540,9 @@ class CVRStore {
|
|
|
531
540
|
owner: null,
|
|
532
541
|
grantedAt: null
|
|
533
542
|
};
|
|
543
|
+
lc.debug?.(
|
|
544
|
+
"checked cvr version and ownership in " + (Date.now() - start) + " ms"
|
|
545
|
+
);
|
|
534
546
|
if (owner !== this.#taskID && (grantedAt ?? 0) > lastConnectTime) {
|
|
535
547
|
throw new OwnershipError(owner, grantedAt, lastConnectTime);
|
|
536
548
|
}
|
|
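The `#checkVersionAndOwnership` hunks above thread the `LogContext` in as a new first parameter and bracket the `SELECT … FOR UPDATE` on `cvr.instances` with a start timestamp and a "checked cvr version and ownership in N ms" debug line. A sketch of that bracketing pattern under the same assumptions; `Log` and `work` are stand-ins for the logger and the row-lock query:

```ts
type Log = {debug?: (...args: unknown[]) => void};

async function checkWithTiming<T>(lc: Log, work: () => Promise<T>): Promise<T> {
  const start = Date.now();
  lc.debug?.('checking cvr version and ownership');
  const result = await work();
  // Logged before any version/ownership assertions, as in the hunk: the
  // timing covers only the locking query, not the error paths that follow.
  lc.debug?.('checked cvr version and ownership in ' + (Date.now() - start) + ' ms');
  return result;
}

void checkWithTiming({debug: console.debug}, async () => ({
  owner: null as string | null,
  grantedAt: null as number | null,
}));
```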
@@ -538,7 +550,7 @@ class CVRStore {
|
|
|
538
550
|
throw new ConcurrentModificationException(expected, version2);
|
|
539
551
|
}
|
|
540
552
|
}
|
|
541
|
-
async #flush(expectedCurrentVersion, cvr, lastConnectTime) {
|
|
553
|
+
async #flush(lc, expectedCurrentVersion, cvr, lastConnectTime) {
|
|
542
554
|
const stats = {
|
|
543
555
|
instances: 0,
|
|
544
556
|
queries: 0,
|
|
@@ -572,7 +584,10 @@ class CVRStore {
|
|
|
572
584
|
return null;
|
|
573
585
|
}
|
|
574
586
|
this.putInstance(cvr);
|
|
587
|
+
const start = Date.now();
|
|
588
|
+
lc.debug?.("flush tx beginning");
|
|
575
589
|
const rowsFlushed = await this.#db.begin(READ_COMMITTED, async (tx) => {
|
|
590
|
+
lc.debug?.(`flush tx begun after ${Date.now() - start} ms`);
|
|
576
591
|
const pipelined = [
|
|
577
592
|
// #checkVersionAndOwnership() executes a `SELECT ... FOR UPDATE`
|
|
578
593
|
// query to acquire a row-level lock so that version-updating
|
|
@@ -582,29 +597,41 @@ class CVRStore {
|
|
|
582
597
|
// to this lock and can thus commit / be-committed independently of
|
|
583
598
|
// cvr.instances.
|
|
584
599
|
this.#checkVersionAndOwnership(
|
|
600
|
+
lc,
|
|
585
601
|
tx,
|
|
586
602
|
expectedCurrentVersion,
|
|
587
603
|
lastConnectTime
|
|
588
604
|
)
|
|
589
605
|
];
|
|
606
|
+
let i = 0;
|
|
590
607
|
for (const write of this.#writes) {
|
|
591
608
|
stats.instances += write.stats.instances ?? 0;
|
|
592
609
|
stats.queries += write.stats.queries ?? 0;
|
|
593
610
|
stats.desires += write.stats.desires ?? 0;
|
|
594
611
|
stats.clients += write.stats.clients ?? 0;
|
|
595
612
|
stats.rows += write.stats.rows ?? 0;
|
|
596
|
-
|
|
613
|
+
const writeIndex = i++;
|
|
614
|
+
const writeStart = Date.now();
|
|
615
|
+
pipelined.push(
|
|
616
|
+
write.write(tx, lastConnectTime).execute().then(() => {
|
|
617
|
+
lc.debug?.(
|
|
618
|
+
`write ${writeIndex}/${this.#writes.size} completed in ${Date.now() - writeStart} ms`
|
|
619
|
+
);
|
|
620
|
+
})
|
|
621
|
+
);
|
|
597
622
|
stats.statements++;
|
|
598
623
|
}
|
|
599
624
|
const rowUpdates = this.#rowCache.executeRowUpdates(
|
|
600
625
|
tx,
|
|
601
626
|
cvr.version,
|
|
602
627
|
this.#pendingRowRecordUpdates,
|
|
603
|
-
"allow-defer"
|
|
628
|
+
"allow-defer",
|
|
629
|
+
lc
|
|
604
630
|
);
|
|
605
631
|
pipelined.push(...rowUpdates);
|
|
606
632
|
stats.statements += rowUpdates.length;
|
|
607
633
|
await Promise.all(pipelined);
|
|
634
|
+
lc.debug?.(`flush tx returning after ${Date.now() - start} ms`);
|
|
608
635
|
if (rowUpdates.length === 0) {
|
|
609
636
|
stats.rowsDeferred = this.#pendingRowRecordUpdates.size;
|
|
610
637
|
return false;
|
|
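Inside `#flush`, the hunk above numbers each queued CVR write, times how long its pipelined `execute()` takes, and now passes the `LogContext` through to `executeRowUpdates` as well. A sketch of attaching timing to pipelined statements without serializing them; `Stmt` and `Log` are stand-ins for the real write objects and logger:

```ts
type Log = {debug?: (msg: string) => void};
type Stmt = {execute(): Promise<void>};

function pipelineWithTiming(lc: Log, writes: Stmt[]): Promise<void>[] {
  const pipelined: Promise<void>[] = [];
  let i = 0;
  for (const write of writes) {
    const writeIndex = i++;
    const writeStart = Date.now();
    // The .then() only observes completion; the statements still run
    // pipelined and are awaited together by the caller, mirroring
    // `await Promise.all(pipelined)` in the hunk.
    pipelined.push(
      write.execute().then(() => {
        lc.debug?.(
          `write ${writeIndex}/${writes.length} completed in ${Date.now() - writeStart} ms`,
        );
      }),
    );
  }
  return pipelined;
}
```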
@@ -618,10 +645,16 @@ class CVRStore {
|
|
|
618
645
|
rowsFlushed
|
|
619
646
|
);
|
|
620
647
|
recordRowsSynced(this.#rowCount);
|
|
621
|
-
if (this.#upstreamDb) {
|
|
648
|
+
if (this.#upstreamDb && this.#upstreamWrites.length) {
|
|
649
|
+
const start2 = performance.now();
|
|
650
|
+
lc.debug?.("flushing upstream writes");
|
|
622
651
|
await this.#upstreamDb.begin(READ_COMMITTED, async (tx) => {
|
|
623
652
|
await Promise.all(this.#upstreamWrites.map((write) => write(tx)));
|
|
624
653
|
});
|
|
654
|
+
const elapsed = performance.now() - start2;
|
|
655
|
+
lc.debug?.(
|
|
656
|
+
`flushed upstream writes (${this.#upstreamWrites.length} statements) in ${elapsed} ms`
|
|
657
|
+
);
|
|
625
658
|
}
|
|
626
659
|
return stats;
|
|
627
660
|
}
|
|
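The hunk above also stops opening an upstream transaction when nothing is queued: the `READ COMMITTED` block now runs only if `#upstreamWrites` is non-empty, and its duration is measured with `performance.now()`. A self-contained sketch of the guard; `flushUpstream` and its parameter types are stand-ins, not the postgres.js API:

```ts
type UpstreamWrite = (tx: unknown) => Promise<unknown>;
type UpstreamDb = {begin<T>(body: (tx: unknown) => Promise<T>): Promise<T>};

async function flushUpstream(
  lc: {debug?: (msg: string) => void},
  upstreamDb: UpstreamDb | undefined,
  upstreamWrites: UpstreamWrite[],
) {
  // Skip the transaction entirely when there is nothing to write, instead of
  // opening an empty one whenever an upstream db happens to be configured.
  if (!upstreamDb || upstreamWrites.length === 0) {
    return;
  }
  const start = performance.now();
  lc.debug?.('flushing upstream writes');
  await upstreamDb.begin(async tx => {
    await Promise.all(upstreamWrites.map(write => write(tx)));
  });
  lc.debug?.(
    `flushed upstream writes (${upstreamWrites.length} statements) in ${performance.now() - start} ms`,
  );
}
```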
@@ -630,8 +663,10 @@ class CVRStore {
|
|
|
630
663
|
}
|
|
631
664
|
async flush(lc, expectedCurrentVersion, cvr, lastConnectTime) {
|
|
632
665
|
const start = performance.now();
|
|
666
|
+
lc = lc.withContext("cvrFlushID", flushCounter++);
|
|
633
667
|
try {
|
|
634
668
|
const stats = await this.#flush(
|
|
669
|
+
lc,
|
|
635
670
|
expectedCurrentVersion,
|
|
636
671
|
cvr,
|
|
637
672
|
lastConnectTime
|
|
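The public `flush()` now stamps its `LogContext` with an incrementing `cvrFlushID` (the module-level `flushCounter` added near the top of the file), so all of the debug lines introduced in the hunks above can be correlated per flush. A small sketch of the correlation pattern; `Ctx` is a stand-in for `LogContext`, of which only `withContext()` and `debug?.()` are assumed:

```ts
type Ctx = {
  withContext(key: string, value: string | number): Ctx;
  debug?: (msg: string) => void;
};

let flushCounter = 0; // module-level, as in the `let flushCounter = 0;` hunk

async function flush(lc: Ctx, doFlush: (lc: Ctx) => Promise<void>) {
  // Every debug line logged through this child context carries the same
  // cvrFlushID, so interleaved flushes can be told apart in the logs.
  lc = lc.withContext('cvrFlushID', flushCounter++);
  await doFlush(lc);
}
```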
@@ -711,11 +746,14 @@ async function checkVersion(tx, schema, clientGroupID, expectedCurrentVersion) {
|
|
|
711
746
|
}
|
|
712
747
|
class ClientNotFoundError extends ProtocolErrorWithLevel {
|
|
713
748
|
constructor(message) {
|
|
714
|
-
super(
|
|
715
|
-
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
749
|
+
super(
|
|
750
|
+
{
|
|
751
|
+
kind: ClientNotFound,
|
|
752
|
+
message,
|
|
753
|
+
origin: ZeroCache
|
|
754
|
+
},
|
|
755
|
+
"warn"
|
|
756
|
+
);
|
|
719
757
|
}
|
|
720
758
|
}
|
|
721
759
|
class ConcurrentModificationException extends ProtocolErrorWithLevel {
|
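Finally, `ClientNotFoundError` now hands `ProtocolErrorWithLevel` a structured body (`kind: ClientNotFound`, the message, and `origin: ZeroCache`) plus an explicit `"warn"` level, replacing the earlier positional arguments. A self-contained sketch of that shape; the base class, the constants, and `ErrorBody` here are local stand-ins rather than the real zero-protocol exports:

```ts
type LogLevel = 'warn' | 'error';
type ErrorBody = {kind: string; message: string; origin: string};

const ClientNotFound = 'ClientNotFound';
const ZeroCache = 'ZeroCache';

class ProtocolErrorWithLevel extends Error {
  constructor(
    readonly body: ErrorBody,
    readonly logLevel: LogLevel,
  ) {
    super(body.message);
  }
}

class ClientNotFoundError extends ProtocolErrorWithLevel {
  constructor(message: string) {
    // The structured body tells the client which protocol error occurred and
    // where it originated; "warn" keeps it below error-level logging.
    super({kind: ClientNotFound, message, origin: ZeroCache}, 'warn');
  }
}

const err = new ClientNotFoundError('client abc not found');
console.log(err.logLevel, err.body.kind); // "warn" "ClientNotFound"
```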