@livestore/sync-cf 0.4.0-dev.2 → 0.4.0-dev.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. package/README.md +60 -0
  2. package/dist/.tsbuildinfo +1 -1
  3. package/dist/cf-worker/do/durable-object.d.ts +45 -0
  4. package/dist/cf-worker/do/durable-object.d.ts.map +1 -0
  5. package/dist/cf-worker/do/durable-object.js +154 -0
  6. package/dist/cf-worker/do/durable-object.js.map +1 -0
  7. package/dist/cf-worker/do/layer.d.ts +34 -0
  8. package/dist/cf-worker/do/layer.d.ts.map +1 -0
  9. package/dist/cf-worker/do/layer.js +68 -0
  10. package/dist/cf-worker/do/layer.js.map +1 -0
  11. package/dist/cf-worker/do/pull.d.ts +6 -0
  12. package/dist/cf-worker/do/pull.d.ts.map +1 -0
  13. package/dist/cf-worker/do/pull.js +39 -0
  14. package/dist/cf-worker/do/pull.js.map +1 -0
  15. package/dist/cf-worker/do/push.d.ts +14 -0
  16. package/dist/cf-worker/do/push.d.ts.map +1 -0
  17. package/dist/cf-worker/do/push.js +99 -0
  18. package/dist/cf-worker/do/push.js.map +1 -0
  19. package/dist/cf-worker/do/sqlite.d.ts +196 -0
  20. package/dist/cf-worker/do/sqlite.d.ts.map +1 -0
  21. package/dist/cf-worker/do/sqlite.js +27 -0
  22. package/dist/cf-worker/do/sqlite.js.map +1 -0
  23. package/dist/cf-worker/do/sync-storage.d.ts +17 -0
  24. package/dist/cf-worker/do/sync-storage.d.ts.map +1 -0
  25. package/dist/cf-worker/do/sync-storage.js +73 -0
  26. package/dist/cf-worker/do/sync-storage.js.map +1 -0
  27. package/dist/cf-worker/do/transport/do-rpc-server.d.ts +8 -0
  28. package/dist/cf-worker/do/transport/do-rpc-server.d.ts.map +1 -0
  29. package/dist/cf-worker/do/transport/do-rpc-server.js +45 -0
  30. package/dist/cf-worker/do/transport/do-rpc-server.js.map +1 -0
  31. package/dist/cf-worker/do/transport/http-rpc-server.d.ts +7 -0
  32. package/dist/cf-worker/do/transport/http-rpc-server.d.ts.map +1 -0
  33. package/dist/cf-worker/do/transport/http-rpc-server.js +24 -0
  34. package/dist/cf-worker/do/transport/http-rpc-server.js.map +1 -0
  35. package/dist/cf-worker/do/transport/ws-rpc-server.d.ts +4 -0
  36. package/dist/cf-worker/do/transport/ws-rpc-server.d.ts.map +1 -0
  37. package/dist/cf-worker/do/transport/ws-rpc-server.js +21 -0
  38. package/dist/cf-worker/do/transport/ws-rpc-server.js.map +1 -0
  39. package/dist/cf-worker/mod.d.ts +4 -2
  40. package/dist/cf-worker/mod.d.ts.map +1 -1
  41. package/dist/cf-worker/mod.js +3 -2
  42. package/dist/cf-worker/mod.js.map +1 -1
  43. package/dist/cf-worker/shared.d.ts +127 -0
  44. package/dist/cf-worker/shared.d.ts.map +1 -0
  45. package/dist/cf-worker/shared.js +26 -0
  46. package/dist/cf-worker/shared.js.map +1 -0
  47. package/dist/cf-worker/worker.d.ts +36 -21
  48. package/dist/cf-worker/worker.d.ts.map +1 -1
  49. package/dist/cf-worker/worker.js +39 -32
  50. package/dist/cf-worker/worker.js.map +1 -1
  51. package/dist/client/mod.d.ts +4 -0
  52. package/dist/client/mod.d.ts.map +1 -0
  53. package/dist/client/mod.js +4 -0
  54. package/dist/client/mod.js.map +1 -0
  55. package/dist/client/transport/do-rpc-client.d.ts +40 -0
  56. package/dist/client/transport/do-rpc-client.d.ts.map +1 -0
  57. package/dist/client/transport/do-rpc-client.js +102 -0
  58. package/dist/client/transport/do-rpc-client.js.map +1 -0
  59. package/dist/client/transport/http-rpc-client.d.ts +43 -0
  60. package/dist/client/transport/http-rpc-client.d.ts.map +1 -0
  61. package/dist/client/transport/http-rpc-client.js +87 -0
  62. package/dist/client/transport/http-rpc-client.js.map +1 -0
  63. package/dist/client/transport/ws-rpc-client.d.ts +45 -0
  64. package/dist/client/transport/ws-rpc-client.d.ts.map +1 -0
  65. package/dist/client/transport/ws-rpc-client.js +94 -0
  66. package/dist/client/transport/ws-rpc-client.js.map +1 -0
  67. package/dist/common/do-rpc-schema.d.ts +76 -0
  68. package/dist/common/do-rpc-schema.d.ts.map +1 -0
  69. package/dist/common/do-rpc-schema.js +48 -0
  70. package/dist/common/do-rpc-schema.js.map +1 -0
  71. package/dist/common/http-rpc-schema.d.ts +58 -0
  72. package/dist/common/http-rpc-schema.d.ts.map +1 -0
  73. package/dist/common/http-rpc-schema.js +37 -0
  74. package/dist/common/http-rpc-schema.js.map +1 -0
  75. package/dist/common/mod.d.ts +5 -1
  76. package/dist/common/mod.d.ts.map +1 -1
  77. package/dist/common/mod.js +4 -1
  78. package/dist/common/mod.js.map +1 -1
  79. package/dist/common/sync-message-types.d.ts +236 -0
  80. package/dist/common/sync-message-types.d.ts.map +1 -0
  81. package/dist/common/sync-message-types.js +60 -0
  82. package/dist/common/sync-message-types.js.map +1 -0
  83. package/dist/common/ws-rpc-schema.d.ts +55 -0
  84. package/dist/common/ws-rpc-schema.d.ts.map +1 -0
  85. package/dist/common/ws-rpc-schema.js +32 -0
  86. package/dist/common/ws-rpc-schema.js.map +1 -0
  87. package/package.json +7 -8
  88. package/src/cf-worker/do/durable-object.ts +241 -0
  89. package/src/cf-worker/do/layer.ts +107 -0
  90. package/src/cf-worker/do/pull.ts +64 -0
  91. package/src/cf-worker/do/push.ts +162 -0
  92. package/src/cf-worker/do/sqlite.ts +28 -0
  93. package/src/cf-worker/do/sync-storage.ts +126 -0
  94. package/src/cf-worker/do/transport/do-rpc-server.ts +82 -0
  95. package/src/cf-worker/do/transport/http-rpc-server.ts +37 -0
  96. package/src/cf-worker/do/transport/ws-rpc-server.ts +34 -0
  97. package/src/cf-worker/mod.ts +4 -2
  98. package/src/cf-worker/shared.ts +95 -0
  99. package/src/cf-worker/worker.ts +72 -63
  100. package/src/client/mod.ts +3 -0
  101. package/src/client/transport/do-rpc-client.ts +171 -0
  102. package/src/client/transport/http-rpc-client.ts +205 -0
  103. package/src/client/transport/ws-rpc-client.ts +182 -0
  104. package/src/common/do-rpc-schema.ts +54 -0
  105. package/src/common/http-rpc-schema.ts +40 -0
  106. package/src/common/mod.ts +8 -1
  107. package/src/common/sync-message-types.ts +117 -0
  108. package/src/common/ws-rpc-schema.ts +36 -0
  109. package/src/cf-worker/cf-types.ts +0 -12
  110. package/src/cf-worker/durable-object.ts +0 -478
  111. package/src/common/ws-message-types.ts +0 -114
  112. package/src/sync-impl/mod.ts +0 -1
  113. package/src/sync-impl/ws-impl.ts +0 -274
package/src/cf-worker/do/durable-object.ts
@@ -0,0 +1,241 @@
+ /// <reference types="@cloudflare/workers-types" />
+
+ import { DurableObject } from 'cloudflare:workers'
+ import { type CfTypes, setupDurableObjectWebSocketRpc } from '@livestore/common-cf'
+ import { CfDeclare } from '@livestore/common-cf/declare'
+ import {
+   Effect,
+   FetchHttpClient,
+   Layer,
+   Logger,
+   LogLevel,
+   Otlp,
+   RpcMessage,
+   Schema,
+   type Scope,
+ } from '@livestore/utils/effect'
+ import {
+   type Env,
+   getSyncRequestSearchParams,
+   type MakeDurableObjectClassOptions,
+   type SyncBackendRpcInterface,
+   WebSocketAttachmentSchema,
+ } from '../shared.ts'
+ import { DoCtx } from './layer.ts'
+ import { createDoRpcHandler } from './transport/do-rpc-server.ts'
+ import { createHttpRpcHandler } from './transport/http-rpc-server.ts'
+ import { makeRpcServer } from './transport/ws-rpc-server.ts'
+
+ // NOTE We need to redeclare runtime types here to avoid type conflicts with the lib.dom Response type.
+ // TODO get rid of those once CF fixes their type mismatch in the worker types
+ declare class Request extends CfDeclare.Request {}
+ declare class Response extends CfDeclare.Response {}
+ declare class WebSocketPair extends CfDeclare.WebSocketPair {}
+ declare class WebSocketRequestResponsePair extends CfDeclare.WebSocketRequestResponsePair {}
+
+ const DurableObjectBase = DurableObject as any as new (
+   state: CfTypes.DurableObjectState,
+   env: Env,
+ ) => CfTypes.DurableObject
+
+ // Type aliases needed to avoid TS bug https://github.com/microsoft/TypeScript/issues/55021
+ export type DoState = CfTypes.DurableObjectState
+ export type DoObject<T> = CfTypes.DurableObject & T
+
+ export type MakeDurableObjectClass = (options?: MakeDurableObjectClassOptions) => {
+   new (ctx: DoState, env: Env): DoObject<SyncBackendRpcInterface>
+ }
+
+ /**
+  * Creates a Durable Object class for handling WebSocket-based sync.
+  * A sync durable object is uniquely scoped to a specific `storeId`.
+  *
+  * The sync DO supports 3 transport modes:
+  * - HTTP JSON-RPC
+  * - WebSocket
+  * - Durable Object RPC calls (only works in combination with `@livestore/adapter-cf`)
+  *
+  * Example:
+  *
+  * ```ts
+  * // In your Cloudflare Worker file
+  * import { makeDurableObject } from '@livestore/sync-cf/cf-worker'
+  *
+  * export class SyncBackendDO extends makeDurableObject({
+  *   onPush: async (message) => {
+  *     console.log('onPush', message.batch)
+  *   },
+  *   onPull: async (message) => {
+  *     console.log('onPull', message)
+  *   },
+  * }) {}
+  * ```
+  *
+  * `wrangler.toml`
+  * ```toml
+  * [[durable_objects.bindings]]
+  * name = "SYNC_BACKEND_DO"
+  * class_name = "SyncBackendDO"
+  *
+  * [[migrations]]
+  * tag = "v1"
+  * new_sqlite_classes = ["SyncBackendDO"]
+  * ```
+  */
+ export const makeDurableObject: MakeDurableObjectClass = (options) => {
+   const enabledTransports = options?.enabledTransports ?? new Set(['http', 'ws', 'do-rpc'])
+
+   const Logging = Logger.consoleWithThread('SyncDo')
+
+   const Observability = options?.otel?.baseUrl
+     ? Otlp.layer({
+         baseUrl: options.otel.baseUrl,
+         tracerExportInterval: 50,
+         resource: {
+           serviceName: options.otel.serviceName ?? 'sync-cf-do',
+         },
+       }).pipe(Layer.provide(FetchHttpClient.layer))
+     : Layer.empty
+
+   return class SyncBackendDOBase extends DurableObjectBase implements SyncBackendRpcInterface {
+     __DURABLE_OBJECT_BRAND = 'SyncBackendDOBase' as never
+     ctx: CfTypes.DurableObjectState
+     env: Env
+
+     constructor(ctx: CfTypes.DurableObjectState, env: Env) {
+       super(ctx, env)
+       this.ctx = ctx
+       this.env = env
+
+       const WebSocketRpcServerLive = makeRpcServer({ doSelf: this, doOptions: options })
+
+       // This registers the `webSocketMessage` and `webSocketClose` handlers
+       if (enabledTransports.has('ws')) {
+         setupDurableObjectWebSocketRpc({
+           doSelf: this,
+           rpcLayer: WebSocketRpcServerLive,
+           webSocketMode: 'hibernate',
+           // See `pull.ts` for more details on how `pull` Effect RPC request streams are handled
+           // in combination with DO hibernation
+           onMessage: (request, ws) => {
+             if (request._tag === 'Request' && request.tag === 'SyncWsRpc.Pull') {
+               // Is Pull request: add requestId to pullRequestIds
+               const attachment = ws.deserializeAttachment()
+               const { pullRequestIds, ...rest } = Schema.decodeSync(WebSocketAttachmentSchema)(attachment)
+               ws.serializeAttachment(
+                 Schema.encodeSync(WebSocketAttachmentSchema)({
+                   ...rest,
+                   pullRequestIds: [...pullRequestIds, request.id],
+                 }),
+               )
+             } else if (request._tag === 'Interrupt') {
+               // Is Interrupt request: remove requestId from pullRequestIds
+               const attachment = ws.deserializeAttachment()
+               const { pullRequestIds, ...rest } = Schema.decodeSync(WebSocketAttachmentSchema)(attachment)
+               ws.serializeAttachment(
+                 Schema.encodeSync(WebSocketAttachmentSchema)({
+                   ...rest,
+                   pullRequestIds: pullRequestIds.filter((id) => id !== request.requestId),
+                 }),
+               )
+               // TODO also emit `Exit` stream RPC message
+             }
+           },
+           mainLayer: Observability,
+         })
+       }
+     }
+
+     fetch = async (request: Request): Promise<Response> =>
+       Effect.gen(this, function* () {
+         const requestParamsResult = getSyncRequestSearchParams(request)
+         if (requestParamsResult._tag === 'None') {
+           throw new Error('No search params found in request URL')
+         }
+
+         const { storeId, payload, transport } = requestParamsResult.value
+
+         if (enabledTransports.has(transport) === false) {
+           throw new Error(`Transport ${transport} is not enabled (based on \`options.enabledTransports\`)`)
+         }
+
+         if (transport === 'http') {
+           return yield* this.handleHttp(request)
+         }
+
+         if (transport === 'ws') {
+           const { 0: client, 1: server } = new WebSocketPair()
+
+           // Since we're using websocket hibernation, we need to remember the storeId for subsequent `webSocketMessage` calls
+           server.serializeAttachment(
+             Schema.encodeSync(WebSocketAttachmentSchema)({ storeId, payload, pullRequestIds: [] }),
+           )
+
+           // See https://developers.cloudflare.com/durable-objects/examples/websocket-hibernation-server
+
+           this.ctx.acceptWebSocket(server)
+
+           // Ping requests are sent by Effect RPC internally
+           this.ctx.setWebSocketAutoResponse(
+             new WebSocketRequestResponsePair(
+               JSON.stringify(RpcMessage.constPing),
+               JSON.stringify(RpcMessage.constPong),
+             ),
+           )
+
+           return new Response(null, {
+             status: 101,
+             webSocket: client,
+           })
+         }
+
+         console.error('Invalid path', request.url)
+
+         return new Response('Invalid path', {
+           status: 400,
+           statusText: 'Bad Request',
+         })
+       }).pipe(
+         Effect.tapCauseLogPretty, // Also log errors to console before catching them
+         Effect.catchAllCause((cause) =>
+           Effect.succeed(new Response('Error', { status: 500, statusText: cause.toString() })),
+         ),
+         Effect.withSpan('@livestore/sync-cf:durable-object:fetch'),
+         Effect.provide(DoCtx.Default({ doSelf: this, doOptions: options, from: request })),
+         this.runEffectAsPromise,
+       )
+
+     /**
+      * Handles DO <-> DO RPC calls
+      */
+     async rpc(payload: Uint8Array<ArrayBuffer>): Promise<Uint8Array<ArrayBuffer> | CfTypes.ReadableStream> {
+       if (enabledTransports.has('do-rpc') === false) {
+         throw new Error('Do RPC transport is not enabled (based on `options.enabledTransports`)')
+       }
+
+       return createDoRpcHandler({ payload, input: { doSelf: this, doOptions: options } }).pipe(
+         Effect.withSpan('@livestore/sync-cf:durable-object:rpc'),
+         this.runEffectAsPromise,
+       )
+     }
+
+     /**
+      * Handles HTTP RPC calls
+      *
+      * Requires the `enable_request_signal` compatibility flag to properly support `pull` streaming responses
+      */
+     private handleHttp = (request: CfTypes.Request) =>
+       createHttpRpcHandler({
+         request,
+       }).pipe(Effect.withSpan('@livestore/sync-cf:durable-object:handleHttp'))
+
+     private runEffectAsPromise = <T, E = never>(effect: Effect.Effect<T, E, Scope.Scope>): Promise<T> =>
+       effect.pipe(
+         Effect.tapCauseLogPretty,
+         Logger.withMinimumLogLevel(LogLevel.Debug),
+         Effect.provide(Layer.mergeAll(Observability, Logging)),
+         Effect.scoped,
+         Effect.runPromise,
+       )
+   }
+ }
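
Note: the `fetch` handler above only upgrades to a WebSocket when the request carries the sync search params that `getSyncRequestSearchParams` expects. A minimal client-side sketch of such a request follows; the host/path are placeholders, and the `transport`/`payload` param names are assumptions based on the variable names above (only `storeId` is confirmed by `layer.ts`):

  // Hypothetical connection URL; endpoint host and path are placeholders
  const url = new URL('wss://sync.example.com/')
  url.searchParams.set('storeId', 'my-store') // required: the sync DO is scoped to this storeId
  url.searchParams.set('transport', 'ws') // assumed param name; must be one of the enabled transports
  const ws = new WebSocket(url) // server responds with 101 and accepts a hibernated WebSocket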
package/src/cf-worker/do/layer.ts
@@ -0,0 +1,107 @@
+ import { UnexpectedError } from '@livestore/common'
+ import { EventSequenceNumber, State } from '@livestore/common/schema'
+ import type { CfTypes } from '@livestore/common-cf'
+ import { shouldNeverHappen } from '@livestore/utils'
+ import { Effect, Predicate } from '@livestore/utils/effect'
+ import { nanoid } from '@livestore/utils/nanoid'
+ import type { Env, MakeDurableObjectClassOptions, RpcSubscription } from '../shared.ts'
+ import { contextTable, eventlogTable } from './sqlite.ts'
+ import { makeStorage } from './sync-storage.ts'
+
+ const CacheSymbol = Symbol('Cache')
+
+ export interface DoCtxInput {
+   doSelf: CfTypes.DurableObject & {
+     ctx: CfTypes.DurableObjectState
+     env: Env
+   }
+   doOptions: MakeDurableObjectClassOptions | undefined
+   from: CfTypes.Request | { storeId: string }
+ }
+
+ export class DoCtx extends Effect.Service<DoCtx>()('DoCtx', {
+   effect: Effect.fn(
+     function* ({ doSelf, doOptions, from }: DoCtxInput) {
+       if ((doSelf as any)[CacheSymbol] !== undefined) {
+         return (doSelf as any)[CacheSymbol] as never
+       }
+
+       const getStoreId = (from: CfTypes.Request | { storeId: string }) => {
+         if (Predicate.hasProperty(from, 'url')) {
+           const url = new URL(from.url)
+           return (
+             url.searchParams.get('storeId') ?? shouldNeverHappen(`No storeId provided in request URL search params`)
+           )
+         }
+         return from.storeId
+       }
+
+       const storeId = getStoreId(from)
+       const storage = makeStorage(doSelf.ctx, doSelf.env, storeId)
+
+       // Initialize database tables
+       {
+         const colSpec = State.SQLite.makeColumnSpec(eventlogTable.sqliteDef.ast)
+         // D1 database is async, so we need to use a promise
+         yield* Effect.promise(() =>
+           doSelf.env.DB.exec(`CREATE TABLE IF NOT EXISTS "${storage.dbName}" (${colSpec}) strict`),
+         )
+       }
+       {
+         const colSpec = State.SQLite.makeColumnSpec(contextTable.sqliteDef.ast)
+         doSelf.ctx.storage.sql.exec(`CREATE TABLE IF NOT EXISTS "${contextTable.sqliteDef.name}" (${colSpec}) strict`)
+       }
+
+       const storageRow = doSelf.ctx.storage.sql
+         .exec(`SELECT * FROM "${contextTable.sqliteDef.name}" WHERE storeId = ?`, storeId)
+         .toArray()[0] as typeof contextTable.rowSchema.Type | undefined
+
+       const currentHeadRef = { current: storageRow?.currentHead ?? EventSequenceNumber.ROOT.global }
+
+       // TODO do consistency check with eventlog table to make sure the head is consistent
+
+       // Should be the same backendId for lifetime of the durable object
+       const backendId = storageRow?.backendId ?? nanoid()
+
+       const updateCurrentHead = (currentHead: EventSequenceNumber.GlobalEventSequenceNumber) => {
+         doSelf.ctx.storage.sql.exec(
+           `INSERT OR REPLACE INTO "${contextTable.sqliteDef.name}" (storeId, currentHead, backendId) VALUES (?, ?, ?)`,
+           storeId,
+           currentHead,
+           backendId,
+         )
+
+         currentHeadRef.current = currentHead
+
+         // I still don't know why we need to re-assign this ref to the `doSelf` object but somehow this seems to be needed 😵‍💫
+         // @ts-expect-error
+         doSelf[CacheSymbol].currentHeadRef = { current: currentHead }
+       }
+
+       const rpcSubscriptions = new Map<string, RpcSubscription>()
+
+       const storageCache = {
+         storeId,
+         backendId,
+         currentHeadRef,
+         updateCurrentHead,
+         storage,
+         doOptions,
+         env: doSelf.env,
+         ctx: doSelf.ctx,
+         rpcSubscriptions,
+       }
+
+       ;(doSelf as any)[CacheSymbol] = storageCache
+
+       // Set initial current head to root
+       if (storageRow === undefined) {
+         updateCurrentHead(EventSequenceNumber.ROOT.global)
+       }
+
+       return storageCache
+     },
+     UnexpectedError.mapToUnexpectedError,
+     Effect.withSpan('@livestore/sync-cf:durable-object:makeDoCtx'),
+   ),
+ }) {}
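
The DoCtx service memoizes its result directly on the Durable Object instance via a Symbol key, so the table setup and head lookup run at most once per DO lifetime and are reused across requests and hibernation wakeups. A stripped-down sketch of that memoization pattern (the helper name and shape are illustrative, not part of the diff):

  // Simplified illustration of the per-instance caching used above
  const CacheSymbol = Symbol('Cache')

  const getOrCreateCtx = <T>(doSelf: object, create: () => T): T => {
    const cached = (doSelf as any)[CacheSymbol]
    if (cached !== undefined) return cached as T // reuse the context on subsequent calls
    const ctx = create() // e.g. create tables, load the stored head row
    ;(doSelf as any)[CacheSymbol] = ctx
    return ctx
  }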
package/src/cf-worker/do/pull.ts
@@ -0,0 +1,64 @@
+ import { BackendIdMismatchError, InvalidPullError, SyncBackend, UnexpectedError } from '@livestore/common'
+ import { Effect, Option, pipe, ReadonlyArray, type Schema, Stream } from '@livestore/utils/effect'
+ import { SyncMessage } from '../../common/mod.ts'
+ import { PULL_CHUNK_SIZE } from '../shared.ts'
+ import { DoCtx } from './layer.ts'
+
+ // Notes on stream handling:
+ // We're intentionally closing the stream once we've read all existing events
+ //
+ // WebSocket:
+ // - Further chunks will be emitted manually in `push.ts`
+ // - If the client sends an `Interrupt` RPC message, it will be handled in the `durable-object.ts` constructor
+ // DO RPC:
+ // - Further chunks will be emitted manually in `push.ts`
+ // - If the client sends an `Interrupt` RPC message, TODO
+ export const makeEndingPullStream = (
+   req: SyncMessage.PullRequest,
+   payload: Schema.JsonValue | undefined,
+ ): Stream.Stream<SyncMessage.PullResponse, InvalidPullError, DoCtx> =>
+   Effect.gen(function* () {
+     const { doOptions, backendId, storeId, storage } = yield* DoCtx
+
+     if (doOptions?.onPull) {
+       yield* Effect.tryAll(() => doOptions!.onPull!(req, { storeId, payload })).pipe(
+         UnexpectedError.mapToUnexpectedError,
+       )
+     }
+
+     if (req.cursor._tag === 'Some' && req.cursor.value.backendId !== backendId) {
+       return yield* new BackendIdMismatchError({ expected: backendId, received: req.cursor.value.backendId })
+     }
+
+     // TODO use streaming for db results
+     const remainingEvents = yield* storage.getEvents(Option.getOrUndefined(req.cursor)?.eventSequenceNumber)
+
+     const batches = pipe(
+       remainingEvents,
+       ReadonlyArray.chunksOf(PULL_CHUNK_SIZE),
+       ReadonlyArray.map((batch, i) => {
+         const remaining = Math.max(0, remainingEvents.length - (i + 1) * PULL_CHUNK_SIZE)
+
+         return SyncMessage.PullResponse.make({
+           batch,
+           pageInfo: remaining > 0 ? SyncBackend.pageInfoMoreKnown(remaining) : SyncBackend.pageInfoNoMore,
+           backendId,
+         })
+       }),
+     )
+
+     return Stream.fromIterable(batches).pipe(
+       Stream.tap(
+         Effect.fn(function* (res) {
+           if (doOptions?.onPullRes) {
+             yield* Effect.tryAll(() => doOptions.onPullRes!(res)).pipe(UnexpectedError.mapToUnexpectedError)
+           }
+         }),
+       ),
+       Stream.emitIfEmpty(SyncMessage.emptyPullResponse(backendId)),
+     )
+   }).pipe(
+     Stream.unwrap,
+     Stream.mapError((cause) => InvalidPullError.make({ cause })),
+     Stream.withSpan('cloudflare-provider:pull'),
+   )
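
To make the pagination arithmetic above concrete: with a hypothetical PULL_CHUNK_SIZE of 100 and 250 remaining events, the stream emits three PullResponse chunks whose pageInfo advertises how many events are still to come (150, then 50, then "no more"):

  // Illustrative only; 100 is a made-up chunk size, the real value comes from ../shared.ts
  const PULL_CHUNK_SIZE = 100
  const total = 250
  const remainingAfter = (i: number) => Math.max(0, total - (i + 1) * PULL_CHUNK_SIZE)
  console.log(remainingAfter(0), remainingAfter(1), remainingAfter(2)) // 150 50 0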
package/src/cf-worker/do/push.ts
@@ -0,0 +1,162 @@
+ import {
+   BackendIdMismatchError,
+   InvalidPushError,
+   ServerAheadError,
+   SyncBackend,
+   UnexpectedError,
+ } from '@livestore/common'
+ import { type CfTypes, emitStreamResponse } from '@livestore/common-cf'
+ import { Effect, Option, type RpcMessage, Schema } from '@livestore/utils/effect'
+ import { SyncMessage } from '../../common/mod.ts'
+ import { type Env, type MakeDurableObjectClassOptions, type StoreId, WebSocketAttachmentSchema } from '../shared.ts'
+ import { DoCtx } from './layer.ts'
+
+ export const makePush =
+   ({
+     payload,
+     options,
+     storeId,
+     ctx,
+     env,
+   }: {
+     payload: Schema.JsonValue | undefined
+     options: MakeDurableObjectClassOptions | undefined
+     storeId: StoreId
+     ctx: CfTypes.DurableObjectState
+     env: Env
+   }) =>
+   (pushRequest: Omit<SyncMessage.PushRequest, '_tag'>) =>
+     Effect.gen(function* () {
+       // yield* Effect.log(`Pushing ${decodedMessage.batch.length} events`, decodedMessage.batch)
+       const { backendId, storage, currentHeadRef, updateCurrentHead, rpcSubscriptions } = yield* DoCtx
+
+       if (pushRequest.batch.length === 0) {
+         return SyncMessage.PushAck.make({})
+       }
+
+       if (options?.onPush) {
+         yield* Effect.tryAll(() => options.onPush!(pushRequest, { storeId, payload })).pipe(
+           UnexpectedError.mapToUnexpectedError,
+         )
+       }
+
+       if (pushRequest.backendId._tag === 'Some' && pushRequest.backendId.value !== backendId) {
+         return yield* new BackendIdMismatchError({ expected: backendId, received: pushRequest.backendId.value })
+       }
+
+       // This part of the code needs to run sequentially to avoid race conditions
+       const { createdAt } = yield* Effect.gen(function* () {
+         const currentHead = currentHeadRef.current
+         // TODO handle clientId unique conflict
+         // Validate the batch
+         const firstEventParent = pushRequest.batch[0]!.parentSeqNum
+         if (firstEventParent !== currentHead) {
+           return yield* new ServerAheadError({ minimumExpectedNum: currentHead, providedNum: firstEventParent })
+         }
+
+         const createdAt = new Date().toISOString()
+
+         // TODO possibly model this as a queue in order to speed up subsequent pushes
+         yield* storage.appendEvents(pushRequest.batch, createdAt)
+
+         updateCurrentHead(pushRequest.batch.at(-1)!.seqNum)
+
+         return { createdAt }
+       }).pipe(blockConcurrencyWhile(ctx))
+
+       // Run in background but already return the push ack to the client
+       yield* Effect.gen(function* () {
+         const connectedClients = ctx.getWebSockets()
+
+         // Dual broadcasting: WebSocket + RPC clients
+         const pullRes = SyncMessage.PullResponse.make({
+           batch: pushRequest.batch.map((eventEncoded) => ({
+             eventEncoded,
+             metadata: Option.some(SyncMessage.SyncMetadata.make({ createdAt })),
+           })),
+           pageInfo: SyncBackend.pageInfoNoMore,
+           backendId,
+         })
+
+         const pullResEnc = Schema.encodeSync(SyncMessage.PullResponse)(pullRes)
+
+         // Broadcast to WebSocket clients
+         if (connectedClients.length > 0) {
+           // Only calling once for now.
+           if (options?.onPullRes) {
+             yield* Effect.tryAll(() => options.onPullRes!(pullRes)).pipe(UnexpectedError.mapToUnexpectedError)
+           }
+
+           // NOTE we're also sending the pullRes to the pushing ws client as a confirmation
+           for (const conn of connectedClients) {
+             // conn.send(pullResEnc)
+             const attachment = Schema.decodeSync(WebSocketAttachmentSchema)(conn.deserializeAttachment())
+
+             // We're doing something a bit "advanced" here as we're directly emitting Effect RPC-compatible
+             // response messages on the Effect RPC-managed websocket connection to the WS client.
+             // For this we need to get the RPC `requestId` from the WebSocket attachment.
+             for (const requestId of attachment.pullRequestIds) {
+               const res: RpcMessage.ResponseChunkEncoded = {
+                 _tag: 'Chunk',
+                 requestId,
+                 values: [pullResEnc],
+               }
+               conn.send(JSON.stringify(res))
+             }
+           }
+
+           yield* Effect.logDebug(`Broadcasted to ${connectedClients.length} WebSocket clients`)
+         }
+
+         // RPC broadcasting would require reconstructing client stubs from clientIds
+         if (rpcSubscriptions.size > 0) {
+           yield* Effect.forEach(
+             rpcSubscriptions.values(),
+             (subscription) =>
+               emitStreamResponse({
+                 callerContext: subscription.callerContext,
+                 env,
+                 requestId: subscription.requestId,
+                 values: [pullResEnc],
+               }).pipe(Effect.tapCauseLogPretty, Effect.exit),
+             { concurrency: 'unbounded' },
+           )
+
+           yield* Effect.logDebug(`Broadcasted to ${rpcSubscriptions.size} RPC clients`)
+         }
+       }).pipe(
+         Effect.tapCauseLogPretty,
+         Effect.withSpan('push-rpc-broadcast'),
+         Effect.uninterruptible, // We need to make sure Effect RPC doesn't interrupt this fiber
+         Effect.fork,
+       )
+
+       // We need to yield here to make sure the fork above is kicked off before we let Effect RPC finish the request
+       yield* Effect.yieldNow()
+
+       return SyncMessage.PushAck.make({})
+     }).pipe(
+       Effect.tap(
+         Effect.fn(function* (message) {
+           if (options?.onPushRes) {
+             yield* Effect.tryAll(() => options.onPushRes!(message)).pipe(UnexpectedError.mapToUnexpectedError)
+           }
+         }),
+       ),
+       Effect.mapError((cause) => InvalidPushError.make({ cause })),
+     )
+
+ /**
+  * @see https://developers.cloudflare.com/durable-objects/api/state/#blockconcurrencywhile
+  */
+ const blockConcurrencyWhile =
+   (ctx: CfTypes.DurableObjectState) =>
+   <A, E, R>(eff: Effect.Effect<A, E, R>) =>
+     Effect.gen(function* () {
+       const runtime = yield* Effect.runtime<R>()
+       const exit = yield* Effect.promise(() =>
+         ctx.blockConcurrencyWhile(() => eff.pipe(Effect.provide(runtime), Effect.runPromiseExit)),
+       )
+
+       return yield* exit
+     })
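
The push validation above enforces a linear event log: the parentSeqNum of the first event in the batch must equal the backend's current head, otherwise a ServerAheadError tells the client to pull the newer events first. A schematic illustration of that guard with plain numbers (the real values are EventSequenceNumber sequence numbers, not bare integers):

  // Schematic check only; mirrors the guard inside makePush
  const currentHead = 7
  const batch = [{ seqNum: 8, parentSeqNum: 7 }, { seqNum: 9, parentSeqNum: 8 }]
  if (batch[0]!.parentSeqNum !== currentHead) {
    throw new Error('ServerAheadError: client must pull and rebase before pushing')
  }
  const newHead = batch.at(-1)!.seqNum // 9, persisted via updateCurrentHead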
package/src/cf-worker/do/sqlite.ts
@@ -0,0 +1,28 @@
+ import { EventSequenceNumber, State } from '@livestore/common/schema'
+ import { Schema } from '@livestore/utils/effect'
+ import { PERSISTENCE_FORMAT_VERSION } from '../shared.ts'
+
+ export const eventlogTable = State.SQLite.table({
+   // NOTE actual table name is determined at runtime to use proper storeId
+   name: `eventlog_${PERSISTENCE_FORMAT_VERSION}_$storeId`,
+   columns: {
+     seqNum: State.SQLite.integer({ primaryKey: true, schema: EventSequenceNumber.GlobalEventSequenceNumber }),
+     parentSeqNum: State.SQLite.integer({ schema: EventSequenceNumber.GlobalEventSequenceNumber }),
+     name: State.SQLite.text({}),
+     args: State.SQLite.text({ schema: Schema.parseJson(Schema.Any), nullable: true }),
+     /** ISO date format. Currently only used for debugging purposes. */
+     createdAt: State.SQLite.text({}),
+     clientId: State.SQLite.text({}),
+     sessionId: State.SQLite.text({}),
+   },
+ })
+
+ /** Will only ever have one row per durable object. */
+ export const contextTable = State.SQLite.table({
+   name: `context_${PERSISTENCE_FORMAT_VERSION}`,
+   columns: {
+     storeId: State.SQLite.text({ primaryKey: true }),
+     currentHead: State.SQLite.integer({ schema: EventSequenceNumber.GlobalEventSequenceNumber }),
+     backendId: State.SQLite.text({}),
+   },
+ })
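
The eventlog table name keeps a literal `$storeId` placeholder; presumably the storage layer in `sync-storage.ts` (not shown in this section) substitutes the actual storeId, so the D1 eventlog table is named per store. A hedged sketch of what the CREATE TABLE issued in `layer.ts` might then look like (the concrete PERSISTENCE_FORMAT_VERSION value, the storeId, and the exact column SQL are all assumptions):

  // Assuming PERSISTENCE_FORMAT_VERSION = 'v1' and storeId = 'my-store' (both hypothetical)
  const dbName = 'eventlog_v1_my-store'
  const createSql = `CREATE TABLE IF NOT EXISTS "${dbName}" (
    seqNum INTEGER PRIMARY KEY, parentSeqNum INTEGER, name TEXT,
    args TEXT, createdAt TEXT, clientId TEXT, sessionId TEXT
  ) strict`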