@livestore/sync-cf 0.4.0-dev.2 → 0.4.0-dev.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (136)
  1. package/README.md +60 -0
  2. package/dist/.tsbuildinfo +1 -1
  3. package/dist/cf-worker/do/durable-object.d.ts +45 -0
  4. package/dist/cf-worker/do/durable-object.d.ts.map +1 -0
  5. package/dist/cf-worker/do/durable-object.js +151 -0
  6. package/dist/cf-worker/do/durable-object.js.map +1 -0
  7. package/dist/cf-worker/do/layer.d.ts +34 -0
  8. package/dist/cf-worker/do/layer.d.ts.map +1 -0
  9. package/dist/cf-worker/do/layer.js +91 -0
  10. package/dist/cf-worker/do/layer.js.map +1 -0
  11. package/dist/cf-worker/do/pull.d.ts +6 -0
  12. package/dist/cf-worker/do/pull.d.ts.map +1 -0
  13. package/dist/cf-worker/do/pull.js +47 -0
  14. package/dist/cf-worker/do/pull.js.map +1 -0
  15. package/dist/cf-worker/do/push.d.ts +14 -0
  16. package/dist/cf-worker/do/push.d.ts.map +1 -0
  17. package/dist/cf-worker/do/push.js +131 -0
  18. package/dist/cf-worker/do/push.js.map +1 -0
  19. package/dist/cf-worker/{durable-object.d.ts → do/sqlite.d.ts} +83 -67
  20. package/dist/cf-worker/do/sqlite.d.ts.map +1 -0
  21. package/dist/cf-worker/do/sqlite.js +36 -0
  22. package/dist/cf-worker/do/sqlite.js.map +1 -0
  23. package/dist/cf-worker/do/sync-storage.d.ts +25 -0
  24. package/dist/cf-worker/do/sync-storage.d.ts.map +1 -0
  25. package/dist/cf-worker/do/sync-storage.js +191 -0
  26. package/dist/cf-worker/do/sync-storage.js.map +1 -0
  27. package/dist/cf-worker/do/transport/do-rpc-server.d.ts +9 -0
  28. package/dist/cf-worker/do/transport/do-rpc-server.d.ts.map +1 -0
  29. package/dist/cf-worker/do/transport/do-rpc-server.js +45 -0
  30. package/dist/cf-worker/do/transport/do-rpc-server.js.map +1 -0
  31. package/dist/cf-worker/do/transport/http-rpc-server.d.ts +8 -0
  32. package/dist/cf-worker/do/transport/http-rpc-server.d.ts.map +1 -0
  33. package/dist/cf-worker/do/transport/http-rpc-server.js +30 -0
  34. package/dist/cf-worker/do/transport/http-rpc-server.js.map +1 -0
  35. package/dist/cf-worker/do/transport/ws-rpc-server.d.ts +4 -0
  36. package/dist/cf-worker/do/transport/ws-rpc-server.d.ts.map +1 -0
  37. package/dist/cf-worker/do/transport/ws-rpc-server.js +21 -0
  38. package/dist/cf-worker/do/transport/ws-rpc-server.js.map +1 -0
  39. package/dist/cf-worker/mod.d.ts +4 -2
  40. package/dist/cf-worker/mod.d.ts.map +1 -1
  41. package/dist/cf-worker/mod.js +3 -2
  42. package/dist/cf-worker/mod.js.map +1 -1
  43. package/dist/cf-worker/shared.d.ts +175 -0
  44. package/dist/cf-worker/shared.d.ts.map +1 -0
  45. package/dist/cf-worker/shared.js +43 -0
  46. package/dist/cf-worker/shared.js.map +1 -0
  47. package/dist/cf-worker/worker.d.ts +59 -51
  48. package/dist/cf-worker/worker.d.ts.map +1 -1
  49. package/dist/cf-worker/worker.js +75 -43
  50. package/dist/cf-worker/worker.js.map +1 -1
  51. package/dist/client/mod.d.ts +4 -0
  52. package/dist/client/mod.d.ts.map +1 -0
  53. package/dist/client/mod.js +4 -0
  54. package/dist/client/mod.js.map +1 -0
  55. package/dist/client/transport/do-rpc-client.d.ts +40 -0
  56. package/dist/client/transport/do-rpc-client.d.ts.map +1 -0
  57. package/dist/client/transport/do-rpc-client.js +115 -0
  58. package/dist/client/transport/do-rpc-client.js.map +1 -0
  59. package/dist/client/transport/http-rpc-client.d.ts +43 -0
  60. package/dist/client/transport/http-rpc-client.d.ts.map +1 -0
  61. package/dist/client/transport/http-rpc-client.js +103 -0
  62. package/dist/client/transport/http-rpc-client.js.map +1 -0
  63. package/dist/client/transport/ws-rpc-client.d.ts +46 -0
  64. package/dist/client/transport/ws-rpc-client.d.ts.map +1 -0
  65. package/dist/client/transport/ws-rpc-client.js +108 -0
  66. package/dist/client/transport/ws-rpc-client.js.map +1 -0
  67. package/dist/common/constants.d.ts +7 -0
  68. package/dist/common/constants.d.ts.map +1 -0
  69. package/dist/common/constants.js +17 -0
  70. package/dist/common/constants.js.map +1 -0
  71. package/dist/common/do-rpc-schema.d.ts +76 -0
  72. package/dist/common/do-rpc-schema.d.ts.map +1 -0
  73. package/dist/common/do-rpc-schema.js +48 -0
  74. package/dist/common/do-rpc-schema.js.map +1 -0
  75. package/dist/common/http-rpc-schema.d.ts +58 -0
  76. package/dist/common/http-rpc-schema.d.ts.map +1 -0
  77. package/dist/common/http-rpc-schema.js +37 -0
  78. package/dist/common/http-rpc-schema.js.map +1 -0
  79. package/dist/common/mod.d.ts +8 -1
  80. package/dist/common/mod.d.ts.map +1 -1
  81. package/dist/common/mod.js +7 -1
  82. package/dist/common/mod.js.map +1 -1
  83. package/dist/common/{ws-message-types.d.ts → sync-message-types.d.ts} +119 -153
  84. package/dist/common/sync-message-types.d.ts.map +1 -0
  85. package/dist/common/sync-message-types.js +60 -0
  86. package/dist/common/sync-message-types.js.map +1 -0
  87. package/dist/common/ws-rpc-schema.d.ts +55 -0
  88. package/dist/common/ws-rpc-schema.d.ts.map +1 -0
  89. package/dist/common/ws-rpc-schema.js +32 -0
  90. package/dist/common/ws-rpc-schema.js.map +1 -0
  91. package/package.json +7 -8
  92. package/src/cf-worker/do/durable-object.ts +238 -0
  93. package/src/cf-worker/do/layer.ts +128 -0
  94. package/src/cf-worker/do/pull.ts +75 -0
  95. package/src/cf-worker/do/push.ts +205 -0
  96. package/src/cf-worker/do/sqlite.ts +37 -0
  97. package/src/cf-worker/do/sync-storage.ts +323 -0
  98. package/src/cf-worker/do/transport/do-rpc-server.ts +84 -0
  99. package/src/cf-worker/do/transport/http-rpc-server.ts +51 -0
  100. package/src/cf-worker/do/transport/ws-rpc-server.ts +34 -0
  101. package/src/cf-worker/mod.ts +4 -2
  102. package/src/cf-worker/shared.ts +141 -0
  103. package/src/cf-worker/worker.ts +138 -116
  104. package/src/client/mod.ts +3 -0
  105. package/src/client/transport/do-rpc-client.ts +189 -0
  106. package/src/client/transport/http-rpc-client.ts +225 -0
  107. package/src/client/transport/ws-rpc-client.ts +202 -0
  108. package/src/common/constants.ts +18 -0
  109. package/src/common/do-rpc-schema.ts +54 -0
  110. package/src/common/http-rpc-schema.ts +40 -0
  111. package/src/common/mod.ts +10 -1
  112. package/src/common/sync-message-types.ts +117 -0
  113. package/src/common/ws-rpc-schema.ts +36 -0
  114. package/dist/cf-worker/cf-types.d.ts +0 -2
  115. package/dist/cf-worker/cf-types.d.ts.map +0 -1
  116. package/dist/cf-worker/cf-types.js +0 -2
  117. package/dist/cf-worker/cf-types.js.map +0 -1
  118. package/dist/cf-worker/durable-object.d.ts.map +0 -1
  119. package/dist/cf-worker/durable-object.js +0 -317
  120. package/dist/cf-worker/durable-object.js.map +0 -1
  121. package/dist/common/ws-message-types.d.ts.map +0 -1
  122. package/dist/common/ws-message-types.js +0 -57
  123. package/dist/common/ws-message-types.js.map +0 -1
  124. package/dist/sync-impl/mod.d.ts +0 -2
  125. package/dist/sync-impl/mod.d.ts.map +0 -1
  126. package/dist/sync-impl/mod.js +0 -2
  127. package/dist/sync-impl/mod.js.map +0 -1
  128. package/dist/sync-impl/ws-impl.d.ts +0 -7
  129. package/dist/sync-impl/ws-impl.d.ts.map +0 -1
  130. package/dist/sync-impl/ws-impl.js +0 -175
  131. package/dist/sync-impl/ws-impl.js.map +0 -1
  132. package/src/cf-worker/cf-types.ts +0 -12
  133. package/src/cf-worker/durable-object.ts +0 -478
  134. package/src/common/ws-message-types.ts +0 -114
  135. package/src/sync-impl/mod.ts +0 -1
  136. package/src/sync-impl/ws-impl.ts +0 -274
@@ -0,0 +1,238 @@
/// <reference types="@cloudflare/workers-types" />

import { DurableObject } from 'cloudflare:workers'
import { type CfTypes, setupDurableObjectWebSocketRpc } from '@livestore/common-cf'
import { CfDeclare } from '@livestore/common-cf/declare'
import {
  Effect,
  FetchHttpClient,
  Layer,
  Logger,
  LogLevel,
  Otlp,
  RpcMessage,
  Schema,
  type Scope,
} from '@livestore/utils/effect'
import {
  type Env,
  type MakeDurableObjectClassOptions,
  matchSyncRequest,
  type SyncBackendRpcInterface,
  WebSocketAttachmentSchema,
} from '../shared.ts'
import { DoCtx } from './layer.ts'
import { createDoRpcHandler } from './transport/do-rpc-server.ts'
import { createHttpRpcHandler } from './transport/http-rpc-server.ts'
import { makeRpcServer } from './transport/ws-rpc-server.ts'

// NOTE We need to redeclare runtime types here to avoid type conflicts with the lib.dom Response type.
// TODO get rid of those once CF fixed their type mismatch in the worker types
declare class Request extends CfDeclare.Request {}
declare class Response extends CfDeclare.Response {}
declare class WebSocketPair extends CfDeclare.WebSocketPair {}
declare class WebSocketRequestResponsePair extends CfDeclare.WebSocketRequestResponsePair {}

// Re-typed constructor so the class below can extend the CF `DurableObject` base
// while exposing the `CfTypes.*` view of `ctx`/`env` to the rest of this package.
const DurableObjectBase = DurableObject<Env> as any as new (
  state: CfTypes.DurableObjectState,
  env: Env,
) => CfTypes.DurableObject & { ctx: CfTypes.DurableObjectState; env: Env }

// Type aliases needed to avoid TS bug https://github.com/microsoft/TypeScript/issues/55021
export type DoState = CfTypes.DurableObjectState
export type DoObject<T> = CfTypes.DurableObject & T

export type MakeDurableObjectClass = (options?: MakeDurableObjectClassOptions) => {
  new (ctx: DoState, env: Env): DoObject<SyncBackendRpcInterface>
}

/**
 * Creates a Durable Object class for handling WebSocket-based sync.
 * A sync durable object is uniquely scoped to a specific `storeId`.
 *
 * The sync DO supports 3 transport modes:
 * - HTTP JSON-RPC
 * - WebSocket
 * - Durable Object RPC calls (only works in combination with `@livestore/adapter-cf`)
 *
 * Example:
 *
 * ```ts
 * // In your Cloudflare Worker file
 * import { makeDurableObject } from '@livestore/sync-cf/cf-worker'
 *
 * export class SyncBackendDO extends makeDurableObject({
 *   onPush: async (message) => {
 *     console.log('onPush', message.batch)
 *   },
 *   onPull: async (message) => {
 *     console.log('onPull', message)
 *   },
 * }) {}
 * ```
 *
 * `wrangler.toml`
 * ```toml
 * [[durable_objects.bindings]]
 * name = "SYNC_BACKEND_DO"
 * class_name = "SyncBackendDO"
 *
 * [[migrations]]
 * tag = "v1"
 * new_sqlite_classes = ["SyncBackendDO"]
 * ```
 */
export const makeDurableObject: MakeDurableObjectClass = (options) => {
  // Callers may restrict transports; all three are enabled by default.
  const enabledTransports = options?.enabledTransports ?? new Set(['http', 'ws', 'do-rpc'])

  const Logging = Logger.consoleWithThread('SyncDo')

  // Optional OTLP tracing; a no-op layer when no collector base URL is configured.
  const Observability = options?.otel?.baseUrl
    ? Otlp.layer({
        baseUrl: options.otel.baseUrl,
        tracerExportInterval: 50,
        resource: {
          serviceName: options.otel.serviceName ?? 'sync-cf-do',
        },
      }).pipe(Layer.provide(FetchHttpClient.layer))
    : Layer.empty

  return class SyncBackendDOBase extends DurableObjectBase implements SyncBackendRpcInterface {
    __DURABLE_OBJECT_BRAND = 'SyncBackendDOBase' as never

    constructor(ctx: CfTypes.DurableObjectState, env: Env) {
      super(ctx, env)

      const WebSocketRpcServerLive = makeRpcServer({ doSelf: this, doOptions: options })

      // This registers the `webSocketMessage` and `webSocketClose` handlers
      if (enabledTransports.has('ws')) {
        setupDurableObjectWebSocketRpc({
          doSelf: this,
          rpcLayer: WebSocketRpcServerLive,
          webSocketMode: 'hibernate',
          // See `pull.ts` for more details how `pull` Effect RPC requests streams are handled
          // in combination with DO hibernation
          onMessage: (request, ws) => {
            if (request._tag === 'Request' && request.tag === 'SyncWsRpc.Pull') {
              // Is Pull request: add requestId to pullRequestIds
              // (stored in the hibernation-safe WS attachment so it survives DO eviction)
              const attachment = ws.deserializeAttachment()
              const { pullRequestIds, ...rest } = Schema.decodeSync(WebSocketAttachmentSchema)(attachment)
              ws.serializeAttachment(
                Schema.encodeSync(WebSocketAttachmentSchema)({
                  ...rest,
                  pullRequestIds: [...pullRequestIds, request.id],
                }),
              )
            } else if (request._tag === 'Interrupt') {
              // Is Interrupt request: remove requestId from pullRequestIds
              const attachment = ws.deserializeAttachment()
              const { pullRequestIds, ...rest } = Schema.decodeSync(WebSocketAttachmentSchema)(attachment)
              ws.serializeAttachment(
                Schema.encodeSync(WebSocketAttachmentSchema)({
                  ...rest,
                  pullRequestIds: pullRequestIds.filter((id) => id !== request.requestId),
                }),
              )
              // TODO also emit `Exit` stream RPC message
            }
          },
          mainLayer: Observability,
        })
      }
    }

    /**
     * Entry point for both HTTP JSON-RPC requests and WebSocket upgrade requests.
     * Transport is selected from the request's search params (see `matchSyncRequest`).
     */
    fetch = async (request: Request): Promise<Response> =>
      Effect.gen(this, function* () {
        const searchParams = matchSyncRequest(request)
        if (searchParams === undefined) {
          throw new Error('No search params found in request URL')
        }

        const { storeId, payload, transport } = searchParams

        if (enabledTransports.has(transport) === false) {
          throw new Error(`Transport ${transport} is not enabled (based on \`options.enabledTransports\`)`)
        }

        if (transport === 'http') {
          return yield* this.handleHttp(request)
        }

        if (transport === 'ws') {
          const { 0: client, 1: server } = new WebSocketPair()

          // Since we're using websocket hibernation, we need to remember the storeId for subsequent `webSocketMessage` calls
          server.serializeAttachment(
            Schema.encodeSync(WebSocketAttachmentSchema)({ storeId, payload, pullRequestIds: [] }),
          )

          // See https://developers.cloudflare.com/durable-objects/examples/websocket-hibernation-server

          this.ctx.acceptWebSocket(server)

          // Ping requests are sent by Effect RPC internally
          this.ctx.setWebSocketAutoResponse(
            new WebSocketRequestResponsePair(
              JSON.stringify(RpcMessage.constPing),
              JSON.stringify(RpcMessage.constPong),
            ),
          )

          return new Response(null, {
            status: 101,
            webSocket: client,
          })
        }

        console.error('Invalid path', request.url)

        return new Response('Invalid path', {
          status: 400,
          statusText: 'Bad Request',
        })
      }).pipe(
        Effect.tapCauseLogPretty, // Also log errors to console before catching them
        Effect.catchAllCause((cause) =>
          Effect.succeed(new Response('Error', { status: 500, statusText: cause.toString() })),
        ),
        Effect.withSpan('@livestore/sync-cf:durable-object:fetch'),
        Effect.provide(DoCtx.Default({ doSelf: this, doOptions: options, from: request })),
        this.runEffectAsPromise,
      )

    /**
     * Handles DO <-> DO RPC calls
     */
    async rpc(payload: Uint8Array<ArrayBuffer>): Promise<Uint8Array<ArrayBuffer> | CfTypes.ReadableStream> {
      if (enabledTransports.has('do-rpc') === false) {
        throw new Error('Do RPC transport is not enabled (based on `options.enabledTransports`)')
      }

      return createDoRpcHandler({ payload, input: { doSelf: this, doOptions: options } }).pipe(
        Effect.withSpan('@livestore/sync-cf:durable-object:rpc'),
        this.runEffectAsPromise,
      )
    }

    /**
     * Handles HTTP RPC calls
     *
     * Requires the `enable_request_signal` compatibility flag to properly support `pull` streaming responses
     */
    private handleHttp = (request: CfTypes.Request) =>
      createHttpRpcHandler({
        request,
        responseHeaders: options?.http?.responseHeaders,
      }).pipe(Effect.withSpan('@livestore/sync-cf:durable-object:handleHttp'))

    // Shared runner: logs failures, provides logging/observability layers, and
    // converts the scoped effect into the Promise shape CF's runtime expects.
    private runEffectAsPromise = <T, E = never>(effect: Effect.Effect<T, E, Scope.Scope>): Promise<T> =>
      effect.pipe(
        Effect.tapCauseLogPretty,
        Logger.withMinimumLogLevel(LogLevel.Debug),
        Effect.provide(Layer.mergeAll(Observability, Logging)),
        Effect.scoped,
        Effect.runPromise,
      )
  }
}
@@ -0,0 +1,128 @@
import { UnknownError } from '@livestore/common'
import { EventSequenceNumber, State } from '@livestore/common/schema'
import type { CfTypes } from '@livestore/common-cf'
import { shouldNeverHappen } from '@livestore/utils'
import { Effect, Predicate } from '@livestore/utils/effect'
import { nanoid } from '@livestore/utils/nanoid'
import type { Env, MakeDurableObjectClassOptions, RpcSubscription } from '../shared.ts'
import { contextTable, eventlogTable } from './sqlite.ts'
import { makeStorage } from './sync-storage.ts'

// Cache slot on the DO instance itself, so the context below is only built once per object lifetime.
const CacheSymbol = Symbol('Cache')

/** Input needed to construct the per-store DO context. */
export interface DoCtxInput {
  doSelf: CfTypes.DurableObject & {
    ctx: CfTypes.DurableObjectState
    env: Env
  }
  doOptions: MakeDurableObjectClassOptions | undefined
  /** Either an incoming request (storeId is read from its URL search params) or an explicit storeId. */
  from: CfTypes.Request | { storeId: string }
}

/**
 * Effect service providing everything the push/pull handlers need:
 * storeId, backendId, current-head tracking, the storage engine, and RPC subscriptions.
 * The constructed context is memoized on the DO instance via `CacheSymbol`.
 */
export class DoCtx extends Effect.Service<DoCtx>()('DoCtx', {
  effect: Effect.fn(
    function* ({ doSelf, doOptions, from }: DoCtxInput) {
      // Return the memoized context if this DO instance already built one.
      if ((doSelf as any)[CacheSymbol] !== undefined) {
        return (doSelf as any)[CacheSymbol] as never
      }

      const getStoreId = (from: CfTypes.Request | { storeId: string }) => {
        if (Predicate.hasProperty(from, 'url')) {
          const url = new URL(from.url)
          return (
            url.searchParams.get('storeId') ?? shouldNeverHappen(`No storeId provided in request URL search params`)
          )
        }
        return from.storeId
      }

      const storeId = getStoreId(from)
      // Resolve storage engine
      const makeEngine = Effect.gen(function* () {
        const opt = doOptions?.storage
        if (opt?._tag === 'd1') {
          const db = (doSelf.env as any)[opt.binding]
          if (!db) {
            return yield* UnknownError.make({ cause: new Error(`D1 binding '${opt.binding}' not found on env`) })
          }
          return { _tag: 'd1' as const, db }
        } else if (opt?._tag === 'do-sqlite' || opt === undefined) {
          // Default engine: SQLite embedded in Durable Object storage
          return { _tag: 'do-sqlite' as const }
        } else return shouldNeverHappen(`Invalid storage engine`, opt)
      })

      const engine = yield* makeEngine

      const storage = makeStorage(doSelf.ctx, storeId, engine)

      // Initialize database tables
      {
        const colSpec = State.SQLite.makeColumnSpec(eventlogTable.sqliteDef.ast)
        if (engine._tag === 'd1') {
          // D1 database is async, so we need to use a promise
          yield* Effect.promise(() =>
            engine.db.exec(`CREATE TABLE IF NOT EXISTS "${storage.dbName}" (${colSpec}) strict`),
          )
        } else {
          // DO SQLite table lives in Durable Object storage
          doSelf.ctx.storage.sql.exec(`CREATE TABLE IF NOT EXISTS "${storage.dbName}" (${colSpec}) strict`)
        }
      }
      {
        // The context table always lives in DO storage, even when events go to D1.
        const colSpec = State.SQLite.makeColumnSpec(contextTable.sqliteDef.ast)
        doSelf.ctx.storage.sql.exec(`CREATE TABLE IF NOT EXISTS "${contextTable.sqliteDef.name}" (${colSpec}) strict`)
      }

      const storageRow = doSelf.ctx.storage.sql
        .exec(`SELECT * FROM "${contextTable.sqliteDef.name}" WHERE storeId = ?`, storeId)
        .toArray()[0] as typeof contextTable.rowSchema.Type | undefined

      const currentHeadRef = { current: storageRow?.currentHead ?? EventSequenceNumber.Client.ROOT.global }

      // TODO do consistency check with eventlog table to make sure the head is consistent

      // Should be the same backendId for lifetime of the durable object
      const backendId = storageRow?.backendId ?? nanoid()

      // Persists the new head and keeps both the local ref and the cached ref in sync.
      const updateCurrentHead = (currentHead: EventSequenceNumber.Global.Type) => {
        doSelf.ctx.storage.sql.exec(
          `INSERT OR REPLACE INTO "${contextTable.sqliteDef.name}" (storeId, currentHead, backendId) VALUES (?, ?, ?)`,
          storeId,
          currentHead,
          backendId,
        )

        currentHeadRef.current = currentHead

        // I still don't know why we need to re-assign this ref to the `doSelf` object but somehow this seems to be needed 😵‍💫
        // @ts-expect-error
        doSelf[CacheSymbol].currentHeadRef = { current: currentHead }
      }

      const rpcSubscriptions = new Map<string, RpcSubscription>()

      const storageCache = {
        storeId,
        backendId,
        currentHeadRef,
        updateCurrentHead,
        storage,
        doOptions,
        env: doSelf.env,
        ctx: doSelf.ctx,
        rpcSubscriptions,
      }

      // NOTE must be assigned before the `updateCurrentHead` call below,
      // since `updateCurrentHead` writes through `doSelf[CacheSymbol]`.
      ;(doSelf as any)[CacheSymbol] = storageCache

      // Set initial current head to root
      if (storageRow === undefined) {
        updateCurrentHead(EventSequenceNumber.Client.ROOT.global)
      }

      return storageCache
    },
    UnknownError.mapToUnknownError,
    Effect.withSpan('@livestore/sync-cf:durable-object:makeDoCtx'),
  ),
}) {}
@@ -0,0 +1,75 @@
1
+ import { BackendIdMismatchError, InvalidPullError, SyncBackend, UnknownError } from '@livestore/common'
2
+ import { splitChunkBySize } from '@livestore/common/sync'
3
+ import { Chunk, Effect, Option, Schema, Stream } from '@livestore/utils/effect'
4
+ import { MAX_PULL_EVENTS_PER_MESSAGE, MAX_WS_MESSAGE_BYTES } from '../../common/constants.ts'
5
+ import { SyncMessage } from '../../common/mod.ts'
6
+ import { DoCtx } from './layer.ts'
7
+
8
+ const encodePullResponse = Schema.encodeSync(SyncMessage.PullResponse)
9
+
10
+ // Notes on stream handling:
11
+ // We're intentionally closing the stream once we've read all existing events
12
+ //
13
+ // WebSocket:
14
+ // - Further chunks will be emitted manually in `push.ts`
15
+ // - If the client sends a `Interrupt` RPC message, it will be handled in the `durable-object.ts` constructor
16
+ // DO RPC:
17
+ // - Further chunks will be emitted manually in `push.ts`
18
+ // - If the client sends a `Interrupt` RPC message, TODO
19
+ export const makeEndingPullStream = (
20
+ req: SyncMessage.PullRequest,
21
+ payload: Schema.JsonValue | undefined,
22
+ ): Stream.Stream<SyncMessage.PullResponse, InvalidPullError, DoCtx> =>
23
+ Effect.gen(function* () {
24
+ const { doOptions, backendId, storeId, storage } = yield* DoCtx
25
+
26
+ if (doOptions?.onPull) {
27
+ yield* Effect.tryAll(() => doOptions!.onPull!(req, { storeId, payload })).pipe(UnknownError.mapToUnknownError)
28
+ }
29
+
30
+ if (req.cursor._tag === 'Some' && req.cursor.value.backendId !== backendId) {
31
+ return yield* new BackendIdMismatchError({ expected: backendId, received: req.cursor.value.backendId })
32
+ }
33
+
34
+ const { stream: storedEvents, total } = yield* storage.getEvents(
35
+ Option.getOrUndefined(req.cursor)?.eventSequenceNumber,
36
+ )
37
+
38
+ return storedEvents.pipe(
39
+ Stream.mapChunksEffect(
40
+ splitChunkBySize({
41
+ maxItems: MAX_PULL_EVENTS_PER_MESSAGE,
42
+ maxBytes: MAX_WS_MESSAGE_BYTES,
43
+ encode: (batch) =>
44
+ encodePullResponse(
45
+ SyncMessage.PullResponse.make({ batch, pageInfo: SyncBackend.pageInfoNoMore, backendId }),
46
+ ),
47
+ }),
48
+ ),
49
+ Stream.mapAccum(total, (remaining, chunk) => {
50
+ const asArray = Chunk.toReadonlyArray(chunk)
51
+ const nextRemaining = Math.max(0, remaining - asArray.length)
52
+
53
+ return [
54
+ nextRemaining,
55
+ SyncMessage.PullResponse.make({
56
+ batch: asArray,
57
+ pageInfo: nextRemaining > 0 ? SyncBackend.pageInfoMoreKnown(nextRemaining) : SyncBackend.pageInfoNoMore,
58
+ backendId,
59
+ }),
60
+ ] as const
61
+ }),
62
+ Stream.tap(
63
+ Effect.fn(function* (res) {
64
+ if (doOptions?.onPullRes) {
65
+ yield* Effect.tryAll(() => doOptions.onPullRes!(res)).pipe(UnknownError.mapToUnknownError)
66
+ }
67
+ }),
68
+ ),
69
+ Stream.emitIfEmpty(SyncMessage.emptyPullResponse(backendId)),
70
+ )
71
+ }).pipe(
72
+ Stream.unwrap,
73
+ Stream.mapError((cause) => InvalidPullError.make({ cause })),
74
+ Stream.withSpan('cloudflare-provider:pull'),
75
+ )
@@ -0,0 +1,205 @@
1
+ import {
2
+ BackendIdMismatchError,
3
+ InvalidPushError,
4
+ ServerAheadError,
5
+ SyncBackend,
6
+ UnknownError,
7
+ } from '@livestore/common'
8
+ import { splitChunkBySize } from '@livestore/common/sync'
9
+ import { type CfTypes, emitStreamResponse } from '@livestore/common-cf'
10
+ import { Chunk, Effect, Option, type RpcMessage, Schema } from '@livestore/utils/effect'
11
+ import { MAX_PUSH_EVENTS_PER_REQUEST, MAX_WS_MESSAGE_BYTES } from '../../common/constants.ts'
12
+ import { SyncMessage } from '../../common/mod.ts'
13
+ import { type Env, type MakeDurableObjectClassOptions, type StoreId, WebSocketAttachmentSchema } from '../shared.ts'
14
+ import { DoCtx } from './layer.ts'
15
+
16
+ const encodePullResponse = Schema.encodeSync(SyncMessage.PullResponse)
17
+ type PullBatchItem = SyncMessage.PullResponse['batch'][number]
18
+
19
+ export const makePush =
20
+ ({
21
+ payload,
22
+ options,
23
+ storeId,
24
+ ctx,
25
+ env,
26
+ }: {
27
+ payload: Schema.JsonValue | undefined
28
+ options: MakeDurableObjectClassOptions | undefined
29
+ storeId: StoreId
30
+ ctx: CfTypes.DurableObjectState
31
+ env: Env
32
+ }) =>
33
+ (pushRequest: Omit<SyncMessage.PushRequest, '_tag'>) =>
34
+ Effect.gen(function* () {
35
+ // yield* Effect.log(`Pushing ${decodedMessage.batch.length} events`, decodedMessage.batch)
36
+ const { backendId, storage, currentHeadRef, updateCurrentHead, rpcSubscriptions } = yield* DoCtx
37
+
38
+ if (pushRequest.batch.length === 0) {
39
+ return SyncMessage.PushAck.make({})
40
+ }
41
+
42
+ if (options?.onPush) {
43
+ yield* Effect.tryAll(() => options.onPush!(pushRequest, { storeId, payload })).pipe(
44
+ UnknownError.mapToUnknownError,
45
+ )
46
+ }
47
+
48
+ if (pushRequest.backendId._tag === 'Some' && pushRequest.backendId.value !== backendId) {
49
+ return yield* new BackendIdMismatchError({ expected: backendId, received: pushRequest.backendId.value })
50
+ }
51
+
52
+ // This part of the code needs to run sequentially to avoid race conditions
53
+ const { createdAt } = yield* Effect.gen(function* () {
54
+ const currentHead = currentHeadRef.current
55
+ // TODO handle clientId unique conflict
56
+ // Validate the batch
57
+ const firstEventParent = pushRequest.batch[0]!.parentSeqNum
58
+ if (firstEventParent !== currentHead) {
59
+ // yield* Effect.logDebug('ServerAheadError: backend head mismatch', {
60
+ // expectedHead: currentHead,
61
+ // providedHead: firstEventParent,
62
+ // batchSize: pushRequest.batch.length,
63
+ // backendId,
64
+ // })
65
+
66
+ return yield* new ServerAheadError({ minimumExpectedNum: currentHead, providedNum: firstEventParent })
67
+ }
68
+
69
+ const createdAt = new Date().toISOString()
70
+
71
+ // TODO possibly model this as a queue in order to speed up subsequent pushes
72
+ yield* storage.appendEvents(pushRequest.batch, createdAt)
73
+
74
+ updateCurrentHead(pushRequest.batch.at(-1)!.seqNum)
75
+
76
+ return { createdAt }
77
+ }).pipe(blockConcurrencyWhile(ctx))
78
+
79
+ // Run in background but already return the push ack to the client
80
+ yield* Effect.gen(function* () {
81
+ const connectedClients = ctx.getWebSockets()
82
+
83
+ // Preparing chunks of responses to make sure we don't exceed the WS message size limit.
84
+ const responses = yield* Chunk.fromIterable(pushRequest.batch).pipe(
85
+ splitChunkBySize({
86
+ maxItems: MAX_PUSH_EVENTS_PER_REQUEST,
87
+ maxBytes: MAX_WS_MESSAGE_BYTES,
88
+ encode: (items) =>
89
+ encodePullResponse(
90
+ SyncMessage.PullResponse.make({
91
+ batch: items.map(
92
+ (eventEncoded): PullBatchItem => ({
93
+ eventEncoded,
94
+ metadata: Option.some(SyncMessage.SyncMetadata.make({ createdAt })),
95
+ }),
96
+ ),
97
+ pageInfo: SyncBackend.pageInfoNoMore,
98
+ backendId,
99
+ }),
100
+ ),
101
+ }),
102
+ Effect.map(
103
+ Chunk.map((eventsChunk) => {
104
+ const batchWithMetadata = Chunk.toReadonlyArray(eventsChunk).map((eventEncoded) => ({
105
+ eventEncoded,
106
+ metadata: Option.some(SyncMessage.SyncMetadata.make({ createdAt })),
107
+ }))
108
+
109
+ const response = SyncMessage.PullResponse.make({
110
+ batch: batchWithMetadata,
111
+ pageInfo: SyncBackend.pageInfoNoMore,
112
+ backendId,
113
+ })
114
+
115
+ return {
116
+ response,
117
+ encoded: Schema.encodeSync(SyncMessage.PullResponse)(response),
118
+ }
119
+ }),
120
+ ),
121
+ )
122
+
123
+ // Dual broadcasting: WebSocket + RPC clients
124
+
125
+ // Broadcast to WebSocket clients
126
+ if (connectedClients.length > 0) {
127
+ for (const { response, encoded } of responses) {
128
+ // Only calling once for now.
129
+ if (options?.onPullRes) {
130
+ yield* Effect.tryAll(() => options.onPullRes!(response)).pipe(UnknownError.mapToUnknownError)
131
+ }
132
+
133
+ // NOTE we're also sending the pullRes chunk to the pushing ws client as confirmation
134
+ for (const conn of connectedClients) {
135
+ const attachment = Schema.decodeSync(WebSocketAttachmentSchema)(conn.deserializeAttachment())
136
+
137
+ // We're doing something a bit "advanced" here as we're directly emitting Effect RPC-compatible
138
+ // response messsages on the Effect RPC-managed websocket connection to the WS client.
139
+ // For this we need to get the RPC `requestId` from the WebSocket attachment.
140
+ for (const requestId of attachment.pullRequestIds) {
141
+ const res: RpcMessage.ResponseChunkEncoded = {
142
+ _tag: 'Chunk',
143
+ requestId,
144
+ values: [encoded],
145
+ }
146
+ conn.send(JSON.stringify(res))
147
+ }
148
+ }
149
+ }
150
+
151
+ yield* Effect.logDebug(`Broadcasted to ${connectedClients.length} WebSocket clients`)
152
+ }
153
+
154
+ // RPC broadcasting would require reconstructing client stubs from clientIds
155
+ if (rpcSubscriptions.size > 0) {
156
+ for (const subscription of rpcSubscriptions.values()) {
157
+ for (const { encoded } of responses) {
158
+ yield* emitStreamResponse({
159
+ callerContext: subscription.callerContext,
160
+ env,
161
+ requestId: subscription.requestId,
162
+ values: [encoded],
163
+ }).pipe(Effect.tapCauseLogPretty, Effect.exit)
164
+ }
165
+ }
166
+
167
+ yield* Effect.logDebug(`Broadcasted to ${rpcSubscriptions.size} RPC clients`)
168
+ }
169
+ }).pipe(
170
+ Effect.tapCauseLogPretty,
171
+ Effect.withSpan('push-rpc-broadcast'),
172
+ Effect.uninterruptible, // We need to make sure Effect RPC doesn't interrupt this fiber
173
+ Effect.fork,
174
+ )
175
+
176
+ // We need to yield here to make sure the fork above is kicked off before we let Effect RPC finish the request
177
+ yield* Effect.yieldNow()
178
+
179
+ return SyncMessage.PushAck.make({})
180
+ }).pipe(
181
+ Effect.tap(
182
+ Effect.fn(function* (message) {
183
+ if (options?.onPushRes) {
184
+ yield* Effect.tryAll(() => options.onPushRes!(message)).pipe(UnknownError.mapToUnknownError)
185
+ }
186
+ }),
187
+ ),
188
+ Effect.mapError((cause) => InvalidPushError.make({ cause })),
189
+ Effect.withSpan('sync-cf:do:push', { attributes: { storeId, batchSize: pushRequest.batch.length } }),
190
+ )
191
+
192
+ /**
193
+ * @see https://developers.cloudflare.com/durable-objects/api/state/#blockconcurrencywhile
194
+ */
195
+ const blockConcurrencyWhile =
196
+ (ctx: CfTypes.DurableObjectState) =>
197
+ <A, E, R>(eff: Effect.Effect<A, E, R>) =>
198
+ Effect.gen(function* () {
199
+ const runtime = yield* Effect.runtime<R>()
200
+ const exit = yield* Effect.promise(() =>
201
+ ctx.blockConcurrencyWhile(() => eff.pipe(Effect.provide(runtime), Effect.runPromiseExit)),
202
+ )
203
+
204
+ return yield* exit
205
+ })
@@ -0,0 +1,37 @@
import { EventSequenceNumber, State } from '@livestore/common/schema'
import { Schema } from '@livestore/utils/effect'
import { PERSISTENCE_FORMAT_VERSION } from '../shared.ts'

/**
 * Main event log table storing all LiveStore events.
 *
 * ⚠️ IMPORTANT: Any changes to this schema require bumping PERSISTENCE_FORMAT_VERSION in shared.ts
 */
export const eventlogTable = State.SQLite.table({
  // NOTE actual table name is determined at runtime to use proper storeId
  name: `eventlog_${PERSISTENCE_FORMAT_VERSION}_$storeId`,
  columns: {
    // Global sequence number — primary key of the log
    seqNum: State.SQLite.integer({ primaryKey: true, schema: EventSequenceNumber.Global.Schema }),
    // Sequence number of the event this one chains onto
    parentSeqNum: State.SQLite.integer({ schema: EventSequenceNumber.Global.Schema }),
    name: State.SQLite.text({}),
    // JSON-encoded event arguments (nullable)
    args: State.SQLite.text({ schema: Schema.parseJson(Schema.Any), nullable: true }),
    /** ISO date format. Currently only used for debugging purposes. */
    createdAt: State.SQLite.text({}),
    clientId: State.SQLite.text({}),
    sessionId: State.SQLite.text({}),
  },
})

/**
 * Context metadata table - one row per durable object.
 *
 * ⚠️ IMPORTANT: Any changes to this schema require bumping PERSISTENCE_FORMAT_VERSION in shared.ts
 */
export const contextTable = State.SQLite.table({
  name: `context_${PERSISTENCE_FORMAT_VERSION}`,
  columns: {
    storeId: State.SQLite.text({ primaryKey: true }),
    // Latest global sequence number acknowledged by this backend
    currentHead: State.SQLite.integer({ schema: EventSequenceNumber.Global.Schema }),
    // Stable identifier for this backend incarnation (generated once, see layer.ts)
    backendId: State.SQLite.text({}),
  },
})