@livestore/sync-cf 0.3.0-dev.5 → 0.3.0-dev.51
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/.tsbuildinfo +1 -1
- package/dist/cf-worker/durable-object.d.ts +60 -46
- package/dist/cf-worker/durable-object.d.ts.map +1 -1
- package/dist/cf-worker/durable-object.js +230 -148
- package/dist/cf-worker/durable-object.js.map +1 -1
- package/dist/cf-worker/mod.d.ts +3 -0
- package/dist/cf-worker/mod.d.ts.map +1 -0
- package/dist/cf-worker/mod.js +3 -0
- package/dist/cf-worker/mod.js.map +1 -0
- package/dist/cf-worker/worker.d.ts +40 -0
- package/dist/cf-worker/worker.d.ts.map +1 -0
- package/dist/cf-worker/worker.js +92 -0
- package/dist/cf-worker/worker.js.map +1 -0
- package/dist/common/mod.d.ts +7 -0
- package/dist/common/mod.d.ts.map +1 -0
- package/dist/common/mod.js +7 -0
- package/dist/common/mod.js.map +1 -0
- package/dist/common/ws-message-types.d.ts +148 -98
- package/dist/common/ws-message-types.d.ts.map +1 -1
- package/dist/common/ws-message-types.js +19 -24
- package/dist/common/ws-message-types.js.map +1 -1
- package/dist/sync-impl/mod.d.ts +2 -0
- package/dist/sync-impl/mod.d.ts.map +1 -0
- package/dist/sync-impl/mod.js +2 -0
- package/dist/sync-impl/mod.js.map +1 -0
- package/dist/sync-impl/ws-impl.d.ts +3 -15
- package/dist/sync-impl/ws-impl.d.ts.map +1 -1
- package/dist/sync-impl/ws-impl.js +89 -36
- package/dist/sync-impl/ws-impl.js.map +1 -1
- package/package.json +15 -13
- package/src/cf-worker/durable-object.ts +311 -165
- package/src/cf-worker/mod.ts +2 -0
- package/src/cf-worker/worker.ts +129 -0
- package/src/common/mod.ts +8 -0
- package/src/common/ws-message-types.ts +22 -36
- package/src/sync-impl/ws-impl.ts +146 -100
- package/dist/cf-worker/index.d.ts +0 -8
- package/dist/cf-worker/index.d.ts.map +0 -1
- package/dist/cf-worker/index.js +0 -67
- package/dist/cf-worker/index.js.map +0 -1
- package/dist/common/index.d.ts +0 -2
- package/dist/common/index.d.ts.map +0 -1
- package/dist/common/index.js +0 -2
- package/dist/common/index.js.map +0 -1
- package/dist/sync-impl/index.d.ts +0 -2
- package/dist/sync-impl/index.d.ts.map +0 -1
- package/dist/sync-impl/index.js +0 -2
- package/dist/sync-impl/index.js.map +0 -1
- package/src/cf-worker/index.ts +0 -84
- package/src/common/index.ts +0 -1
- /package/src/sync-impl/{index.ts → mod.ts} +0 -0
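The headline change in this range is the new `makeDurableObject` factory (see the `durable-object.ts` diff below), which replaces the fixed `WebSocketServer` Durable Object class and accepts optional `onPush` / `onPushRes` / `onPull` / `onPullRes` hooks, alongside the `index.ts` → `mod.ts` renames and the added `worker.ts`. A minimal, hypothetical consumer sketch — the import path, the subclassing pattern, and the hook bodies are assumptions and are not taken from this diff:

// Hypothetical usage sketch (import path assumed, not shown in this diff)
import { makeDurableObject } from '@livestore/sync-cf/cf-worker'

// makeDurableObject returns a DurableObject class; the hooks mirror
// MakeDurableObjectClassOptions from the durable-object.ts diff below.
export class WebSocketServer extends makeDurableObject({
  onPush: async (message) => {
    console.log('push batch of', message.batch.length, 'events')
  },
  onPullRes: async (message) => {
    console.log('pull response', message._tag)
  },
}) {}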
package/src/cf-worker/durable-object.ts

@@ -1,14 +1,14 @@
-import { makeColumnSpec } from '@livestore/common'
-import {
+import { makeColumnSpec, UnexpectedError } from '@livestore/common'
+import { EventSequenceNumber, type LiveStoreEvent, State } from '@livestore/common/schema'
 import { shouldNeverHappen } from '@livestore/utils'
 import { Effect, Logger, LogLevel, Option, Schema } from '@livestore/utils/effect'
 import { DurableObject } from 'cloudflare:workers'

-import { WSMessage } from '../common/
+import { WSMessage } from '../common/mod.js'
 import type { SyncMetadata } from '../common/ws-message-types.js'

 export interface Env {
-  WEBSOCKET_SERVER: DurableObjectNamespace
+  WEBSOCKET_SERVER: DurableObjectNamespace
   DB: D1Database
   ADMIN_SECRET: string
 }
@@ -19,58 +19,88 @@ const encodeOutgoingMessage = Schema.encodeSync(Schema.parseJson(WSMessage.Backe
 const encodeIncomingMessage = Schema.encodeSync(Schema.parseJson(WSMessage.ClientToBackendMessage))
 const decodeIncomingMessage = Schema.decodeUnknownEither(Schema.parseJson(WSMessage.ClientToBackendMessage))

-
-
-
-
-
-
-
-
+export const eventlogTable = State.SQLite.table({
+  // NOTE actual table name is determined at runtime
+  name: 'eventlog_${PERSISTENCE_FORMAT_VERSION}_${storeId}',
+  columns: {
+    seqNum: State.SQLite.integer({ primaryKey: true, schema: EventSequenceNumber.GlobalEventSequenceNumber }),
+    parentSeqNum: State.SQLite.integer({ schema: EventSequenceNumber.GlobalEventSequenceNumber }),
+    name: State.SQLite.text({}),
+    args: State.SQLite.text({ schema: Schema.parseJson(Schema.Any), nullable: true }),
+    /** ISO date format. Currently only used for debugging purposes. */
+    createdAt: State.SQLite.text({}),
+    clientId: State.SQLite.text({}),
+    sessionId: State.SQLite.text({}),
+  },
 })

+const WebSocketAttachmentSchema = Schema.parseJson(
+  Schema.Struct({
+    storeId: Schema.String,
+  }),
+)
+
+export const PULL_CHUNK_SIZE = 100
+
 /**
- * Needs to be bumped when the storage format changes (e.g.
+ * Needs to be bumped when the storage format changes (e.g. eventlogTable schema changes)
  *
  * Changing this version number will lead to a "soft reset".
  */
-const PERSISTENCE_FORMAT_VERSION =
+export const PERSISTENCE_FORMAT_VERSION = 6

-
-
-
-
+export type MakeDurableObjectClassOptions = {
+  onPush?: (message: WSMessage.PushReq) => Effect.Effect<void> | Promise<void>
+  onPushRes?: (message: WSMessage.PushAck | WSMessage.Error) => Effect.Effect<void> | Promise<void>
+  onPull?: (message: WSMessage.PullReq) => Effect.Effect<void> | Promise<void>
+  onPullRes?: (message: WSMessage.PullRes | WSMessage.Error) => Effect.Effect<void> | Promise<void>
+}

-
-
-
+export type MakeDurableObjectClass = (options?: MakeDurableObjectClassOptions) => {
+  new (ctx: DurableObjectState, env: Env): DurableObject<Env>
+}

-
-
-
+export const makeDurableObject: MakeDurableObjectClass = (options) => {
+  return class WebSocketServerBase extends DurableObject<Env> {
+    /** Needed to prevent concurrent pushes */
+    private pushSemaphore = Effect.makeSemaphore(1).pipe(Effect.runSync)

-
+    constructor(ctx: DurableObjectState, env: Env) {
+      super(ctx, env)
+    }

-
+    fetch = async (request: Request) =>
+      Effect.gen(this, function* () {
+        const storeId = getStoreId(request)
+        const storage = makeStorage(this.ctx, this.env, storeId)

-
-
-
-
-
-
+        const { 0: client, 1: server } = new WebSocketPair()
+
+        // Since we're using websocket hibernation, we need to remember the storeId for subsequent `webSocketMessage` calls
+        server.serializeAttachment(Schema.encodeSync(WebSocketAttachmentSchema)({ storeId }))
+
+        // See https://developers.cloudflare.com/durable-objects/examples/websocket-hibernation-server

-
-    this.env.DB.exec(`CREATE TABLE IF NOT EXISTS ${this.dbName} (${colSpec}) strict`)
+        this.ctx.acceptWebSocket(server)

-
-
-
-
-
+        this.ctx.setWebSocketAutoResponse(
+          new WebSocketRequestResponsePair(
+            encodeIncomingMessage(WSMessage.Ping.make({ requestId: 'ping' })),
+            encodeOutgoingMessage(WSMessage.Pong.make({ requestId: 'ping' })),
+          ),
+        )

-
-
+        const colSpec = makeColumnSpec(eventlogTable.sqliteDef.ast)
+        this.env.DB.exec(`CREATE TABLE IF NOT EXISTS ${storage.dbName} (${colSpec}) strict`)
+
+        return new Response(null, {
+          status: 101,
+          webSocket: client,
+        })
+      }).pipe(Effect.tapCauseLogPretty, Effect.runPromise)
+
+    webSocketMessage = (ws: WebSocketClient, message: ArrayBuffer | string) => {
+      console.log('webSocketMessage', message)
       const decodedMessageRes = decodeIncomingMessage(message)

       if (decodedMessageRes._tag === 'Left') {
@@ -81,175 +111,291 @@ export class WebSocketServer extends DurableObject<Env> {
       const decodedMessage = decodedMessageRes.right
       const requestId = decodedMessage.requestId

-
-
-
-      const cursor = decodedMessage.cursor
-      const CHUNK_SIZE = 100
+      return Effect.gen(this, function* () {
+        const { storeId } = yield* Schema.decode(WebSocketAttachmentSchema)(ws.deserializeAttachment())
+        const storage = makeStorage(this.ctx, this.env, storeId)

-
-
+        try {
+          switch (decodedMessage._tag) {
+            // TODO allow pulling concurrently to not block incoming push requests
+            case 'WSMessage.PullReq': {
+              if (options?.onPull) {
+                yield* Effect.tryAll(() => options.onPull!(decodedMessage))
+              }

-
-
-
+              const respond = (message: WSMessage.PullRes) =>
+                Effect.gen(function* () {
+                  if (options?.onPullRes) {
+                    yield* Effect.tryAll(() => options.onPullRes!(message))
+                  }
+                  ws.send(encodeOutgoingMessage(message))
+                })

-
-
-
+              const cursor = decodedMessage.cursor
+
+              // TODO use streaming
+              const remainingEvents = yield* storage.getEvents(cursor)

-      if
-
+              // Send at least one response, even if there are no events
+              const batches =
+                remainingEvents.length === 0
+                  ? [[]]
+                  : Array.from({ length: Math.ceil(remainingEvents.length / PULL_CHUNK_SIZE) }, (_, i) =>
+                      remainingEvents.slice(i * PULL_CHUNK_SIZE, (i + 1) * PULL_CHUNK_SIZE),
+                    )
+
+              for (const [index, batch] of batches.entries()) {
+                const remaining = Math.max(0, remainingEvents.length - (index + 1) * PULL_CHUNK_SIZE)
+                yield* respond(WSMessage.PullRes.make({ batch, remaining, requestId: { context: 'pull', requestId } }))
               }
+
+              break
             }
+            case 'WSMessage.PushReq': {
+              const respond = (message: WSMessage.PushAck | WSMessage.Error) =>
+                Effect.gen(function* () {
+                  if (options?.onPushRes) {
+                    yield* Effect.tryAll(() => options.onPushRes!(message))
+                  }
+                  ws.send(encodeOutgoingMessage(message))
+                })

-
-
-
-
-
-
-
-
-
-
+              if (decodedMessage.batch.length === 0) {
+                yield* respond(WSMessage.PushAck.make({ requestId }))
+                return
+              }
+
+              yield* this.pushSemaphore.take(1)
+
+              if (options?.onPush) {
+                yield* Effect.tryAll(() => options.onPush!(decodedMessage))
+              }
+
+              // TODO check whether we could use the Durable Object storage for this to speed up the lookup
+              const expectedParentNum = yield* storage.getHead
+
+              // TODO handle clientId unique conflict
+              // Validate the batch
+              const firstEvent = decodedMessage.batch[0]!
+              if (firstEvent.parentSeqNum !== expectedParentNum) {
                 const err = WSMessage.Error.make({
-                  message: `Invalid parent
+                  message: `Invalid parent event number. Received e${firstEvent.parentSeqNum} but expected e${expectedParentNum}`,
                   requestId,
                 })

-                yield* Effect.
+                yield* Effect.logError(err)

-
+                yield* respond(err)
+                yield* this.pushSemaphore.release(1)
                 return
              }

-
-
-              const createdAt = new Date().toISOString()
+              yield* respond(WSMessage.PushAck.make({ requestId }))

-
-              const storePromise = this.storage.appendEvent(mutationEventEncoded, createdAt)
+              yield* this.pushSemaphore.release(1)

-
+              const createdAt = new Date().toISOString()

-              //
+              // NOTE we're not waiting for this to complete yet to allow the broadcast to happen right away
+              // while letting the async storage write happen in the background
+              const storeFiber = yield* storage.appendEvents(decodedMessage.batch, createdAt).pipe(Effect.fork)

               const connectedClients = this.ctx.getWebSockets()

+              // console.debug(`Broadcasting push batch to ${this.subscribedWebSockets.size} clients`)
               if (connectedClients.length > 0) {
-
-
-
-
+                // TODO refactor to batch api
+                const pullRes = WSMessage.PullRes.make({
+                  batch: decodedMessage.batch.map((eventEncoded) => ({
+                    eventEncoded,
                     metadata: Option.some({ createdAt }),
-                  }),
-
+                  })),
+                  remaining: 0,
+                  requestId: { context: 'push', requestId },
+                })
+                const pullResEnc = encodeOutgoingMessage(pullRes)

+                // Only calling once for now.
+                if (options?.onPullRes) {
+                  yield* Effect.tryAll(() => options.onPullRes!(pullRes))
+                }
+
+                // NOTE we're also sending the pullRes to the pushing ws client as a confirmation
                 for (const conn of connectedClients) {
-
-                  // if (conn !== ws) {
-                  conn.send(broadcastMessage)
-                  // }
+                  conn.send(pullResEnc)
                 }
               }

-
-
-              i++
-            }
+              // Wait for the storage write to complete before finishing this request
+              yield* storeFiber

-
-            }
-            case 'WSMessage.AdminResetRoomReq': {
-              if (decodedMessage.adminSecret !== this.env.ADMIN_SECRET) {
-                ws.send(encodeOutgoingMessage(WSMessage.Error.make({ message: 'Invalid admin secret', requestId })))
-                return
+              break
             }
+            case 'WSMessage.AdminResetRoomReq': {
+              if (decodedMessage.adminSecret !== this.env.ADMIN_SECRET) {
+                ws.send(encodeOutgoingMessage(WSMessage.Error.make({ message: 'Invalid admin secret', requestId })))
+                return
+              }

-
-
+              yield* storage.resetStore
+              ws.send(encodeOutgoingMessage(WSMessage.AdminResetRoomRes.make({ requestId })))

-
-            }
-            case 'WSMessage.AdminInfoReq': {
-              if (decodedMessage.adminSecret !== this.env.ADMIN_SECRET) {
-                ws.send(encodeOutgoingMessage(WSMessage.Error.make({ message: 'Invalid admin secret', requestId })))
-                return
+              break
             }
+            case 'WSMessage.AdminInfoReq': {
+              if (decodedMessage.adminSecret !== this.env.ADMIN_SECRET) {
+                ws.send(encodeOutgoingMessage(WSMessage.Error.make({ message: 'Invalid admin secret', requestId })))
+                return
+              }

-
-
-
-
-
+              ws.send(
+                encodeOutgoingMessage(
+                  WSMessage.AdminInfoRes.make({ requestId, info: { durableObjectId: this.ctx.id.toString() } }),
+                ),
+              )

-
-
-
-
-
+              break
+            }
+            default: {
+              console.error('unsupported message', decodedMessage)
+              return shouldNeverHappen()
+            }
           }
+        } catch (error: any) {
+          ws.send(encodeOutgoingMessage(WSMessage.Error.make({ message: error.message, requestId })))
         }
-      }
-
-
-
-
-
-
-
-
-
+      }).pipe(
+        Effect.withSpan(`@livestore/sync-cf:durable-object:webSocketMessage:${decodedMessage._tag}`, {
+          attributes: { requestId },
+        }),
+        Effect.tapCauseLogPretty,
+        Effect.tapErrorCause((cause) =>
+          Effect.sync(() =>
+            ws.send(encodeOutgoingMessage(WSMessage.Error.make({ message: cause.toString(), requestId }))),
+          ),
+        ),
+        Logger.withMinimumLogLevel(LogLevel.Debug),
+        Effect.provide(Logger.prettyWithThread('durable-object')),
+        Effect.runPromise,
+      )
+    }

-
-
-
+    webSocketClose = async (ws: WebSocketClient, code: number, _reason: string, _wasClean: boolean) => {
+      // If the client closes the connection, the runtime will invoke the webSocketClose() handler.
+      ws.close(code, 'Durable Object is closing WebSocket')
+    }
   }
 }

-
-
-
-
-
-
-
+type SyncStorage = {
+  dbName: string
+  getHead: Effect.Effect<EventSequenceNumber.GlobalEventSequenceNumber, UnexpectedError>
+  getEvents: (
+    cursor: number | undefined,
+  ) => Effect.Effect<
+    ReadonlyArray<{ eventEncoded: LiveStoreEvent.AnyEncodedGlobal; metadata: Option.Option<SyncMetadata> }>,
+    UnexpectedError
+  >
+  appendEvents: (
+    batch: ReadonlyArray<LiveStoreEvent.AnyEncodedGlobal>,
+    createdAt: string,
+  ) => Effect.Effect<void, UnexpectedError>
+  resetStore: Effect.Effect<void, UnexpectedError>
+}

-
-}
+const makeStorage = (ctx: DurableObjectState, env: Env, storeId: string): SyncStorage => {
+  const dbName = `eventlog_${PERSISTENCE_FORMAT_VERSION}_${toValidTableName(storeId)}`

-  const
-
-
-
-
-
-
-  // TODO handle case where `cursor` was not found
-  const rawEvents = await env.DB.prepare(sql).all()
-  if (rawEvents.error) {
-    throw new Error(rawEvents.error)
-  }
-  const events = Schema.decodeUnknownSync(Schema.Array(mutationLogTable.schema))(rawEvents.results).map(
-    ({ createdAt, ...mutationEventEncoded }) => ({
-      mutationEventEncoded,
-      metadata: Option.some({ createdAt }),
-    }),
+  const execDb = <T>(cb: (db: D1Database) => Promise<D1Result<T>>) =>
+    Effect.tryPromise({
+      try: () => cb(env.DB),
+      catch: (error) => new UnexpectedError({ cause: error, payload: { dbName } }),
+    }).pipe(
+      Effect.map((_) => _.results),
+      Effect.withSpan('@livestore/sync-cf:durable-object:execDb'),
     )
-  return events
-  }

-  const
-
-
-
-
-  }
+  const getHead: Effect.Effect<EventSequenceNumber.GlobalEventSequenceNumber, UnexpectedError> = Effect.gen(
+    function* () {
+      const result = yield* execDb<{ seqNum: EventSequenceNumber.GlobalEventSequenceNumber }>((db) =>
+        db.prepare(`SELECT seqNum FROM ${dbName} ORDER BY seqNum DESC LIMIT 1`).all(),
+      )

-
-
-
+      return result[0]?.seqNum ?? EventSequenceNumber.ROOT.global
+    },
+  ).pipe(UnexpectedError.mapToUnexpectedError)

-
+  const getEvents = (
+    cursor: number | undefined,
+  ): Effect.Effect<
+    ReadonlyArray<{ eventEncoded: LiveStoreEvent.AnyEncodedGlobal; metadata: Option.Option<SyncMetadata> }>,
+    UnexpectedError
+  > =>
+    Effect.gen(function* () {
+      const whereClause = cursor === undefined ? '' : `WHERE seqNum > ${cursor}`
+      const sql = `SELECT * FROM ${dbName} ${whereClause} ORDER BY seqNum ASC`
+      // TODO handle case where `cursor` was not found
+      const rawEvents = yield* execDb((db) => db.prepare(sql).all())
+      const events = Schema.decodeUnknownSync(Schema.Array(eventlogTable.rowSchema))(rawEvents).map(
+        ({ createdAt, ...eventEncoded }) => ({
+          eventEncoded,
+          metadata: Option.some({ createdAt }),
+        }),
+      )
+      return events
+    }).pipe(UnexpectedError.mapToUnexpectedError)
+
+  const appendEvents: SyncStorage['appendEvents'] = (batch, createdAt) =>
+    Effect.gen(function* () {
+      // If there are no events, do nothing.
+      if (batch.length === 0) return
+
+      // CF D1 limits:
+      // Maximum bound parameters per query 100, Maximum arguments per SQL function 32
+      // Thus we need to split the batch into chunks of max (100/7=)14 events each.
+      const CHUNK_SIZE = 14
+
+      for (let i = 0; i < batch.length; i += CHUNK_SIZE) {
+        const chunk = batch.slice(i, i + CHUNK_SIZE)
+
+        // Create a list of placeholders ("(?, ?, ?, ?, ?, ?, ?)"), corresponding to each event.
+        const valuesPlaceholders = chunk.map(() => '(?, ?, ?, ?, ?, ?, ?)').join(', ')
+        const sql = `INSERT INTO ${dbName} (seqNum, parentSeqNum, args, name, createdAt, clientId, sessionId) VALUES ${valuesPlaceholders}`
+        // Flatten the event properties into a parameters array.
+        const params = chunk.flatMap((event) => [
+          event.seqNum,
+          event.parentSeqNum,
+          event.args === undefined ? null : JSON.stringify(event.args),
+          event.name,
+          createdAt,
+          event.clientId,
+          event.sessionId,
+        ])
+
+        yield* execDb((db) =>
+          db
+            .prepare(sql)
+            .bind(...params)
+            .run(),
+        )
+      }
+    }).pipe(UnexpectedError.mapToUnexpectedError)
+
+  const resetStore = Effect.gen(function* () {
+    yield* Effect.promise(() => ctx.storage.deleteAll())
+  }).pipe(UnexpectedError.mapToUnexpectedError)
+
+  return { dbName, getHead, getEvents, appendEvents, resetStore }
 }
+
+const getStoreId = (request: Request) => {
+  const url = new URL(request.url)
+  const searchParams = url.searchParams
+  const storeId = searchParams.get('storeId')
+  if (storeId === null) {
+    throw new Error('storeId search param is required')
+  }
+  return storeId
+}
+
+const toValidTableName = (str: string) => str.replaceAll(/[^a-zA-Z0-9]/g, '_')
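For orientation, a hedged sketch of how a Worker entry point could route WebSocket connections to the Durable Object defined above. The `worker.ts` added in this release appears in the file summary, but its diff is not part of this section; the binding names follow the Env interface shown above, and everything else (paths, error handling) is assumed:

// Illustrative only — not the package's actual worker.ts, whose contents are not shown here.
import type { Env } from './durable-object.js'

export default {
  fetch: async (request: Request, env: Env) => {
    // getStoreId in the Durable Object requires a `storeId` search param, so validate it here as well.
    const storeId = new URL(request.url).searchParams.get('storeId')
    if (storeId === null) return new Response('storeId search param is required', { status: 400 })

    // One Durable Object instance per storeId; forward the upgrade request to it.
    const id = env.WEBSOCKET_SERVER.idFromName(storeId)
    return env.WEBSOCKET_SERVER.get(id).fetch(request)
  },
}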