@livestore/sync-cf 0.3.0-dev.8 → 0.3.0
This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
- package/dist/.tsbuildinfo +1 -1
- package/dist/cf-worker/durable-object.d.ts +45 -28
- package/dist/cf-worker/durable-object.d.ts.map +1 -1
- package/dist/cf-worker/durable-object.js +224 -121
- package/dist/cf-worker/durable-object.js.map +1 -1
- package/dist/cf-worker/worker.d.ts +44 -1
- package/dist/cf-worker/worker.d.ts.map +1 -1
- package/dist/cf-worker/worker.js +83 -15
- package/dist/cf-worker/worker.js.map +1 -1
- package/dist/common/mod.d.ts +5 -0
- package/dist/common/mod.d.ts.map +1 -1
- package/dist/common/mod.js +5 -0
- package/dist/common/mod.js.map +1 -1
- package/dist/common/ws-message-types.d.ts +148 -98
- package/dist/common/ws-message-types.d.ts.map +1 -1
- package/dist/common/ws-message-types.js +19 -24
- package/dist/common/ws-message-types.js.map +1 -1
- package/dist/sync-impl/ws-impl.d.ts +2 -5
- package/dist/sync-impl/ws-impl.d.ts.map +1 -1
- package/dist/sync-impl/ws-impl.js +89 -36
- package/dist/sync-impl/ws-impl.js.map +1 -1
- package/package.json +4 -4
- package/src/cf-worker/durable-object.ts +273 -126
- package/src/cf-worker/worker.ts +125 -16
- package/src/common/mod.ts +7 -0
- package/src/common/ws-message-types.ts +22 -36
- package/src/sync-impl/ws-impl.ts +145 -90
- package/dist/cf-worker/index.d.ts +0 -3
- package/dist/cf-worker/index.d.ts.map +0 -1
- package/dist/cf-worker/index.js +0 -33
- package/dist/cf-worker/index.js.map +0 -1
- package/dist/cf-worker/make-worker.d.ts +0 -6
- package/dist/cf-worker/make-worker.d.ts.map +0 -1
- package/dist/cf-worker/make-worker.js +0 -31
- package/dist/cf-worker/make-worker.js.map +0 -1
- package/dist/cf-worker/types.d.ts +0 -2
- package/dist/cf-worker/types.d.ts.map +0 -1
- package/dist/cf-worker/types.js +0 -2
- package/dist/cf-worker/types.js.map +0 -1
- package/dist/common/index.d.ts +0 -2
- package/dist/common/index.d.ts.map +0 -1
- package/dist/common/index.js +0 -2
- package/dist/common/index.js.map +0 -1
- package/dist/sync-impl/index.d.ts +0 -2
- package/dist/sync-impl/index.d.ts.map +0 -1
- package/dist/sync-impl/index.js +0 -2
- package/dist/sync-impl/index.js.map +0 -1
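The headline API change visible in the diff below is the expanded `MakeDurableObjectClassOptions` surface: in addition to `onPush`/`onPull`, the Durable Object factory now accepts `onPushRes` and `onPullRes` callbacks that observe the responses sent back to clients. As a rough, illustrative sketch of how a consumer might wire this up (the import path, export name, and callback bodies are assumptions, not taken from the package's docs):

```ts
// Illustrative sketch only - not an excerpt from the package.
// Assumes the subpath export '@livestore/sync-cf/cf-worker'; adjust to the package's actual exports.
import { makeDurableObject } from '@livestore/sync-cf/cf-worker'

// makeDurableObject returns a DurableObject class; the new *Res hooks fire
// when the corresponding ack/response message is sent back over the socket.
export class WebSocketServer extends makeDurableObject({
  onPush: async (message) => console.log('push received:', message.batch.length, 'events'),
  onPushRes: async (message) => console.log('push response:', message._tag),
  onPull: async (message) => console.log('pull requested, cursor:', message.cursor),
  onPullRes: async (message) => console.log('pull response:', message._tag),
}) {}
```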
package/src/cf-worker/durable-object.ts

@@ -1,5 +1,5 @@
-import { makeColumnSpec } from '@livestore/common'
-import {
+import { makeColumnSpec, UnexpectedError } from '@livestore/common'
+import { EventSequenceNumber, type LiveStoreEvent, State } from '@livestore/common/schema'
 import { shouldNeverHappen } from '@livestore/utils'
 import { Effect, Logger, LogLevel, Option, Schema } from '@livestore/utils/effect'
 import { DurableObject } from 'cloudflare:workers'
@@ -8,7 +8,6 @@ import { WSMessage } from '../common/mod.js'
 import type { SyncMetadata } from '../common/ws-message-types.js'

 export interface Env {
-  WEBSOCKET_SERVER: DurableObjectNamespace
   DB: D1Database
   ADMIN_SECRET: string
 }
@@ -19,26 +18,41 @@ const encodeOutgoingMessage = Schema.encodeSync(Schema.parseJson(WSMessage.Backe
 const encodeIncomingMessage = Schema.encodeSync(Schema.parseJson(WSMessage.ClientToBackendMessage))
 const decodeIncomingMessage = Schema.decodeUnknownEither(Schema.parseJson(WSMessage.ClientToBackendMessage))

-
-
-
-
-
-
-
-
+export const eventlogTable = State.SQLite.table({
+  // NOTE actual table name is determined at runtime
+  name: 'eventlog_${PERSISTENCE_FORMAT_VERSION}_${storeId}',
+  columns: {
+    seqNum: State.SQLite.integer({ primaryKey: true, schema: EventSequenceNumber.GlobalEventSequenceNumber }),
+    parentSeqNum: State.SQLite.integer({ schema: EventSequenceNumber.GlobalEventSequenceNumber }),
+    name: State.SQLite.text({}),
+    args: State.SQLite.text({ schema: Schema.parseJson(Schema.Any), nullable: true }),
+    /** ISO date format. Currently only used for debugging purposes. */
+    createdAt: State.SQLite.text({}),
+    clientId: State.SQLite.text({}),
+    sessionId: State.SQLite.text({}),
+  },
 })

+const WebSocketAttachmentSchema = Schema.parseJson(
+  Schema.Struct({
+    storeId: Schema.String,
+  }),
+)
+
+export const PULL_CHUNK_SIZE = 100
+
 /**
- * Needs to be bumped when the storage format changes (e.g.
+ * Needs to be bumped when the storage format changes (e.g. eventlogTable schema changes)
  *
  * Changing this version number will lead to a "soft reset".
  */
-export const PERSISTENCE_FORMAT_VERSION =
+export const PERSISTENCE_FORMAT_VERSION = 7

 export type MakeDurableObjectClassOptions = {
   onPush?: (message: WSMessage.PushReq) => Effect.Effect<void> | Promise<void>
+  onPushRes?: (message: WSMessage.PushAck | WSMessage.Error) => Effect.Effect<void> | Promise<void>
   onPull?: (message: WSMessage.PullReq) => Effect.Effect<void> | Promise<void>
+  onPullRes?: (message: WSMessage.PullRes | WSMessage.Error) => Effect.Effect<void> | Promise<void>
 }

 export type MakeDurableObjectClass = (options?: MakeDurableObjectClassOptions) => {
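The `eventlogTable` definition above is a schema template; the concrete, per-store table name is derived at runtime from `PERSISTENCE_FORMAT_VERSION` and the `storeId` (see `makeStorage` and `toValidTableName` near the end of this diff). A small illustration of that derivation (the `storeId` value is made up):

```ts
// Illustration of the runtime table-name derivation (storeId is a made-up example).
const PERSISTENCE_FORMAT_VERSION = 7
const toValidTableName = (str: string) => str.replaceAll(/[^a-zA-Z0-9]/g, '_')

const storeId = 'todomvc:alice'
const dbName = `eventlog_${PERSISTENCE_FORMAT_VERSION}_${toValidTableName(storeId)}`
// => 'eventlog_7_todomvc_alice'
```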
@@ -47,17 +61,21 @@ export type MakeDurableObjectClass = (options?: MakeDurableObjectClassOptions) =

 export const makeDurableObject: MakeDurableObjectClass = (options) => {
   return class WebSocketServerBase extends DurableObject<Env> {
-
-
+    /** Needed to prevent concurrent pushes */
+    private pushSemaphore = Effect.makeSemaphore(1).pipe(Effect.runSync)

-
-
-
+    private currentHead: EventSequenceNumber.GlobalEventSequenceNumber | 'uninitialized' = 'uninitialized'
+
+    fetch = async (request: Request) =>
+      Effect.sync(() => {
+        const storeId = getStoreId(request)
+        const storage = makeStorage(this.ctx, this.env, storeId)

-    fetch = async (_request: Request) =>
-      Effect.gen(this, function* () {
         const { 0: client, 1: server } = new WebSocketPair()

+        // Since we're using websocket hibernation, we need to remember the storeId for subsequent `webSocketMessage` calls
+        server.serializeAttachment(Schema.encodeSync(WebSocketAttachmentSchema)({ storeId }))
+
         // See https://developers.cloudflare.com/durable-objects/examples/websocket-hibernation-server

         this.ctx.acceptWebSocket(server)
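Because the server uses WebSocket hibernation, per-connection state such as the `storeId` cannot live only in instance fields; the hunk above serializes it onto the socket and the `webSocketMessage` handler (later hunks) reads it back. A minimal, self-contained sketch of that pattern (the `AttachedSocket` type is a stand-in for Cloudflare's `WebSocket`, and the package itself encodes the attachment with an effect Schema codec rather than plain JSON):

```ts
// Sketch of the hibernation-attachment pattern; AttachedSocket stands in for
// Cloudflare's WebSocket type.
type AttachedSocket = {
  serializeAttachment: (value: unknown) => void
  deserializeAttachment: () => unknown
}

// On connect: remember which store this socket belongs to.
const rememberStoreId = (socket: AttachedSocket, storeId: string) =>
  socket.serializeAttachment(JSON.stringify({ storeId }))

// On a later message (possibly after the Durable Object was hibernated): recover it.
const recallStoreId = (socket: AttachedSocket): string =>
  (JSON.parse(socket.deserializeAttachment() as string) as { storeId: string }).storeId
```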
@@ -69,8 +87,8 @@ export const makeDurableObject: MakeDurableObjectClass = (options) => {
           ),
         )

-        const colSpec = makeColumnSpec(
-        this.env.DB.exec(`CREATE TABLE IF NOT EXISTS ${
+        const colSpec = makeColumnSpec(eventlogTable.sqliteDef.ast)
+        this.env.DB.exec(`CREATE TABLE IF NOT EXISTS ${storage.dbName} (${colSpec}) strict`)

         return new Response(null, {
           status: 101,
@@ -78,108 +96,156 @@ export const makeDurableObject: MakeDurableObjectClass = (options) => {
         })
       }).pipe(Effect.tapCauseLogPretty, Effect.runPromise)

-    webSocketMessage = (ws: WebSocketClient, message: ArrayBuffer | string) =>
-
-
+    webSocketMessage = (ws: WebSocketClient, message: ArrayBuffer | string) => {
+      console.log('webSocketMessage', message)
+      const decodedMessageRes = decodeIncomingMessage(message)

-
-
-
-
+      if (decodedMessageRes._tag === 'Left') {
+        console.error('Invalid message received', decodedMessageRes.left)
+        return
+      }

-
-
+      const decodedMessage = decodedMessageRes.right
+      const requestId = decodedMessage.requestId
+
+      return Effect.gen(this, function* () {
+        const { storeId } = yield* Schema.decode(WebSocketAttachmentSchema)(ws.deserializeAttachment())
+        const storage = makeStorage(this.ctx, this.env, storeId)

         try {
           switch (decodedMessage._tag) {
+            // TODO allow pulling concurrently to not block incoming push requests
             case 'WSMessage.PullReq': {
               if (options?.onPull) {
                 yield* Effect.tryAll(() => options.onPull!(decodedMessage))
               }

+              const respond = (message: WSMessage.PullRes) =>
+                Effect.gen(function* () {
+                  if (options?.onPullRes) {
+                    yield* Effect.tryAll(() => options.onPullRes!(message))
+                  }
+                  ws.send(encodeOutgoingMessage(message))
+                })
+
               const cursor = decodedMessage.cursor
-              const CHUNK_SIZE = 100

               // TODO use streaming
-              const remainingEvents =
-
-              //
-
-
-
-
-
-
-
-
-
-
-              break
-            }
+              const remainingEvents = yield* storage.getEvents(cursor)
+
+              // Send at least one response, even if there are no events
+              const batches =
+                remainingEvents.length === 0
+                  ? [[]]
+                  : Array.from({ length: Math.ceil(remainingEvents.length / PULL_CHUNK_SIZE) }, (_, i) =>
+                      remainingEvents.slice(i * PULL_CHUNK_SIZE, (i + 1) * PULL_CHUNK_SIZE),
+                    )
+
+              for (const [index, batch] of batches.entries()) {
+                const remaining = Math.max(0, remainingEvents.length - (index + 1) * PULL_CHUNK_SIZE)
+                yield* respond(WSMessage.PullRes.make({ batch, remaining, requestId: { context: 'pull', requestId } }))
               }

               break
             }
             case 'WSMessage.PushReq': {
+              const respond = (message: WSMessage.PushAck | WSMessage.Error) =>
+                Effect.gen(function* () {
+                  if (options?.onPushRes) {
+                    yield* Effect.tryAll(() => options.onPushRes!(message))
+                  }
+                  ws.send(encodeOutgoingMessage(message))
+                })
+
+              if (decodedMessage.batch.length === 0) {
+                yield* respond(WSMessage.PushAck.make({ requestId }))
+                return
+              }
+
+              yield* this.pushSemaphore.take(1)
+
               if (options?.onPush) {
                 yield* Effect.tryAll(() => options.onPush!(decodedMessage))
               }

               // TODO check whether we could use the Durable Object storage for this to speed up the lookup
-              const
-
+              // const expectedParentNum = yield* storage.getHead
+
+              let currentHead: EventSequenceNumber.GlobalEventSequenceNumber
+              if (this.currentHead === 'uninitialized') {
+                const currentHeadFromStorage = yield* Effect.promise(() => this.ctx.storage.get('currentHead'))
+                // console.log('currentHeadFromStorage', currentHeadFromStorage)
+                if (currentHeadFromStorage === undefined) {
+                  // console.log('currentHeadFromStorage is null, getting from D1')
+                  // currentHead = yield* storage.getHead
+                  // console.log('currentHeadFromStorage is null, using root')
+                  currentHead = EventSequenceNumber.ROOT.global
+                } else {
+                  currentHead = currentHeadFromStorage as EventSequenceNumber.GlobalEventSequenceNumber
+                }
+              } else {
+                // console.log('currentHead is already initialized', this.currentHead)
+                currentHead = this.currentHead
+              }

-
-
-
-
-
-
-
+              // TODO handle clientId unique conflict
+              // Validate the batch
+              const firstEvent = decodedMessage.batch[0]!
+              if (firstEvent.parentSeqNum !== currentHead) {
+                const err = WSMessage.Error.make({
+                  message: `Invalid parent event number. Received e${firstEvent.parentSeqNum} but expected e${currentHead}`,
+                  requestId,
+                })

-
+                yield* Effect.logError(err)

-
-
-
+                yield* respond(err)
+                yield* this.pushSemaphore.release(1)
+                return
+              }

-
+              yield* respond(WSMessage.PushAck.make({ requestId }))

-
+              const createdAt = new Date().toISOString()

-
-
+              // NOTE we're not waiting for this to complete yet to allow the broadcast to happen right away
+              // while letting the async storage write happen in the background
+              const storeFiber = yield* storage.appendEvents(decodedMessage.batch, createdAt).pipe(Effect.fork)

-
-
-              )
+              this.currentHead = decodedMessage.batch.at(-1)!.seqNum
+              yield* Effect.promise(() => this.ctx.storage.put('currentHead', this.currentHead))

-
+              yield* this.pushSemaphore.release(1)

-
+              const connectedClients = this.ctx.getWebSockets()

-
-
-
-
-
-
-              }),
-            )
+              // console.debug(`Broadcasting push batch to ${this.subscribedWebSockets.size} clients`)
+              if (connectedClients.length > 0) {
+                // TODO refactor to batch api
+                const pullRes = WSMessage.PullRes.make({
+                  batch: decodedMessage.batch.map((eventEncoded) => ({
+                    eventEncoded,
+                    metadata: Option.some({ createdAt }),
+                  })),
+                  remaining: 0,
+                  requestId: { context: 'push', requestId },
+                })
+                const pullResEnc = encodeOutgoingMessage(pullRes)

-
-
-
-                conn.send(broadcastMessage)
-                // }
-              }
+                // Only calling once for now.
+                if (options?.onPullRes) {
+                  yield* Effect.tryAll(() => options.onPullRes!(pullRes))
                }

-
-
-
+                // NOTE we're also sending the pullRes to the pushing ws client as a confirmation
+                for (const conn of connectedClients) {
+                  conn.send(pullResEnc)
+                }
               }

+              // Wait for the storage write to complete before finishing this request
+              yield* storeFiber
+
               break
             }
             case 'WSMessage.AdminResetRoomReq': {
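The `PullReq` branch above answers with one `PullRes` per `PULL_CHUNK_SIZE` events and reports how many events are still outstanding after each batch, so even an empty backlog produces exactly one response. A standalone sketch of that batching arithmetic (events are reduced to plain numbers here):

```ts
// Standalone sketch of the PullRes batching above; events are just numbers.
const PULL_CHUNK_SIZE = 100
const remainingEvents = Array.from({ length: 250 }, (_, i) => i)

const batches =
  remainingEvents.length === 0
    ? [[]]
    : Array.from({ length: Math.ceil(remainingEvents.length / PULL_CHUNK_SIZE) }, (_, i) =>
        remainingEvents.slice(i * PULL_CHUNK_SIZE, (i + 1) * PULL_CHUNK_SIZE),
      )

for (const [index, batch] of batches.entries()) {
  const remaining = Math.max(0, remainingEvents.length - (index + 1) * PULL_CHUNK_SIZE)
  console.log(`batch of ${batch.length}, remaining ${remaining}`)
}
// => batch of 100, remaining 150
// => batch of 100, remaining 50
// => batch of 50, remaining 0
```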
@@ -188,7 +254,7 @@ export const makeDurableObject: MakeDurableObjectClass = (options) => {
                 return
               }

-              yield*
+              yield* storage.resetStore
               ws.send(encodeOutgoingMessage(WSMessage.AdminResetRoomRes.make({ requestId })))

               break
@@ -216,12 +282,20 @@ export const makeDurableObject: MakeDurableObjectClass = (options) => {
           ws.send(encodeOutgoingMessage(WSMessage.Error.make({ message: error.message, requestId })))
         }
       }).pipe(
-        Effect.withSpan(
+        Effect.withSpan(`@livestore/sync-cf:durable-object:webSocketMessage:${decodedMessage._tag}`, {
+          attributes: { requestId },
+        }),
         Effect.tapCauseLogPretty,
+        Effect.tapErrorCause((cause) =>
+          Effect.sync(() =>
+            ws.send(encodeOutgoingMessage(WSMessage.Error.make({ message: cause.toString(), requestId }))),
+          ),
+        ),
         Logger.withMinimumLogLevel(LogLevel.Debug),
-        Effect.provide(Logger.
+        Effect.provide(Logger.prettyWithThread('durable-object')),
         Effect.runPromise,
       )
+    }

     webSocketClose = async (ws: WebSocketClient, code: number, _reason: string, _wasClean: boolean) => {
       // If the client closes the connection, the runtime will invoke the webSocketClose() handler.
@@ -230,48 +304,121 @@ export const makeDurableObject: MakeDurableObjectClass = (options) => {
   }
 }

-
-
-
-
-
-
-
+type SyncStorage = {
+  dbName: string
+  // getHead: Effect.Effect<EventSequenceNumber.GlobalEventSequenceNumber, UnexpectedError>
+  getEvents: (
+    cursor: number | undefined,
+  ) => Effect.Effect<
+    ReadonlyArray<{ eventEncoded: LiveStoreEvent.AnyEncodedGlobal; metadata: Option.Option<SyncMetadata> }>,
+    UnexpectedError
+  >
+  appendEvents: (
+    batch: ReadonlyArray<LiveStoreEvent.AnyEncodedGlobal>,
+    createdAt: string,
+  ) => Effect.Effect<void, UnexpectedError>
+  resetStore: Effect.Effect<void, UnexpectedError>
+}

-
-}
+const makeStorage = (ctx: DurableObjectState, env: Env, storeId: string): SyncStorage => {
+  const dbName = `eventlog_${PERSISTENCE_FORMAT_VERSION}_${toValidTableName(storeId)}`

-const
-
-
-
-
-
-
-  // TODO handle case where `cursor` was not found
-  const rawEvents = await env.DB.prepare(sql).all()
-  if (rawEvents.error) {
-    throw new Error(rawEvents.error)
-  }
-  const events = Schema.decodeUnknownSync(Schema.Array(mutationLogTable.schema))(rawEvents.results).map(
-    ({ createdAt, ...mutationEventEncoded }) => ({
-      mutationEventEncoded,
-      metadata: Option.some({ createdAt }),
-    }),
+  const execDb = <T>(cb: (db: D1Database) => Promise<D1Result<T>>) =>
+    Effect.tryPromise({
+      try: () => cb(env.DB),
+      catch: (error) => new UnexpectedError({ cause: error, payload: { dbName } }),
+    }).pipe(
+      Effect.map((_) => _.results),
+      Effect.withSpan('@livestore/sync-cf:durable-object:execDb'),
     )
-  return events
-}

-const
-
-
-
-
-
+  // const getHead: Effect.Effect<EventSequenceNumber.GlobalEventSequenceNumber, UnexpectedError> = Effect.gen(
+  //   function* () {
+  //     const result = yield* execDb<{ seqNum: EventSequenceNumber.GlobalEventSequenceNumber }>((db) =>
+  //       db.prepare(`SELECT seqNum FROM ${dbName} ORDER BY seqNum DESC LIMIT 1`).all(),
+  //     )
+
+  //     return result[0]?.seqNum ?? EventSequenceNumber.ROOT.global
+  //   },
+  // ).pipe(UnexpectedError.mapToUnexpectedError)

-const
-
+  const getEvents = (
+    cursor: number | undefined,
+  ): Effect.Effect<
+    ReadonlyArray<{ eventEncoded: LiveStoreEvent.AnyEncodedGlobal; metadata: Option.Option<SyncMetadata> }>,
+    UnexpectedError
+  > =>
+    Effect.gen(function* () {
+      const whereClause = cursor === undefined ? '' : `WHERE seqNum > ${cursor}`
+      const sql = `SELECT * FROM ${dbName} ${whereClause} ORDER BY seqNum ASC`
+      // TODO handle case where `cursor` was not found
+      const rawEvents = yield* execDb((db) => db.prepare(sql).all())
+      const events = Schema.decodeUnknownSync(Schema.Array(eventlogTable.rowSchema))(rawEvents).map(
+        ({ createdAt, ...eventEncoded }) => ({
+          eventEncoded,
+          metadata: Option.some({ createdAt }),
+        }),
+      )
+      return events
+    }).pipe(UnexpectedError.mapToUnexpectedError)
+
+  const appendEvents: SyncStorage['appendEvents'] = (batch, createdAt) =>
+    Effect.gen(function* () {
+      // If there are no events, do nothing.
+      if (batch.length === 0) return
+
+      // CF D1 limits:
+      // Maximum bound parameters per query 100, Maximum arguments per SQL function 32
+      // Thus we need to split the batch into chunks of max (100/7=)14 events each.
+      const CHUNK_SIZE = 14
+
+      for (let i = 0; i < batch.length; i += CHUNK_SIZE) {
+        const chunk = batch.slice(i, i + CHUNK_SIZE)
+
+        // Create a list of placeholders ("(?, ?, ?, ?, ?, ?, ?)"), corresponding to each event.
+        const valuesPlaceholders = chunk.map(() => '(?, ?, ?, ?, ?, ?, ?)').join(', ')
+        const sql = `INSERT INTO ${dbName} (seqNum, parentSeqNum, args, name, createdAt, clientId, sessionId) VALUES ${valuesPlaceholders}`
+        // Flatten the event properties into a parameters array.
+        const params = chunk.flatMap((event) => [
+          event.seqNum,
+          event.parentSeqNum,
+          event.args === undefined ? null : JSON.stringify(event.args),
+          event.name,
+          createdAt,
+          event.clientId,
+          event.sessionId,
+        ])
+
+        yield* execDb((db) =>
+          db
+            .prepare(sql)
+            .bind(...params)
+            .run(),
+        )
+      }
+    }).pipe(UnexpectedError.mapToUnexpectedError)
+
+  const resetStore = Effect.gen(function* () {
+    yield* Effect.promise(() => ctx.storage.deleteAll())
+  }).pipe(UnexpectedError.mapToUnexpectedError)
+
+  return {
+    dbName,
+    // getHead,
+    getEvents,
+    appendEvents,
+    resetStore,
   }
+}

-
+const getStoreId = (request: Request) => {
+  const url = new URL(request.url)
+  const searchParams = url.searchParams
+  const storeId = searchParams.get('storeId')
+  if (storeId === null) {
+    throw new Error('storeId search param is required')
+  }
+  return storeId
 }
+
+const toValidTableName = (str: string) => str.replaceAll(/[^a-zA-Z0-9]/g, '_')
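The new `appendEvents` works around D1's limit of 100 bound parameters per query: each event binds 7 columns, so at most floor(100 / 7) = 14 events fit into one INSERT. A minimal sketch of how a push batch splits into statements (the event type is reduced to a placeholder):

```ts
// Sketch of the D1 parameter-limit chunking used by appendEvents.
// 7 bound params per event, max 100 params per query => 14 events per INSERT.
type EncodedEvent = Record<string, unknown> // placeholder for LiveStoreEvent.AnyEncodedGlobal

const CHUNK_SIZE = Math.floor(100 / 7) // 14

const chunksFor = (batch: ReadonlyArray<EncodedEvent>): EncodedEvent[][] => {
  const chunks: EncodedEvent[][] = []
  for (let i = 0; i < batch.length; i += CHUNK_SIZE) {
    chunks.push(batch.slice(i, i + CHUNK_SIZE))
  }
  return chunks
}

// A push of 30 events is written as three INSERTs binding 14 + 14 + 2 rows.
console.log(chunksFor(Array.from({ length: 30 }, () => ({}))).map((c) => c.length)) // [ 14, 14, 2 ]
```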