@livestore/sync-cf 0.4.0-dev.2 → 0.4.0-dev.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +60 -0
- package/dist/.tsbuildinfo +1 -1
- package/dist/cf-worker/do/durable-object.d.ts +45 -0
- package/dist/cf-worker/do/durable-object.d.ts.map +1 -0
- package/dist/cf-worker/do/durable-object.js +151 -0
- package/dist/cf-worker/do/durable-object.js.map +1 -0
- package/dist/cf-worker/do/layer.d.ts +34 -0
- package/dist/cf-worker/do/layer.d.ts.map +1 -0
- package/dist/cf-worker/do/layer.js +91 -0
- package/dist/cf-worker/do/layer.js.map +1 -0
- package/dist/cf-worker/do/pull.d.ts +6 -0
- package/dist/cf-worker/do/pull.d.ts.map +1 -0
- package/dist/cf-worker/do/pull.js +47 -0
- package/dist/cf-worker/do/pull.js.map +1 -0
- package/dist/cf-worker/do/push.d.ts +14 -0
- package/dist/cf-worker/do/push.d.ts.map +1 -0
- package/dist/cf-worker/do/push.js +131 -0
- package/dist/cf-worker/do/push.js.map +1 -0
- package/dist/cf-worker/{durable-object.d.ts → do/sqlite.d.ts} +83 -67
- package/dist/cf-worker/do/sqlite.d.ts.map +1 -0
- package/dist/cf-worker/do/sqlite.js +36 -0
- package/dist/cf-worker/do/sqlite.js.map +1 -0
- package/dist/cf-worker/do/sync-storage.d.ts +25 -0
- package/dist/cf-worker/do/sync-storage.d.ts.map +1 -0
- package/dist/cf-worker/do/sync-storage.js +191 -0
- package/dist/cf-worker/do/sync-storage.js.map +1 -0
- package/dist/cf-worker/do/transport/do-rpc-server.d.ts +9 -0
- package/dist/cf-worker/do/transport/do-rpc-server.d.ts.map +1 -0
- package/dist/cf-worker/do/transport/do-rpc-server.js +45 -0
- package/dist/cf-worker/do/transport/do-rpc-server.js.map +1 -0
- package/dist/cf-worker/do/transport/http-rpc-server.d.ts +8 -0
- package/dist/cf-worker/do/transport/http-rpc-server.d.ts.map +1 -0
- package/dist/cf-worker/do/transport/http-rpc-server.js +30 -0
- package/dist/cf-worker/do/transport/http-rpc-server.js.map +1 -0
- package/dist/cf-worker/do/transport/ws-rpc-server.d.ts +4 -0
- package/dist/cf-worker/do/transport/ws-rpc-server.d.ts.map +1 -0
- package/dist/cf-worker/do/transport/ws-rpc-server.js +21 -0
- package/dist/cf-worker/do/transport/ws-rpc-server.js.map +1 -0
- package/dist/cf-worker/mod.d.ts +4 -2
- package/dist/cf-worker/mod.d.ts.map +1 -1
- package/dist/cf-worker/mod.js +3 -2
- package/dist/cf-worker/mod.js.map +1 -1
- package/dist/cf-worker/shared.d.ts +175 -0
- package/dist/cf-worker/shared.d.ts.map +1 -0
- package/dist/cf-worker/shared.js +43 -0
- package/dist/cf-worker/shared.js.map +1 -0
- package/dist/cf-worker/worker.d.ts +59 -51
- package/dist/cf-worker/worker.d.ts.map +1 -1
- package/dist/cf-worker/worker.js +75 -43
- package/dist/cf-worker/worker.js.map +1 -1
- package/dist/client/mod.d.ts +4 -0
- package/dist/client/mod.d.ts.map +1 -0
- package/dist/client/mod.js +4 -0
- package/dist/client/mod.js.map +1 -0
- package/dist/client/transport/do-rpc-client.d.ts +40 -0
- package/dist/client/transport/do-rpc-client.d.ts.map +1 -0
- package/dist/client/transport/do-rpc-client.js +115 -0
- package/dist/client/transport/do-rpc-client.js.map +1 -0
- package/dist/client/transport/http-rpc-client.d.ts +43 -0
- package/dist/client/transport/http-rpc-client.d.ts.map +1 -0
- package/dist/client/transport/http-rpc-client.js +103 -0
- package/dist/client/transport/http-rpc-client.js.map +1 -0
- package/dist/client/transport/ws-rpc-client.d.ts +46 -0
- package/dist/client/transport/ws-rpc-client.d.ts.map +1 -0
- package/dist/client/transport/ws-rpc-client.js +108 -0
- package/dist/client/transport/ws-rpc-client.js.map +1 -0
- package/dist/common/constants.d.ts +7 -0
- package/dist/common/constants.d.ts.map +1 -0
- package/dist/common/constants.js +17 -0
- package/dist/common/constants.js.map +1 -0
- package/dist/common/do-rpc-schema.d.ts +76 -0
- package/dist/common/do-rpc-schema.d.ts.map +1 -0
- package/dist/common/do-rpc-schema.js +48 -0
- package/dist/common/do-rpc-schema.js.map +1 -0
- package/dist/common/http-rpc-schema.d.ts +58 -0
- package/dist/common/http-rpc-schema.d.ts.map +1 -0
- package/dist/common/http-rpc-schema.js +37 -0
- package/dist/common/http-rpc-schema.js.map +1 -0
- package/dist/common/mod.d.ts +8 -1
- package/dist/common/mod.d.ts.map +1 -1
- package/dist/common/mod.js +7 -1
- package/dist/common/mod.js.map +1 -1
- package/dist/common/{ws-message-types.d.ts → sync-message-types.d.ts} +119 -153
- package/dist/common/sync-message-types.d.ts.map +1 -0
- package/dist/common/sync-message-types.js +60 -0
- package/dist/common/sync-message-types.js.map +1 -0
- package/dist/common/ws-rpc-schema.d.ts +55 -0
- package/dist/common/ws-rpc-schema.d.ts.map +1 -0
- package/dist/common/ws-rpc-schema.js +32 -0
- package/dist/common/ws-rpc-schema.js.map +1 -0
- package/package.json +7 -8
- package/src/cf-worker/do/durable-object.ts +238 -0
- package/src/cf-worker/do/layer.ts +128 -0
- package/src/cf-worker/do/pull.ts +75 -0
- package/src/cf-worker/do/push.ts +205 -0
- package/src/cf-worker/do/sqlite.ts +37 -0
- package/src/cf-worker/do/sync-storage.ts +323 -0
- package/src/cf-worker/do/transport/do-rpc-server.ts +84 -0
- package/src/cf-worker/do/transport/http-rpc-server.ts +51 -0
- package/src/cf-worker/do/transport/ws-rpc-server.ts +34 -0
- package/src/cf-worker/mod.ts +4 -2
- package/src/cf-worker/shared.ts +141 -0
- package/src/cf-worker/worker.ts +138 -116
- package/src/client/mod.ts +3 -0
- package/src/client/transport/do-rpc-client.ts +189 -0
- package/src/client/transport/http-rpc-client.ts +225 -0
- package/src/client/transport/ws-rpc-client.ts +202 -0
- package/src/common/constants.ts +18 -0
- package/src/common/do-rpc-schema.ts +54 -0
- package/src/common/http-rpc-schema.ts +40 -0
- package/src/common/mod.ts +10 -1
- package/src/common/sync-message-types.ts +117 -0
- package/src/common/ws-rpc-schema.ts +36 -0
- package/dist/cf-worker/cf-types.d.ts +0 -2
- package/dist/cf-worker/cf-types.d.ts.map +0 -1
- package/dist/cf-worker/cf-types.js +0 -2
- package/dist/cf-worker/cf-types.js.map +0 -1
- package/dist/cf-worker/durable-object.d.ts.map +0 -1
- package/dist/cf-worker/durable-object.js +0 -317
- package/dist/cf-worker/durable-object.js.map +0 -1
- package/dist/common/ws-message-types.d.ts.map +0 -1
- package/dist/common/ws-message-types.js +0 -57
- package/dist/common/ws-message-types.js.map +0 -1
- package/dist/sync-impl/mod.d.ts +0 -2
- package/dist/sync-impl/mod.d.ts.map +0 -1
- package/dist/sync-impl/mod.js +0 -2
- package/dist/sync-impl/mod.js.map +0 -1
- package/dist/sync-impl/ws-impl.d.ts +0 -7
- package/dist/sync-impl/ws-impl.d.ts.map +0 -1
- package/dist/sync-impl/ws-impl.js +0 -175
- package/dist/sync-impl/ws-impl.js.map +0 -1
- package/src/cf-worker/cf-types.ts +0 -12
- package/src/cf-worker/durable-object.ts +0 -478
- package/src/common/ws-message-types.ts +0 -114
- package/src/sync-impl/mod.ts +0 -1
- package/src/sync-impl/ws-impl.ts +0 -274
package/src/cf-worker/do/sync-storage.ts
ADDED
@@ -0,0 +1,323 @@
+import { UnknownError } from '@livestore/common'
+import type { LiveStoreEvent } from '@livestore/common/schema'
+import type { CfTypes } from '@livestore/common-cf'
+import { Chunk, Effect, Option, Schema, Stream } from '@livestore/utils/effect'
+import { SyncMetadata } from '../../common/sync-message-types.ts'
+import { PERSISTENCE_FORMAT_VERSION, type StoreId } from '../shared.ts'
+import { eventlogTable } from './sqlite.ts'
+
+export type SyncStorage = {
+  dbName: string
+  getEvents: (cursor: number | undefined) => Effect.Effect<
+    {
+      total: number
+      stream: Stream.Stream<
+        { eventEncoded: LiveStoreEvent.Global.Encoded; metadata: Option.Option<SyncMetadata> },
+        UnknownError
+      >
+    },
+    UnknownError
+  >
+  appendEvents: (
+    batch: ReadonlyArray<LiveStoreEvent.Global.Encoded>,
+    createdAt: string,
+  ) => Effect.Effect<void, UnknownError>
+  resetStore: Effect.Effect<void, UnknownError>
+}
+
+export const makeStorage = (
+  ctx: CfTypes.DurableObjectState,
+  storeId: StoreId,
+  engine: { _tag: 'd1'; db: CfTypes.D1Database } | { _tag: 'do-sqlite' },
+): SyncStorage => {
+  const dbName = `eventlog_${PERSISTENCE_FORMAT_VERSION}_${toValidTableName(storeId)}`
+
+  const execDb = <T>(cb: (db: CfTypes.D1Database) => Promise<CfTypes.D1Result<T>>) =>
+    Effect.tryPromise({
+      try: () => cb(engine._tag === 'd1' ? engine.db : (undefined as never)),
+      catch: (error) => new UnknownError({ cause: error, payload: { dbName } }),
+    }).pipe(
+      Effect.map((_) => _.results),
+      Effect.withSpan('@livestore/sync-cf:durable-object:execDb'),
+    )
+
+  // Cloudflare's D1 HTTP endpoint rejects JSON responses once they exceed ~1MB.
+  // Keep individual SELECT batches comfortably below that threshold so we can
+  // serve large histories without tripping the limit.
+  const D1_MAX_JSON_RESPONSE_BYTES = 1_000_000
+  const D1_RESPONSE_SAFETY_MARGIN_BYTES = 64 * 1024
+  const D1_TARGET_RESPONSE_BYTES = D1_MAX_JSON_RESPONSE_BYTES - D1_RESPONSE_SAFETY_MARGIN_BYTES
+  const D1_INITIAL_PAGE_SIZE = 256
+  const D1_MIN_PAGE_SIZE = 1
+
+  const decodeEventlogRows = Schema.decodeUnknownSync(Schema.Array(eventlogTable.rowSchema))
+  const textEncoder = new TextEncoder()
+
+  const decreaseLimit = (limit: number) => Math.max(D1_MIN_PAGE_SIZE, Math.floor(limit / 2))
+  const increaseLimit = (limit: number) => Math.min(D1_INITIAL_PAGE_SIZE, limit * 2)
+
+  const computeNextLimit = (limit: number, encodedSize: number) => {
+    if (encodedSize > D1_TARGET_RESPONSE_BYTES && limit > D1_MIN_PAGE_SIZE) {
+      const next = decreaseLimit(limit)
+      return next === limit ? limit : next
+    }
+
+    if (encodedSize < D1_TARGET_RESPONSE_BYTES / 2 && limit < D1_INITIAL_PAGE_SIZE) {
+      const next = increaseLimit(limit)
+      return next === limit ? limit : next
+    }
+
+    return limit
+  }
+
+  const getEventsD1 = (
+    cursor: number | undefined,
+  ): Effect.Effect<
+    {
+      total: number
+      stream: Stream.Stream<
+        { eventEncoded: LiveStoreEvent.Global.Encoded; metadata: Option.Option<SyncMetadata> },
+        UnknownError
+      >
+    },
+    UnknownError
+  > =>
+    Effect.gen(function* () {
+      const countStatement =
+        cursor === undefined
+          ? `SELECT COUNT(*) as total FROM ${dbName}`
+          : `SELECT COUNT(*) as total FROM ${dbName} WHERE seqNum > ?`
+
+      const countRows = yield* execDb<{ total: number }>((db) => {
+        const prepared = db.prepare(countStatement)
+        return cursor === undefined ? prepared.all() : prepared.bind(cursor).all()
+      })
+
+      const total = Number(countRows[0]?.total ?? 0)
+
+      type State = { cursor: number | undefined; limit: number }
+      type EmittedEvent = { eventEncoded: LiveStoreEvent.Global.Encoded; metadata: Option.Option<SyncMetadata> }
+
+      const initialState: State = { cursor, limit: D1_INITIAL_PAGE_SIZE }
+
+      const fetchPage = (
+        state: State,
+      ): Effect.Effect<Option.Option<readonly [Chunk.Chunk<EmittedEvent>, State]>, UnknownError> =>
+        Effect.gen(function* () {
+          const statement =
+            state.cursor === undefined
+              ? `SELECT * FROM ${dbName} ORDER BY seqNum ASC LIMIT ?`
+              : `SELECT * FROM ${dbName} WHERE seqNum > ? ORDER BY seqNum ASC LIMIT ?`
+
+          const rawEvents = yield* execDb((db) => {
+            const prepared = db.prepare(statement)
+            return state.cursor === undefined
+              ? prepared.bind(state.limit).all()
+              : prepared.bind(state.cursor, state.limit).all()
+          })
+
+          if (rawEvents.length === 0) {
+            return Option.none()
+          }
+
+          const encodedSize = textEncoder.encode(JSON.stringify(rawEvents)).byteLength
+
+          if (encodedSize > D1_TARGET_RESPONSE_BYTES && state.limit > D1_MIN_PAGE_SIZE) {
+            const nextLimit = decreaseLimit(state.limit)
+
+            if (nextLimit !== state.limit) {
+              return yield* fetchPage({ cursor: state.cursor, limit: nextLimit })
+            }
+          }
+
+          const decodedRows = Chunk.fromIterable(decodeEventlogRows(rawEvents))
+
+          const eventsChunk = Chunk.map(decodedRows, ({ createdAt, ...eventEncoded }) => ({
+            eventEncoded,
+            metadata: Option.some(SyncMetadata.make({ createdAt })),
+          }))
+
+          const lastSeqNum = Chunk.unsafeLast(decodedRows).seqNum
+          const nextState: State = { cursor: lastSeqNum, limit: computeNextLimit(state.limit, encodedSize) }
+
+          return Option.some([eventsChunk, nextState] as const)
+        })
+
+      const stream = Stream.unfoldChunkEffect(initialState, fetchPage)
+
+      return { total, stream }
+    }).pipe(
+      UnknownError.mapToUnknownError,
+      Effect.withSpan('@livestore/sync-cf:durable-object:getEvents', {
+        attributes: { dbName, cursor, engine: engine._tag },
+      }),
+    )
+
+  const appendEventsD1: SyncStorage['appendEvents'] = (batch, createdAt) =>
+    Effect.gen(function* () {
+      // If there are no events, do nothing.
+      if (batch.length === 0) return
+
+      // CF D1 limits:
+      // Maximum bound parameters per query 100, Maximum arguments per SQL function 32
+      // Thus we need to split the batch into chunks of max (100/7=)14 events each.
+      const CHUNK_SIZE = 14
+
+      for (let i = 0; i < batch.length; i += CHUNK_SIZE) {
+        const chunk = batch.slice(i, i + CHUNK_SIZE)
+
+        // Create a list of placeholders ("(?, ?, ?, ?, ?, ?, ?)"), corresponding to each event.
+        const valuesPlaceholders = chunk.map(() => '(?, ?, ?, ?, ?, ?, ?)').join(', ')
+        const sql = `INSERT INTO ${dbName} (seqNum, parentSeqNum, args, name, createdAt, clientId, sessionId) VALUES ${valuesPlaceholders}`
+        // Flatten the event properties into a parameters array.
+        const params = chunk.flatMap((event) => [
+          event.seqNum,
+          event.parentSeqNum,
+          event.args === undefined ? null : JSON.stringify(event.args),
+          event.name,
+          createdAt,
+          event.clientId,
+          event.sessionId,
+        ])
+
+        yield* execDb((db) =>
+          db
+            .prepare(sql)
+            .bind(...params)
+            .run(),
+        )
+      }
+    }).pipe(
+      UnknownError.mapToUnknownError,
+      Effect.withSpan('@livestore/sync-cf:durable-object:appendEvents', {
+        attributes: { dbName, batchLength: batch.length, engine: engine._tag },
+      }),
+    )
+
+  const resetStore = Effect.promise(() => ctx.storage.deleteAll()).pipe(
+    UnknownError.mapToUnknownError,
+    Effect.withSpan('@livestore/sync-cf:durable-object:resetStore'),
+  )
+
+  // DO SQLite engine implementation
+  const getEventsDoSqlite = (
+    cursor: number | undefined,
+  ): Effect.Effect<
+    {
+      total: number
+      stream: Stream.Stream<
+        { eventEncoded: LiveStoreEvent.Global.Encoded; metadata: Option.Option<SyncMetadata> },
+        UnknownError
+      >
+    },
+    UnknownError
+  > =>
+    Effect.gen(function* () {
+      const selectCountSql =
+        cursor === undefined
+          ? `SELECT COUNT(*) as total FROM "${dbName}"`
+          : `SELECT COUNT(*) as total FROM "${dbName}" WHERE seqNum > ?`
+
+      const total = yield* Effect.try({
+        try: () => {
+          const cursorIter =
+            cursor === undefined ? ctx.storage.sql.exec(selectCountSql) : ctx.storage.sql.exec(selectCountSql, cursor)
+          let computed = 0
+          for (const row of cursorIter) {
+            computed = Number((row as any).total ?? 0)
+          }
+          return computed
+        },
+        catch: (error) => new UnknownError({ cause: error, payload: { dbName, stage: 'count' } }),
+      })
+
+      type State = { cursor: number | undefined }
+      type EmittedEvent = { eventEncoded: LiveStoreEvent.Global.Encoded; metadata: Option.Option<SyncMetadata> }
+
+      const DO_PAGE_SIZE = 256
+      const initialState: State = { cursor }
+
+      const fetchPage = (
+        state: State,
+      ): Effect.Effect<Option.Option<readonly [Chunk.Chunk<EmittedEvent>, State]>, UnknownError> =>
+        Effect.try({
+          try: () => {
+            const sql =
+              state.cursor === undefined
+                ? `SELECT * FROM "${dbName}" ORDER BY seqNum ASC LIMIT ?`
+                : `SELECT * FROM "${dbName}" WHERE seqNum > ? ORDER BY seqNum ASC LIMIT ?`
+
+            const iter =
+              state.cursor === undefined
+                ? ctx.storage.sql.exec(sql, DO_PAGE_SIZE)
+                : ctx.storage.sql.exec(sql, state.cursor, DO_PAGE_SIZE)
+
+            const rows: any[] = []
+            for (const row of iter) rows.push(row)
+
+            if (rows.length === 0) {
+              return Option.none()
+            }
+
+            const decodedRows = Chunk.fromIterable(decodeEventlogRows(rows))
+            const eventsChunk = Chunk.map(decodedRows, ({ createdAt, ...eventEncoded }) => ({
+              eventEncoded,
+              metadata: Option.some(SyncMetadata.make({ createdAt })),
+            }))
+
+            const lastSeqNum = Chunk.unsafeLast(decodedRows).seqNum
+            const nextState: State = { cursor: lastSeqNum }
+
+            return Option.some([eventsChunk, nextState] as const)
+          },
+          catch: (error) => new UnknownError({ cause: error, payload: { dbName, stage: 'select' } }),
+        })
+
+      const stream = Stream.unfoldChunkEffect(initialState, fetchPage)
+
+      return { total, stream }
+    }).pipe(
+      UnknownError.mapToUnknownError,
+      Effect.withSpan('@livestore/sync-cf:durable-object:getEvents', {
+        attributes: { dbName, cursor, engine: engine._tag },
+      }),
+    )
+
+  const appendEventsDoSqlite: SyncStorage['appendEvents'] = (batch, createdAt) =>
+    Effect.try({
+      try: () => {
+        if (batch.length === 0) return
+        // Keep params per statement within conservative limits (align with D1 bound params ~100)
+        const CHUNK_SIZE = 14
+        for (let i = 0; i < batch.length; i += CHUNK_SIZE) {
+          const chunk = batch.slice(i, i + CHUNK_SIZE)
+          const placeholders = chunk.map(() => '(?, ?, ?, ?, ?, ?, ?)').join(', ')
+          const sql = `INSERT INTO "${dbName}" (seqNum, parentSeqNum, args, name, createdAt, clientId, sessionId) VALUES ${placeholders}`
+          const params = chunk.flatMap((event) => [
+            event.seqNum,
+            event.parentSeqNum,
+            event.args === undefined ? null : JSON.stringify(event.args),
+            event.name,
+            createdAt,
+            event.clientId,
+            event.sessionId,
+          ])
+          ctx.storage.sql.exec(sql, ...params)
+        }
+      },
+      catch: (error) => new UnknownError({ cause: error, payload: { dbName, stage: 'insert' } }),
+    }).pipe(
+      Effect.withSpan('@livestore/sync-cf:durable-object:appendEvents', {
+        attributes: { dbName, batchLength: batch.length, engine: engine._tag },
+      }),
+      UnknownError.mapToUnknownError,
+    )
+
+  if (engine._tag === 'd1') {
+    return { dbName, getEvents: getEventsD1, appendEvents: appendEventsD1, resetStore }
+  }
+
+  return { dbName, getEvents: getEventsDoSqlite, appendEvents: appendEventsDoSqlite, resetStore }
+}
+
+const toValidTableName = (str: string) => str.replaceAll(/[^a-zA-Z0-9]/g, '_')
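The storage module above hides two engines behind one `SyncStorage` interface: D1 (paged SELECTs with an adaptive page size, since D1's JSON responses cap out around 1MB) and DO SQLite (fixed 256-row pages). The insert path chunks batches to 14 events because each row binds 7 parameters and D1 allows at most 100 bound parameters per query (7 × 14 = 98). A minimal standalone sketch of the adaptive paging policy — the constants are copied from the code above, but the function name `nextLimit` and the example values are ours:

```ts
// Sketch of the adaptive D1 page-size policy (not the package API).
const TARGET_BYTES = 1_000_000 - 64 * 1024 // D1_TARGET_RESPONSE_BYTES
const INITIAL_PAGE = 256 // D1_INITIAL_PAGE_SIZE
const MIN_PAGE = 1 // D1_MIN_PAGE_SIZE

const nextLimit = (limit: number, encodedSize: number): number => {
  // Last page was too large: halve the limit so the next SELECT stays under ~1MB.
  if (encodedSize > TARGET_BYTES && limit > MIN_PAGE) return Math.max(MIN_PAGE, Math.floor(limit / 2))
  // Last page was comfortably small: double the limit, capped at the initial size.
  if (encodedSize < TARGET_BYTES / 2 && limit < INITIAL_PAGE) return Math.min(INITIAL_PAGE, limit * 2)
  return limit
}

console.log(nextLimit(256, 2_000_000)) // 128 — oversized page shrinks
console.log(nextLimit(64, 100_000)) // 128 — small page grows back
```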
package/src/cf-worker/do/transport/do-rpc-server.ts
ADDED
@@ -0,0 +1,84 @@
+import { InvalidPullError, InvalidPushError } from '@livestore/common'
+import { type CfTypes, toDurableObjectHandler } from '@livestore/common-cf'
+import {
+  Effect,
+  Headers,
+  HttpServer,
+  Layer,
+  Logger,
+  LogLevel,
+  Option,
+  RpcSerialization,
+  Stream,
+} from '@livestore/utils/effect'
+import { SyncDoRpc } from '../../../common/do-rpc-schema.ts'
+import { SyncMessage } from '../../../common/mod.ts'
+import { DoCtx, type DoCtxInput } from '../layer.ts'
+import { makeEndingPullStream } from '../pull.ts'
+import { makePush } from '../push.ts'
+
+export interface DoRpcHandlerOptions {
+  payload: Uint8Array<ArrayBuffer>
+  input: Omit<DoCtxInput, 'from'>
+}
+
+export const createDoRpcHandler = (
+  options: DoRpcHandlerOptions,
+): Effect.Effect<Uint8Array<ArrayBuffer> | CfTypes.ReadableStream> =>
+  Effect.gen(this, function* () {
+    const { payload, input } = options
+    // const { rpcSubscriptions, backendId, doOptions, ctx, env } = yield* DoCtx
+
+    // TODO add admin RPCs
+    const RpcLive = SyncDoRpc.toLayer({
+      'SyncDoRpc.Ping': (_req) => {
+        return Effect.succeed(SyncMessage.Pong.make({}))
+      },
+      'SyncDoRpc.Pull': (req, { headers }) =>
+        Effect.gen(this, function* () {
+          const { rpcSubscriptions } = yield* DoCtx
+
+          // TODO rename `req.rpcContext` to something more appropriate
+          if (req.rpcContext) {
+            rpcSubscriptions.set(req.storeId, {
+              storeId: req.storeId,
+              payload: req.payload,
+              subscribedAt: Date.now(),
+              requestId: Headers.get(headers, 'x-rpc-request-id').pipe(Option.getOrThrow),
+              callerContext: req.rpcContext.callerContext,
+            })
+          }
+
+          return makeEndingPullStream(req, req.payload)
+        }).pipe(
+          Stream.unwrap,
+          Stream.map((res) => ({
+            ...res,
+            rpcRequestId: Headers.get(headers, 'x-rpc-request-id').pipe(Option.getOrThrow),
+          })),
+          Stream.provideLayer(DoCtx.Default({ ...input, from: { storeId: req.storeId } })),
+          Stream.mapError((cause) => (cause._tag === 'InvalidPullError' ? cause : InvalidPullError.make({ cause }))),
+          Stream.tapErrorCause(Effect.log),
+        ),
+      'SyncDoRpc.Push': (req) =>
+        Effect.gen(this, function* () {
+          const { doOptions, ctx, env, storeId } = yield* DoCtx
+          const push = makePush({ storeId, payload: req.payload, options: doOptions, ctx, env })
+
+          return yield* push(req)
+        }).pipe(
+          Effect.provide(DoCtx.Default({ ...input, from: { storeId: req.storeId } })),
+          Effect.mapError((cause) => (cause._tag === 'InvalidPushError' ? cause : InvalidPushError.make({ cause }))),
+          Effect.tapCauseLogPretty,
+        ),
+    })
+
+    const handler = toDurableObjectHandler(SyncDoRpc, {
+      layer: Layer.mergeAll(RpcLive, RpcSerialization.layerJson, HttpServer.layerContext).pipe(
+        Layer.provide(Logger.consoleWithThread('SyncDo')),
+        Layer.provide(Logger.minimumLogLevel(LogLevel.Debug)),
+      ),
+    })
+
+    return yield* handler(payload)
+  }).pipe(Effect.withSpan('createDoRpcHandler'))
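`createDoRpcHandler` is shaped to sit behind the `rpc(payload)` method that `SyncBackendRpcInterface` (in `shared.ts` below) declares for DO-to-DO syncing: bytes in, bytes or a stream out. A hedged sketch of that wiring — the class body and the way `input` is assembled are assumptions, not code from this diff:

```ts
import { Effect } from '@livestore/utils/effect'
import { createDoRpcHandler, type DoRpcHandlerOptions } from './do-rpc-server.ts'

// Hypothetical Durable Object method: hand the raw RPC payload to the
// Effect-based handler and return its promise to the calling DO.
class SyncBackendDo {
  // Stand-in for however the real class builds its `Omit<DoCtxInput, 'from'>` value.
  #input!: DoRpcHandlerOptions['input']

  rpc(payload: Uint8Array<ArrayBuffer>) {
    return Effect.runPromise(createDoRpcHandler({ payload, input: this.#input }))
  }
}
```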
package/src/cf-worker/do/transport/http-rpc-server.ts
ADDED
@@ -0,0 +1,51 @@
+import type { CfTypes } from '@livestore/common-cf'
+import { Effect, HttpApp, Layer, RpcSerialization, RpcServer } from '@livestore/utils/effect'
+import { SyncHttpRpc } from '../../../common/http-rpc-schema.ts'
+import * as SyncMessage from '../../../common/sync-message-types.ts'
+import { DoCtx } from '../layer.ts'
+import { makeEndingPullStream } from '../pull.ts'
+import { makePush } from '../push.ts'
+
+export const createHttpRpcHandler = ({
+  request,
+  responseHeaders,
+}: {
+  request: CfTypes.Request
+  responseHeaders?: Record<string, string>
+}) =>
+  Effect.gen(function* () {
+    const handlerLayer = createHttpRpcLayer
+    const httpApp = RpcServer.toHttpApp(SyncHttpRpc).pipe(Effect.provide(handlerLayer))
+    const webHandler = yield* httpApp.pipe(Effect.map(HttpApp.toWebHandler))
+
+    const response = yield* Effect.promise(
+      () => webHandler(request as TODO as Request) as TODO as Promise<CfTypes.Response>,
+    ).pipe(Effect.timeout(10000))
+
+    if (responseHeaders !== undefined) {
+      for (const [key, value] of Object.entries(responseHeaders)) {
+        response.headers.set(key, value)
+      }
+    }
+
+    return response
+  }).pipe(Effect.withSpan('createHttpRpcHandler'))
+
+const createHttpRpcLayer =
+  // TODO implement admin requests
+  SyncHttpRpc.toLayer({
+    'SyncHttpRpc.Pull': (req) => makeEndingPullStream(req, req.payload),
+
+    'SyncHttpRpc.Push': (req) =>
+      Effect.gen(function* () {
+        const { ctx, env, doOptions, storeId } = yield* DoCtx
+        const push = makePush({ payload: undefined, options: doOptions, storeId, ctx, env })
+
+        return yield* push(req)
+      }),
+
+    'SyncHttpRpc.Ping': () => Effect.succeed(SyncMessage.Pong.make({})),
+  }).pipe(
+    Layer.provideMerge(RpcServer.layerProtocolHttp({ path: '/http-rpc' })),
+    Layer.provideMerge(RpcSerialization.layerJson),
+  )
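Using the handler is mostly a matter of forwarding the incoming request. A sketch under the assumption that the Durable Object's fetch path routes `/http-rpc` traffic here — the header values are illustrative, and the returned effect still needs `DoCtx` provided by the surrounding runtime before it can run:

```ts
import type { CfTypes } from '@livestore/common-cf'
import { createHttpRpcHandler } from './http-rpc-server.ts'

// Build the response effect for an incoming sync request and attach CORS
// headers to every Pull/Push/Ping response. Running the effect (and
// providing DoCtx) is left to the caller.
const handleSyncHttp = (request: CfTypes.Request) =>
  createHttpRpcHandler({
    request,
    responseHeaders: {
      'Access-Control-Allow-Origin': '*',
      'Cache-Control': 'no-cache',
    },
  })
```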
package/src/cf-worker/do/transport/ws-rpc-server.ts
ADDED
@@ -0,0 +1,34 @@
+import { InvalidPullError, InvalidPushError } from '@livestore/common'
+import { Effect, identity, Layer, RpcServer, Stream } from '@livestore/utils/effect'
+import { SyncWsRpc } from '../../../common/ws-rpc-schema.ts'
+import { DoCtx, type DoCtxInput } from '../layer.ts'
+import { makeEndingPullStream } from '../pull.ts'
+import { makePush } from '../push.ts'
+
+export const makeRpcServer = ({ doSelf, doOptions }: Omit<DoCtxInput, 'from'>) => {
+  // TODO implement admin requests
+  const handlersLayer = SyncWsRpc.toLayer({
+    'SyncWsRpc.Pull': (req) =>
+      makeEndingPullStream(req, req.payload).pipe(
+        // Needed to keep the stream alive on the client side for phase 2 (i.e. not send the `Exit` stream RPC message)
+        req.live ? Stream.concat(Stream.never) : identity,
+        Stream.provideLayer(DoCtx.Default({ doSelf, doOptions, from: { storeId: req.storeId } })),
+        Stream.mapError((cause) => (cause._tag === 'InvalidPullError' ? cause : InvalidPullError.make({ cause }))),
+        // Stream.tapErrorCause(Effect.log),
+      ),
+    'SyncWsRpc.Push': (req) =>
+      Effect.gen(function* () {
+        const { doOptions, storeId, ctx, env } = yield* DoCtx
+
+        const push = makePush({ options: doOptions, storeId, payload: req.payload, ctx, env })
+
+        return yield* push(req)
+      }).pipe(
+        Effect.provide(DoCtx.Default({ doSelf, doOptions, from: { storeId: req.storeId } })),
+        Effect.mapError((cause) => (cause._tag === 'InvalidPushError' ? cause : InvalidPushError.make({ cause }))),
+        Effect.tapCauseLogPretty,
+      ),
+  })
+
+  return RpcServer.layer(SyncWsRpc).pipe(Layer.provide(handlersLayer))
+}
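The `req.live ? Stream.concat(Stream.never) : identity` step is what turns a finite catch-up pull into a live subscription: appending `Stream.never` keeps the RPC stream open instead of letting the server emit its `Exit` message. The same pattern in isolation, with plain numbers standing in for pull responses:

```ts
import { identity, Stream } from '@livestore/utils/effect'

// Emit a finite backlog, then either complete (one-shot pull) or stay open
// indefinitely (live subscription) depending on the `live` flag.
const makePullStream = (live: boolean) =>
  Stream.make(1, 2, 3).pipe(live ? Stream.concat(Stream.never) : identity)
```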
package/src/cf-worker/mod.ts
CHANGED
@@ -1,3 +1,5 @@
-export
-export
+export type { CfTypes } from '@livestore/common-cf'
+export { CfDeclare } from '@livestore/common-cf/declare'
+export * from './do/durable-object.ts'
+export * from './shared.ts'
 export * from './worker.ts'
package/src/cf-worker/shared.ts
ADDED
@@ -0,0 +1,141 @@
+import type { InvalidPullError, InvalidPushError } from '@livestore/common'
+import type { CfTypes } from '@livestore/common-cf'
+import { Effect, Schema, UrlParams } from '@livestore/utils/effect'
+
+import type { SearchParams } from '../common/mod.ts'
+import { SearchParamsSchema, SyncMessage } from '../common/mod.ts'
+
+export type Env = {}
+
+export type MakeDurableObjectClassOptions = {
+  onPush?: (
+    message: SyncMessage.PushRequest,
+    context: { storeId: StoreId; payload?: Schema.JsonValue },
+  ) => Effect.SyncOrPromiseOrEffect<void>
+  onPushRes?: (message: SyncMessage.PushAck | InvalidPushError) => Effect.SyncOrPromiseOrEffect<void>
+  onPull?: (
+    message: SyncMessage.PullRequest,
+    context: { storeId: StoreId; payload?: Schema.JsonValue },
+  ) => Effect.SyncOrPromiseOrEffect<void>
+  onPullRes?: (message: SyncMessage.PullResponse | InvalidPullError) => Effect.SyncOrPromiseOrEffect<void>
+  /**
+   * Storage engine for event persistence.
+   * - Default: `{ _tag: 'do-sqlite' }` (Durable Object SQLite)
+   * - D1: `{ _tag: 'd1', binding: string }` where `binding` is the D1 binding name in wrangler.toml.
+   *
+   * If omitted, the runtime defaults to DO SQLite. For backwards-compatibility, if an env binding named
+   * `DB` exists and looks like a D1Database, D1 will be used.
+   *
+   * Trade-offs:
+   * - DO SQLite: simpler deploy, data co-located with DO, not externally queryable
+   * - D1: centralized DB, inspectable with DB tools, extra network hop and JSON size limits
+   */
+  storage?: { _tag: 'do-sqlite' } | { _tag: 'd1'; binding: string }
+
+  /**
+   * Enabled transports for sync backend
+   * - `http`: HTTP JSON-RPC
+   * - `ws`: WebSocket
+   * - `do-rpc`: Durable Object RPC calls (only works in combination with `@livestore/adapter-cf`)
+   *
+   * @default Set(['http', 'ws', 'do-rpc'])
+   */
+  enabledTransports?: Set<'http' | 'ws' | 'do-rpc'>
+
+  /**
+   * Custom HTTP response headers for HTTP transport
+   * These headers will be added to all HTTP RPC responses (Pull, Push, Ping)
+   *
+   * @example
+   * ```ts
+   * {
+   *   http: {
+   *     responseHeaders: {
+   *       'Access-Control-Allow-Origin': '*',
+   *       'Cache-Control': 'no-cache'
+   *     }
+   *   }
+   * }
+   * ```
+   */
+  http?: {
+    responseHeaders?: Record<string, string>
+  }
+
+  otel?: {
+    baseUrl?: string
+    serviceName?: string
+  }
+}
+
+export type StoreId = string
+export type DurableObjectId = string
+
+/**
+ * CRITICAL: Increment this version whenever you modify the database schema structure.
+ *
+ * Bump required when:
+ * - Adding/removing/renaming columns in eventlogTable or contextTable (see sqlite.ts)
+ * - Changing column types or constraints
+ * - Modifying primary keys or indexes
+ *
+ * Bump NOT required when:
+ * - Changing query patterns, pagination logic, or streaming behavior
+ * - Adding new tables (as long as existing table schemas remain unchanged)
+ * - Updating implementation details in sync-storage.ts
+ *
+ * Impact: Changing this version triggers a "soft reset" - new table names are created
+ * and old data becomes inaccessible (but remains in storage).
+ */
+export const PERSISTENCE_FORMAT_VERSION = 7
+
+export const encodeOutgoingMessage = Schema.encodeSync(Schema.parseJson(SyncMessage.BackendToClientMessage))
+export const encodeIncomingMessage = Schema.encodeSync(Schema.parseJson(SyncMessage.ClientToBackendMessage))
+
+/**
+ * Extracts the LiveStore sync search parameters from a request. Returns
+ * `undefined` when the request does not carry valid sync metadata so callers
+ * can fall back to custom routing.
+ */
+export const matchSyncRequest = (request: CfTypes.Request): SearchParams | undefined => {
+  const url = new URL(request.url)
+  const urlParams = UrlParams.fromInput(url.searchParams)
+  const paramsResult = UrlParams.schemaStruct(SearchParamsSchema)(urlParams).pipe(Effect.option, Effect.runSync)
+
+  if (paramsResult._tag === 'None') {
+    return undefined
+  }
+
+  return paramsResult.value
+}
+
+// RPC subscription storage (TODO refactor)
+export type RpcSubscription = {
+  storeId: StoreId
+  payload?: Schema.JsonValue
+  subscribedAt: number
+  /** Effect RPC request ID */
+  requestId: string
+  callerContext: {
+    bindingName: string
+    durableObjectId: string
+  }
+}
+
+/**
+ * Durable Object interface supporting the DO RPC protocol for DO <> DO syncing.
+ */
+export interface SyncBackendRpcInterface {
+  __DURABLE_OBJECT_BRAND: never
+  rpc(payload: Uint8Array): Promise<Uint8Array | CfTypes.ReadableStream>
+}
+
+export const WebSocketAttachmentSchema = Schema.parseJson(
+  Schema.Struct({
+    // Same across all websocket connections
+    storeId: Schema.String,
+    // Different for each websocket connection
+    payload: Schema.optional(Schema.JsonValue),
+    pullRequestIds: Schema.Array(Schema.String),
+  }),
+)
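Read together, the options in `MakeDurableObjectClassOptions` compose roughly as follows — a sketch derived only from the types and doc comments above; the `'DB'` binding name, header values, and import path are placeholders:

```ts
import type { MakeDurableObjectClassOptions } from './shared.ts'

const options: MakeDurableObjectClassOptions = {
  // Persist the eventlog in D1 (binding name from wrangler.toml) instead of
  // the default Durable Object SQLite.
  storage: { _tag: 'd1', binding: 'DB' },
  // Drop the DO RPC transport, keeping HTTP and WebSocket.
  enabledTransports: new Set(['http', 'ws']),
  // Added to all HTTP RPC responses (Pull, Push, Ping).
  http: {
    responseHeaders: {
      'Access-Control-Allow-Origin': '*',
      'Cache-Control': 'no-cache',
    },
  },
  // Hooks may return void, a Promise, or an Effect.
  onPush: (_message, { storeId }) => {
    console.log('push received for store', storeId)
  },
}
```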