@livestore/common 0.3.0-dev.23 → 0.3.0-dev.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/.tsbuildinfo +1 -1
- package/dist/adapter-types.d.ts +4 -2
- package/dist/adapter-types.d.ts.map +1 -1
- package/dist/adapter-types.js +1 -1
- package/dist/adapter-types.js.map +1 -1
- package/dist/derived-mutations.d.ts +8 -8
- package/dist/devtools/devtools-messages-client-session.d.ts +21 -21
- package/dist/devtools/devtools-messages-common.d.ts +6 -6
- package/dist/devtools/devtools-messages-leader.d.ts +25 -24
- package/dist/devtools/devtools-messages-leader.d.ts.map +1 -1
- package/dist/leader-thread/LeaderSyncProcessor.d.ts +2 -1
- package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
- package/dist/leader-thread/LeaderSyncProcessor.js +16 -12
- package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
- package/dist/leader-thread/apply-mutation.js +1 -1
- package/dist/leader-thread/apply-mutation.js.map +1 -1
- package/dist/leader-thread/leader-worker-devtools.js +2 -2
- package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
- package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
- package/dist/leader-thread/make-leader-thread-layer.js +3 -2
- package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
- package/dist/leader-thread/mutationlog.d.ts +1 -0
- package/dist/leader-thread/mutationlog.d.ts.map +1 -1
- package/dist/leader-thread/mutationlog.js +2 -1
- package/dist/leader-thread/mutationlog.js.map +1 -1
- package/dist/leader-thread/pull-queue-set.d.ts.map +1 -1
- package/dist/leader-thread/types.d.ts +1 -1
- package/dist/leader-thread/types.d.ts.map +1 -1
- package/dist/mutation.d.ts.map +1 -1
- package/dist/mutation.js +13 -2
- package/dist/mutation.js.map +1 -1
- package/dist/query-builder/api.d.ts +118 -20
- package/dist/query-builder/api.d.ts.map +1 -1
- package/dist/query-builder/api.js.map +1 -1
- package/dist/query-builder/astToSql.d.ts +7 -0
- package/dist/query-builder/astToSql.d.ts.map +1 -0
- package/dist/query-builder/astToSql.js +168 -0
- package/dist/query-builder/astToSql.js.map +1 -0
- package/dist/query-builder/impl.d.ts +1 -5
- package/dist/query-builder/impl.d.ts.map +1 -1
- package/dist/query-builder/impl.js +130 -96
- package/dist/query-builder/impl.js.map +1 -1
- package/dist/query-builder/impl.test.js +94 -0
- package/dist/query-builder/impl.test.js.map +1 -1
- package/dist/query-builder/mod.d.ts +7 -0
- package/dist/query-builder/mod.d.ts.map +1 -1
- package/dist/query-builder/mod.js +7 -0
- package/dist/query-builder/mod.js.map +1 -1
- package/dist/query-info.d.ts +4 -1
- package/dist/query-info.d.ts.map +1 -1
- package/dist/query-info.js.map +1 -1
- package/dist/rehydrate-from-mutationlog.js +1 -1
- package/dist/rehydrate-from-mutationlog.js.map +1 -1
- package/dist/schema/MutationEvent.d.ts +27 -10
- package/dist/schema/MutationEvent.d.ts.map +1 -1
- package/dist/schema/MutationEvent.js +24 -8
- package/dist/schema/MutationEvent.js.map +1 -1
- package/dist/schema/db-schema/dsl/mod.d.ts +7 -5
- package/dist/schema/db-schema/dsl/mod.d.ts.map +1 -1
- package/dist/schema/db-schema/dsl/mod.js +6 -0
- package/dist/schema/db-schema/dsl/mod.js.map +1 -1
- package/dist/schema/mutations.d.ts +12 -3
- package/dist/schema/mutations.d.ts.map +1 -1
- package/dist/schema/mutations.js.map +1 -1
- package/dist/schema/system-tables.d.ts +5 -5
- package/dist/schema/system-tables.d.ts.map +1 -1
- package/dist/schema/system-tables.js +1 -2
- package/dist/schema/system-tables.js.map +1 -1
- package/dist/schema/table-def.d.ts +7 -3
- package/dist/schema/table-def.d.ts.map +1 -1
- package/dist/schema/table-def.js +7 -1
- package/dist/schema/table-def.js.map +1 -1
- package/dist/sync/ClientSessionSyncProcessor.d.ts +2 -0
- package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
- package/dist/sync/ClientSessionSyncProcessor.js +8 -5
- package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
- package/dist/sync/next/rebase-events.d.ts +1 -1
- package/dist/sync/next/rebase-events.d.ts.map +1 -1
- package/dist/sync/sync.d.ts +19 -1
- package/dist/sync/sync.d.ts.map +1 -1
- package/dist/sync/sync.js.map +1 -1
- package/dist/sync/syncstate.d.ts +26 -4
- package/dist/sync/syncstate.d.ts.map +1 -1
- package/dist/sync/syncstate.js +95 -25
- package/dist/sync/syncstate.js.map +1 -1
- package/dist/sync/syncstate.test.js +60 -29
- package/dist/sync/syncstate.test.js.map +1 -1
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +2 -2
- package/src/adapter-types.ts +4 -2
- package/src/leader-thread/LeaderSyncProcessor.ts +19 -13
- package/src/leader-thread/apply-mutation.ts +2 -2
- package/src/leader-thread/leader-worker-devtools.ts +2 -2
- package/src/leader-thread/make-leader-thread-layer.ts +3 -2
- package/src/leader-thread/mutationlog.ts +2 -1
- package/src/leader-thread/types.ts +1 -1
- package/src/mutation.ts +20 -3
- package/src/query-builder/api.ts +192 -15
- package/src/query-builder/astToSql.ts +203 -0
- package/src/query-builder/impl.test.ts +104 -0
- package/src/query-builder/impl.ts +157 -113
- package/src/query-builder/mod.ts +7 -0
- package/src/query-info.ts +6 -1
- package/src/rehydrate-from-mutationlog.ts +1 -1
- package/src/schema/MutationEvent.ts +28 -12
- package/src/schema/db-schema/dsl/mod.ts +30 -2
- package/src/schema/mutations.ts +12 -1
- package/src/schema/system-tables.ts +1 -2
- package/src/schema/table-def.ts +14 -4
- package/src/sync/ClientSessionSyncProcessor.ts +10 -4
- package/src/sync/next/rebase-events.ts +1 -1
- package/src/sync/sync.ts +19 -3
- package/src/sync/syncstate.test.ts +66 -32
- package/src/sync/syncstate.ts +116 -34
- package/src/version.ts +1 -1
- package/tmp/pack.tgz +0 -0
package/src/sync/ClientSessionSyncProcessor.ts
CHANGED

```diff
@@ -17,6 +17,8 @@ import * as SyncState from './syncstate.js'
  * - The goal is to never block the UI, so we'll interrupt rebasing if a new mutations is pushed by the client session.
  * - We also want to avoid "backwards-jumping" in the UI, so we'll transactionally apply a read model changes during a rebase.
  * - We might need to make the rebase behaviour configurable e.g. to let users manually trigger a rebase
+ *
+ * Longer term we should evalutate whether we can unify the ClientSessionSyncProcessor with the LeaderSyncProcessor.
  */
 export const makeClientSessionSyncProcessor = ({
   schema,
```
```diff
@@ -64,7 +66,7 @@ export const makeClientSessionSyncProcessor = ({
   }
 
   const syncStateUpdateQueue = Queue.unbounded<SyncState.SyncState>().pipe(Effect.runSync)
-  const
+  const isClientEvent = (mutationEventEncoded: MutationEvent.EncodedWithMeta) =>
     getMutationDef(schema, mutationEventEncoded.mutation).options.clientOnly
 
   /** We're queuing push requests to reduce the number of messages sent to the leader by batching them */
```
```diff
@@ -91,7 +93,7 @@ export const makeClientSessionSyncProcessor = ({
       const updateResult = SyncState.updateSyncState({
         syncState: syncStateRef.current,
         payload: { _tag: 'local-push', newEvents: encodedMutationEvents },
-
+        isClientEvent,
         isEqualEvent: MutationEvent.isEqualEncoded,
       })
 
```
```diff
@@ -166,7 +168,10 @@ export const makeClientSessionSyncProcessor = ({
 
     yield* FiberHandle.run(leaderPushingFiberHandle, backgroundLeaderPushing)
 
-
+    // NOTE We need to lazily call `.pull` as we want the cursor to be updated
+    yield* Stream.suspend(() =>
+      clientSession.leaderThread.mutations.pull({ cursor: syncStateRef.current.localHead }),
+    ).pipe(
       Stream.tap(({ payload, remaining }) =>
         Effect.gen(function* () {
           // console.log('pulled payload from leader', { payload, remaining })
```
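The lazy `.pull` above is the interesting bit: `Stream.suspend` defers building the inner stream until it actually runs, so each (re)start reads the then-current cursor. A standalone sketch of that difference, using the public `effect` package (LiveStore re-exports these utilities; the import path here is an assumption):

```ts
import { Stream } from 'effect'

// Without suspend, `cursorRef.current` would be read once, when the stream value
// is constructed. With suspend, the thunk re-runs every time the stream starts
// (e.g. when Effect.forever restarts it), picking up the latest cursor.
const makePull = (cursorRef: { current: number }) =>
  Stream.suspend(() => Stream.fromIterable([cursorRef.current]))
```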
```diff
@@ -177,7 +182,7 @@ export const makeClientSessionSyncProcessor = ({
           const updateResult = SyncState.updateSyncState({
             syncState: syncStateRef.current,
             payload,
-
+            isClientEvent,
             isEqualEvent: MutationEvent.isEqualEncoded,
           })
 
```
```diff
@@ -259,6 +264,7 @@ export const makeClientSessionSyncProcessor = ({
       ),
       Stream.runDrain,
       Effect.forever, // NOTE Whenever the leader changes, we need to re-start the stream
+      Effect.interruptible,
       Effect.withSpan('client-session-sync-processor:pull'),
       Effect.tapCauseLogPretty,
       Effect.forkScoped,
```
package/src/sync/next/rebase-events.ts
CHANGED

```diff
@@ -51,7 +51,7 @@ export const rebaseEvents = ({
   rebaseFn: RebaseFn
   currentFactsSnapshot: MutationEventFactsSnapshot
   clientId: string
-  sessionId: string
+  sessionId: string
 }): ReadonlyArray<MutationEvent.AnyDecoded> => {
   const initialSnapshot = new Map(currentFactsSnapshot)
   applyFactGroups(
```
package/src/sync/sync.ts
CHANGED
```diff
@@ -16,13 +16,25 @@ export type MakeBackendArgs = {
 }
 
 export type SyncOptions = {
-
-    args: MakeBackendArgs,
-  ) => Effect.Effect<SyncBackend<any>, UnexpectedError, Scope.Scope | HttpClient.HttpClient>
+  backend?: SyncBackendConstructor<any>
   /** @default { _tag: 'Skip' } */
   initialSyncOptions?: InitialSyncOptions
+  /**
+   * What to do if there is an error during sync.
+   *
+   * Options:
+   * `shutdown` will stop the sync processor and cause the app to crash.
+   * `ignore` will log the error and let the app continue running acting as if it was offline.
+   *
+   * @default 'ignore'
+   * */
+  onSyncError?: 'shutdown' | 'ignore'
 }
 
+export type SyncBackendConstructor<TSyncMetadata = Schema.JsonValue> = (
+  args: MakeBackendArgs,
+) => Effect.Effect<SyncBackend<TSyncMetadata>, UnexpectedError, Scope.Scope | HttpClient.HttpClient>
+
 export type SyncBackend<TSyncMetadata = Schema.JsonValue> = {
   pull: (
     args: Option.Option<{
```
```diff
@@ -57,6 +69,10 @@ export type SyncBackend<TSyncMetadata = Schema.JsonValue> = {
     HttpClient.HttpClient
   >
   isConnected: SubscriptionRef.SubscriptionRef<boolean>
+  /**
+   * Metadata describing the sync backend.
+   */
+  metadata: { name: string; description: string } & Record<string, Schema.JsonValue>
 }
 
 export class IsOfflineError extends Schema.TaggedError<IsOfflineError>()('IsOfflineError', {}) {}
```
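Net effect of the `sync.ts` changes: the backend factory gets the named `SyncBackendConstructor` type, error handling becomes configurable via `onSyncError`, and every backend must now expose `metadata`. A minimal consumer-side sketch; the import path and the `makeMyBackend` constructor are hypothetical, only the type shapes come from this diff:

```ts
import type { SyncBackendConstructor, SyncOptions } from '@livestore/common' // path assumed

// Hypothetical backend constructor conforming to the new named type.
declare const makeMyBackend: SyncBackendConstructor<{ serverSeq: number }>

const syncOptions: SyncOptions = {
  backend: makeMyBackend,
  // New in this release: 'ignore' (the default) logs sync errors and keeps the app
  // running as if offline; 'shutdown' stops the sync processor and crashes the app.
  onSyncError: 'ignore',
}
```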
package/src/sync/syncstate.test.ts
CHANGED

```diff
@@ -19,7 +19,7 @@ class TestEvent extends MutationEvent.EncodedWithMeta {
       args: payload,
 
       clientId: 'static-local-id',
-      sessionId:
+      sessionId: 'static-session-id',
     })
   }
 
```
```diff
@@ -39,22 +39,23 @@ const e_0_2 = new TestEvent({ global: 0, client: 2 }, e_0_1.id, 'a', true)
 const e_0_3 = new TestEvent({ global: 0, client: 3 }, e_0_2.id, 'a', true)
 const e_1_0 = new TestEvent({ global: 1, client: 0 }, e_0_0.id, 'a', false)
 const e_1_1 = new TestEvent({ global: 1, client: 1 }, e_1_0.id, 'a', true)
+const e_2_0 = new TestEvent({ global: 2, client: 0 }, e_1_0.id, 'a', false)
 
 const isEqualEvent = MutationEvent.isEqualEncoded
 
-const
+const isClientEvent = (event: MutationEvent.EncodedWithMeta) => (event as TestEvent).isLocal
 
 describe('syncstate', () => {
   describe('updateSyncState', () => {
-    const
+    const update = ({
       syncState,
       payload,
-
+      ignoreClientEvents = false,
     }: {
       syncState: SyncState.SyncState
       payload: typeof SyncState.Payload.Type
-
-    }) => SyncState.updateSyncState({ syncState, payload,
+      ignoreClientEvents?: boolean
+    }) => SyncState.updateSyncState({ syncState, payload, isClientEvent, isEqualEvent, ignoreClientEvents })
 
     describe.each([{ trimRollbackUntil: false }, { trimRollbackUntil: true }])(
       'upstream-rebase (trimRollbackUntil: $trimRollbackUntil)',
```
```diff
@@ -68,7 +69,7 @@ describe('syncstate', () => {
        })
        const e_0_0_e_1_0 = e_0_0.rebase_(e_1_0.id)
        const e_0_1_e_1_1 = e_0_1.rebase_(e_0_0_e_1_0.id)
-       const result =
+       const result = update({
          syncState,
          payload: {
            _tag: 'upstream-rebase',
```
```diff
@@ -99,7 +100,7 @@ describe('syncstate', () => {
          localHead: e_1_0.id,
        })
        const e_0_1_e_1_0 = e_0_1.rebase_(e_0_0.id)
-       const result =
+       const result = update({
          syncState,
          payload: {
            _tag: 'upstream-rebase',
```
```diff
@@ -129,7 +130,7 @@ describe('syncstate', () => {
          upstreamHead: EventId.ROOT,
          localHead: e_0_0.id,
        })
-       const result =
+       const result = update({
          syncState,
          payload: { _tag: 'upstream-rebase', rollbackUntil: e_0_0.id, newEvents: [e_1_0] },
        })
```
```diff
@@ -148,7 +149,7 @@ describe('syncstate', () => {
          upstreamHead: EventId.ROOT,
          localHead: e_0_0.id,
        })
-       const result =
+       const result = update({
          syncState,
          payload: { _tag: 'upstream-rebase', rollbackUntil: e_0_0.id, newEvents: [e_1_0] },
        })
```
```diff
@@ -162,7 +163,7 @@ describe('syncstate', () => {
          upstreamHead: EventId.ROOT,
          localHead: e_0_0.id,
        })
-       const result =
+       const result = update({
          syncState,
          payload: { _tag: 'upstream-rebase', rollbackUntil: e_0_0.id, newEvents: [] },
        })
```
```diff
@@ -184,7 +185,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_0_1, e_0_0] } })
       expect(result).toMatchObject({ _tag: 'unexpected-error' })
     })
 
```
```diff
@@ -195,7 +196,40 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_1_0, e_0_0] } })
+      expect(result).toMatchObject({ _tag: 'unexpected-error' })
+    })
+
+    it('should throw error if incoming event is < expected upstream head', () => {
+      const syncState = new SyncState.SyncState({
+        pending: [],
+        rollbackTail: [],
+        upstreamHead: e_1_0.id,
+        localHead: e_1_0.id,
+      })
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_0_0] } })
+      expect(result).toMatchObject({ _tag: 'unexpected-error' })
+    })
+
+    it('should throw error if incoming event is = expected upstream head', () => {
+      const syncState = new SyncState.SyncState({
+        pending: [],
+        rollbackTail: [],
+        upstreamHead: e_1_0.id,
+        localHead: e_1_0.id,
+      })
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_1_0] } })
+      expect(result).toMatchObject({ _tag: 'unexpected-error' })
+    })
+
+    it('should throw if the parent id of the first incoming event is unknown', () => {
+      const syncState = new SyncState.SyncState({
+        pending: [],
+        rollbackTail: [e_0_0],
+        upstreamHead: EventId.ROOT,
+        localHead: e_0_0.id,
+      })
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_2_0] } })
       expect(result).toMatchObject({ _tag: 'unexpected-error' })
     })
 
```
```diff
@@ -206,7 +240,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_0_0] } })
 
       expectAdvance(result)
       expectEventArraysEqual(result.newSyncState.pending, [])
```
```diff
@@ -223,7 +257,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_1_0.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_0_0] } })
 
       expectAdvance(result)
       expectEventArraysEqual(result.newSyncState.pending, [e_1_0])
```
```diff
@@ -240,7 +274,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_0_0, e_0_1] } })
 
       expectAdvance(result)
       expectEventArraysEqual(result.newSyncState.pending, [])
```
```diff
@@ -257,7 +291,7 @@ describe('syncstate', () => {
         upstreamHead: e_0_0.id,
         localHead: e_0_1.id,
       })
-      const result =
+      const result = update({
         syncState,
         payload: { _tag: 'upstream-advance', newEvents: [e_0_1, e_0_2, e_0_3, e_1_0, e_1_1] },
       })
```
```diff
@@ -277,10 +311,10 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0.id,
       })
-      const result =
+      const result = update({
         syncState,
         payload: { _tag: 'upstream-advance', newEvents: [e_0_0] },
-
+        ignoreClientEvents: true,
       })
       expectAdvance(result)
       expectEventArraysEqual(result.newSyncState.pending, [])
```
```diff
@@ -297,10 +331,10 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0.id,
       })
-      const result =
+      const result = update({
         syncState,
         payload: { _tag: 'upstream-advance', newEvents: [e_0_0] },
-
+        ignoreClientEvents: true,
       })
       expectAdvance(result)
       expectEventArraysEqual(result.newSyncState.pending, [e_1_0])
```
```diff
@@ -317,10 +351,10 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_1.id,
       })
-      const result =
+      const result = update({
         syncState,
         payload: { _tag: 'upstream-advance', newEvents: [e_0_0, e_1_0] },
-
+        ignoreClientEvents: true,
       })
 
       expectAdvance(result)
```
```diff
@@ -338,7 +372,7 @@ describe('syncstate', () => {
         upstreamHead: e_1_0.id,
         localHead: e_1_0.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_0_0] } })
       expect(result).toMatchObject({ _tag: 'unexpected-error' })
     })
   })
```
```diff
@@ -351,7 +385,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_0_1] } })
 
       const e_0_0_e_0_2 = e_0_0.rebase_(e_0_1.id)
 
```
```diff
@@ -372,7 +406,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0_b.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_0_0] } })
 
       const e_0_0_e_1_0 = e_0_0_b.rebase_(e_0_0.id)
 
```
```diff
@@ -393,7 +427,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_1_0_b.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'upstream-advance', newEvents: [e_1_0] } })
       const e_1_0_e_2_0 = e_1_0_b.rebase_(e_1_0.id)
 
       expectRebase(result)
```
```diff
@@ -412,7 +446,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0.id,
       })
-      const result =
+      const result = update({
         syncState,
         payload: { _tag: 'upstream-advance', newEvents: [e_0_1, e_0_2, e_0_3, e_1_0] },
       })
```
```diff
@@ -433,7 +467,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0.id,
       })
-      const result =
+      const result = update({
         syncState,
         payload: { _tag: 'upstream-advance', newEvents: [e_0_0, e_0_2, e_0_3, e_1_0] },
       })
```
```diff
@@ -456,7 +490,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_1.id,
       })
-      const result =
+      const result = update({
         syncState,
         payload: { _tag: 'upstream-advance', newEvents: [e_0_1, e_0_2, e_0_3, e_1_0] },
       })
```
```diff
@@ -482,7 +516,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_0.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'local-push', newEvents: [e_0_1, e_0_2, e_0_3] } })
 
       expectAdvance(result)
       expectEventArraysEqual(result.newSyncState.pending, [e_0_0, e_0_1, e_0_2, e_0_3])
```
```diff
@@ -501,7 +535,7 @@ describe('syncstate', () => {
         upstreamHead: EventId.ROOT,
         localHead: e_0_1.id,
       })
-      const result =
+      const result = update({ syncState, payload: { _tag: 'local-push', newEvents: [e_0_1, e_0_2] } })
 
       expectReject(result)
       expect(result.expectedMinimumId).toMatchObject(e_0_2.id)
```
package/src/sync/syncstate.ts
CHANGED
```diff
@@ -1,4 +1,4 @@
-import { casesHandled } from '@livestore/utils'
+import { casesHandled, shouldNeverHappen } from '@livestore/utils'
 import { Match, ReadonlyArray, Schema } from '@livestore/utils/effect'
 
 import { UnexpectedError } from '../adapter-types.js'
```
```diff
@@ -24,6 +24,9 @@ import * as MutationEvent from '../schema/MutationEvent.js'
  * - Can be confirmed or rejected by the upstream.
  * - Subject to rebase if rejected.
  * - **Rollback Tail**: Events that are kept around temporarily for potential rollback until confirmed by upstream.
+ *   - Currently only needed for ClientSessionSyncProcessor.
+ *   - Note: Confirmation of an event is stronger than acknowledgment of an event and can only be done by the
+ *     absolute authority in the sync hierarchy (i.e. the sync backend in our case).
  *
  * Payloads:
  * - `PayloadUpstreamRebase`: Upstream has performed a rebase, so downstream must roll back to the specified event
```
```diff
@@ -37,12 +40,19 @@ import * as MutationEvent from '../schema/MutationEvent.js'
  * 2. **Head Ordering**: Upstream Head ≤ Local Head.
  * 3. **ID Sequence**: Must follow the pattern (1,0)→(1,1)→(1,2)→(2,0).
  *
+ * A few further notes to help form an intuition:
+ * - The goal is to keep the pending events as small as possible (i.e. to have synced with the next upstream node)
+ * - There are 2 cases for rebasing:
+ *   - The conflicting event only conflicts with the pending events -> only (some of) the pending events need to be rolled back
+ *   - The conflicting event conflicts even with the rollback tail (additionally to the pending events) -> events from both need to be rolled back
+ *
  * The `updateSyncState` function processes updates to the sync state based on incoming payloads,
  * handling cases such as upstream rebase, advance, local push, and rollback tail trimming.
  */
 export class SyncState extends Schema.Class<SyncState>('SyncState')({
   pending: Schema.Array(MutationEvent.EncodedWithMeta),
   rollbackTail: Schema.Array(MutationEvent.EncodedWithMeta),
+  /** What this node expects the next upstream node to have as its own local head */
   upstreamHead: EventId.EventId,
   localHead: EventId.EventId,
 }) {
```
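These invariants all lean on the `(global, client)` event-id ordering. A small model of the comparison they imply, for intuition only (not the package's actual `EventId` implementation):

```ts
// Sketch: EventId modeled as a (global, client) pair, ordered lexicographically.
type EventIdPair = { global: number; client: number }

const isGreaterThan = (a: EventIdPair, b: EventIdPair): boolean =>
  a.global > b.global || (a.global === b.global && a.client > b.client)

// The documented sequence (1,0) → (1,1) → (1,2) → (2,0) is strictly increasing:
isGreaterThan({ global: 1, client: 1 }, { global: 1, client: 0 }) // true
isGreaterThan({ global: 2, client: 0 }, { global: 1, client: 2 }) // true
```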
```diff
@@ -182,17 +192,19 @@ const unexpectedError = (cause: unknown): UpdateResultUnexpectedError =>
 export const updateSyncState = ({
   syncState,
   payload,
-
+  isClientEvent,
   isEqualEvent,
-
+  ignoreClientEvents = false,
 }: {
   syncState: SyncState
   payload: typeof Payload.Type
-
+  isClientEvent: (event: MutationEvent.EncodedWithMeta) => boolean
   isEqualEvent: (a: MutationEvent.EncodedWithMeta, b: MutationEvent.EncodedWithMeta) => boolean
-  /** This is used in the leader which should ignore
-
+  /** This is used in the leader which should ignore client events when receiving an upstream-advance payload */
+  ignoreClientEvents?: boolean
 }): typeof UpdateResult.Type => {
+  validateSyncState(syncState)
+
   const trimRollbackTail = (
     rollbackTail: ReadonlyArray<MutationEvent.EncodedWithMeta>,
   ): ReadonlyArray<MutationEvent.EncodedWithMeta> => {
```
```diff
@@ -226,7 +238,7 @@ export const updateSyncState = ({
       const rebasedPending = rebaseEvents({
         events: syncState.pending,
         baseEventId: newUpstreamHead,
-
+        isClientEvent,
       })
 
       return UpdateResultRebase.make({
```
```diff
@@ -243,6 +255,7 @@ export const updateSyncState = ({
       })
     }
 
+    // #region upstream-advance
     case 'upstream-advance': {
       if (payload.newEvents.length === 0) {
         return UpdateResultAdvance.make({
```
```diff
@@ -268,9 +281,23 @@ export const updateSyncState = ({
       }
 
       // Validate that incoming events are larger than upstream head
-      if (
+      if (
+        EventId.isGreaterThan(syncState.upstreamHead, payload.newEvents[0]!.id) ||
+        EventId.isEqual(syncState.upstreamHead, payload.newEvents[0]!.id)
+      ) {
+        return unexpectedError(
+          `Incoming events must be greater than upstream head. Expected greater than: (${syncState.upstreamHead.global},${syncState.upstreamHead.client}). Received: [${payload.newEvents.map((e) => `(${e.id.global},${e.id.client})`).join(', ')}]`,
+        )
+      }
+
+      // Validate that the parent id of the first incoming event is known
+      const knownEventGlobalIds = [...syncState.rollbackTail, ...syncState.pending].map((e) => e.id.global)
+      knownEventGlobalIds.push(syncState.upstreamHead.global)
+      const firstNewEvent = payload.newEvents[0]!
+      const hasUnknownParentId = knownEventGlobalIds.includes(firstNewEvent.parentId.global) === false
+      if (hasUnknownParentId) {
         return unexpectedError(
-          `Incoming events must
+          `Incoming events must have a known parent id. Received: [${payload.newEvents.map((e) => `(${e.id.global},${e.id.client})`).join(', ')}]`,
         )
       }
 
```
```diff
@@ -280,10 +307,11 @@ export const updateSyncState = ({
         existingEvents: syncState.pending,
         incomingEvents: payload.newEvents,
         isEqualEvent,
-
-
+        isClientEvent,
+        ignoreClientEvents,
       })
 
+      // No divergent pending events, thus we can just advance (some of) the pending events
       if (divergentPendingIndex === -1) {
         const pendingEventIds = new Set(syncState.pending.map((e) => `${e.id.global},${e.id.client}`))
         const newEvents = payload.newEvents.filter((e) => !pendingEventIds.has(`${e.id.global},${e.id.client}`))
```
```diff
@@ -292,17 +320,17 @@ export const updateSyncState = ({
       // we need to split the pending events into two groups:
       // - pendingMatching: The pending events up to point where they match the incoming events
       // - pendingRemaining: The pending events after the point where they match the incoming events
-      // The `
-      let
+      // The `clientIndexOffset` is used to account for the client events that are being ignored
+      let clientIndexOffset = 0
       const [pendingMatching, pendingRemaining] = ReadonlyArray.splitWhere(
         syncState.pending,
         (pendingEvent, index) => {
-          if (
-
+          if (ignoreClientEvents && isClientEvent(pendingEvent)) {
+            clientIndexOffset++
             return false
           }
 
-          const newEvent = payload.newEvents.at(index -
+          const newEvent = payload.newEvents.at(index - clientIndexOffset)
           if (!newEvent) {
             return true
           }
```
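The `clientIndexOffset` bookkeeping is easiest to see in isolation: ignored client-only events stay in the matching group but must not consume a slot of `payload.newEvents`, so the incoming index lags by the number of skipped events. A simplified sketch with plain arrays (hypothetical shapes, not the package's `splitWhere` helper):

```ts
// Sketch: find the first pending entry that stops matching the incoming sequence,
// skipping client-only entries without consuming an incoming slot.
type PendingEntry = { key: string; clientOnly: boolean }

const firstMismatch = (pending: ReadonlyArray<PendingEntry>, incoming: ReadonlyArray<string>): number => {
  let clientIndexOffset = 0
  for (let index = 0; index < pending.length; index++) {
    const entry = pending[index]!
    if (entry.clientOnly) {
      clientIndexOffset++ // stays in the matching group, consumes no incoming slot
      continue
    }
    if (incoming[index - clientIndexOffset] !== entry.key) return index // split point
  }
  return -1 // all pending entries matched
}

// ['a', client-only 'x', 'b'] lines up against incoming ['a', 'b']:
firstMismatch(
  [
    { key: 'a', clientOnly: false },
    { key: 'x', clientOnly: true },
    { key: 'b', clientOnly: false },
  ],
  ['a', 'b'],
) // -1
```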
```diff
@@ -336,15 +364,15 @@ export const updateSyncState = ({
       const rebasedPending = rebaseEvents({
         events: divergentPending,
         baseEventId: newUpstreamHead,
-
+        isClientEvent,
       })
 
       const divergentNewEventsIndex = findDivergencePoint({
         existingEvents: payload.newEvents,
         incomingEvents: syncState.pending,
         isEqualEvent,
-
-
+        isClientEvent,
+        ignoreClientEvents,
       })
 
       return UpdateResultRebase.make({
```
```diff
@@ -361,6 +389,7 @@ export const updateSyncState = ({
         })
       }
     }
+    // #endregion
 
     case 'local-push': {
       if (payload.newEvents.length === 0) {
```
```diff
@@ -407,32 +436,32 @@ export const updateSyncState = ({
  * Gets the index relative to `existingEvents` where the divergence point is
  * by comparing each event in `existingEvents` to the corresponding event in `incomingEvents`
  */
-const findDivergencePoint = ({
+export const findDivergencePoint = ({
   existingEvents,
   incomingEvents,
   isEqualEvent,
-
-
+  isClientEvent,
+  ignoreClientEvents,
 }: {
   existingEvents: ReadonlyArray<MutationEvent.EncodedWithMeta>
   incomingEvents: ReadonlyArray<MutationEvent.EncodedWithMeta>
   isEqualEvent: (a: MutationEvent.EncodedWithMeta, b: MutationEvent.EncodedWithMeta) => boolean
-
-
+  isClientEvent: (event: MutationEvent.EncodedWithMeta) => boolean
+  ignoreClientEvents: boolean
 }): number => {
-  if (
-    const filteredExistingEvents = existingEvents.filter((event) => !
-    const
+  if (ignoreClientEvents) {
+    const filteredExistingEvents = existingEvents.filter((event) => !isClientEvent(event))
+    const divergencePointWithoutClientEvents = findDivergencePoint({
       existingEvents: filteredExistingEvents,
       incomingEvents,
       isEqualEvent,
-
-
+      isClientEvent,
+      ignoreClientEvents: false,
     })
 
-    if (
+    if (divergencePointWithoutClientEvents === -1) return -1
 
-    const divergencePointEventId = existingEvents[
+    const divergencePointEventId = existingEvents[divergencePointWithoutClientEvents]!.id
     // Now find the divergence point in the original array
     return existingEvents.findIndex((event) => EventId.isEqual(event.id, divergencePointEventId))
   }
```
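The recursion in `findDivergencePoint` locates the divergence on a copy with client events filtered out, then translates the result back to an index in the unfiltered array by looking the event id up again, since filtering shifted all positions. The translation idea in isolation (ids reduced to strings; illustrative only, not the package's exact lines):

```ts
// Sketch: an index found on a filtered copy is mapped back to the original
// array via the event id, because filtering shifted the positions.
const existing = ['e1', 'client-only', 'e2', 'e3']
const withoutClientEvents = existing.filter((id) => id !== 'client-only')

const divergenceIndexFiltered = 1 // assume divergence found at 'e2' in the filtered copy
const divergenceId = withoutClientEvents[divergenceIndexFiltered]!
existing.findIndex((id) => id === divergenceId) // 2 — index in the original array
```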
```diff
@@ -447,15 +476,15 @@ const findDivergencePoint = ({
 const rebaseEvents = ({
   events,
   baseEventId,
-
+  isClientEvent,
 }: {
   events: ReadonlyArray<MutationEvent.EncodedWithMeta>
   baseEventId: EventId.EventId
-
+  isClientEvent: (event: MutationEvent.EncodedWithMeta) => boolean
 }): ReadonlyArray<MutationEvent.EncodedWithMeta> => {
   let prevEventId = baseEventId
   return events.map((event) => {
-    const isLocal =
+    const isLocal = isClientEvent(event)
     const newEvent = event.rebase(prevEventId, isLocal)
     prevEventId = newEvent.id
     return newEvent
```
```diff
@@ -470,3 +499,56 @@ const rebaseEvents = ({
  * can process more efficiently which avoids push-threshing
  */
 const _flattenUpdateResults = (_updateResults: ReadonlyArray<UpdateResult>) => {}
+
+const validateSyncState = (syncState: SyncState) => {
+  // Validate that the rollback tail and pending events together form a continuous chain of events / linked list via the parentId
+  const chain = [...syncState.rollbackTail, ...syncState.pending]
+  for (let i = 0; i < chain.length; i++) {
+    const event = chain[i]!
+    const nextEvent = chain[i + 1]
+    if (nextEvent === undefined) break // Reached end of chain
+
+    if (EventId.isGreaterThan(event.id, nextEvent.id)) {
+      shouldNeverHappen('Events must be sorted in ascending order by eventId', chain, {
+        event,
+        nextEvent,
+      })
+    }
+
+    // If the global id has increased, then the client id must be 0
+    const globalIdHasIncreased = nextEvent.id.global > event.id.global
+    if (globalIdHasIncreased) {
+      if (nextEvent.id.client !== 0) {
+        shouldNeverHappen(
+          `New global events must point to clientId 0 in the parentId. Received: (${nextEvent.id.global},${nextEvent.id.client})`,
+          chain,
+          {
+            event,
+            nextEvent,
+          },
+        )
+      }
+    } else {
+      // Otherwise, the parentId must be the same as the previous event's id
+      if (EventId.isEqual(nextEvent.parentId, event.id) === false) {
+        shouldNeverHappen('Events must be linked in a continuous chain via the parentId', chain, {
+          event,
+          nextEvent,
+        })
+      }
+    }
+  }
+
+  // TODO double check this
+  // const globalRollbackTail = syncState.rollbackTail.filter((event) => event.id.client === 0)
+  // // The parent of the first global rollback tail event ("oldest event") must be the upstream head (if there is a rollback tail)
+  // if (globalRollbackTail.length > 0) {
+  //   const firstRollbackTailEvent = globalRollbackTail[0]!
+  //   if (EventId.isEqual(firstRollbackTailEvent.parentId, syncState.upstreamHead) === false) {
+  //     shouldNeverHappen('The parent of the first rollback tail event must be the upstream head', chain, {
+  //       event: firstRollbackTailEvent,
+  //       upstreamHead: syncState.upstreamHead,
+  //     })
+  //   }
+  // }
+}
```