@livestore/common 0.3.0-dev.24 → 0.3.0-dev.26

This diff compares the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (50)
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/adapter-types.d.ts +4 -2
  3. package/dist/adapter-types.d.ts.map +1 -1
  4. package/dist/adapter-types.js +1 -1
  5. package/dist/adapter-types.js.map +1 -1
  6. package/dist/devtools/devtools-messages-client-session.d.ts +21 -21
  7. package/dist/devtools/devtools-messages-common.d.ts +6 -6
  8. package/dist/devtools/devtools-messages-leader.d.ts +24 -24
  9. package/dist/leader-thread/LeaderSyncProcessor.d.ts +2 -1
  10. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  11. package/dist/leader-thread/LeaderSyncProcessor.js +39 -37
  12. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  13. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  14. package/dist/leader-thread/make-leader-thread-layer.js +1 -0
  15. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  16. package/dist/leader-thread/mutationlog.d.ts +1 -0
  17. package/dist/leader-thread/mutationlog.d.ts.map +1 -1
  18. package/dist/leader-thread/mutationlog.js +1 -0
  19. package/dist/leader-thread/mutationlog.js.map +1 -1
  20. package/dist/schema/MutationEvent.d.ts +17 -1
  21. package/dist/schema/MutationEvent.d.ts.map +1 -1
  22. package/dist/schema/MutationEvent.js +18 -2
  23. package/dist/schema/MutationEvent.js.map +1 -1
  24. package/dist/sync/ClientSessionSyncProcessor.d.ts +2 -0
  25. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  26. package/dist/sync/ClientSessionSyncProcessor.js +36 -33
  27. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  28. package/dist/sync/sync.d.ts +10 -0
  29. package/dist/sync/sync.d.ts.map +1 -1
  30. package/dist/sync/sync.js.map +1 -1
  31. package/dist/sync/syncstate.d.ts +38 -16
  32. package/dist/sync/syncstate.d.ts.map +1 -1
  33. package/dist/sync/syncstate.js +110 -40
  34. package/dist/sync/syncstate.js.map +1 -1
  35. package/dist/sync/syncstate.test.js +60 -29
  36. package/dist/sync/syncstate.test.js.map +1 -1
  37. package/dist/version.d.ts +1 -1
  38. package/dist/version.js +1 -1
  39. package/package.json +2 -2
  40. package/src/adapter-types.ts +4 -2
  41. package/src/leader-thread/LeaderSyncProcessor.ts +42 -38
  42. package/src/leader-thread/make-leader-thread-layer.ts +1 -0
  43. package/src/leader-thread/mutationlog.ts +1 -0
  44. package/src/schema/MutationEvent.ts +18 -2
  45. package/src/sync/ClientSessionSyncProcessor.ts +39 -33
  46. package/src/sync/sync.ts +10 -0
  47. package/src/sync/syncstate.test.ts +72 -38
  48. package/src/sync/syncstate.ts +138 -58
  49. package/src/version.ts +1 -1
  50. package/tmp/pack.tgz +0 -0
package/src/sync/syncstate.ts CHANGED
@@ -1,4 +1,4 @@
-import { casesHandled } from '@livestore/utils'
+import { casesHandled, shouldNeverHappen } from '@livestore/utils'
 import { Match, ReadonlyArray, Schema } from '@livestore/utils/effect'
 
 import { UnexpectedError } from '../adapter-types.js'
@@ -24,6 +24,9 @@ import * as MutationEvent from '../schema/MutationEvent.js'
  * - Can be confirmed or rejected by the upstream.
  * - Subject to rebase if rejected.
  * - **Rollback Tail**: Events that are kept around temporarily for potential rollback until confirmed by upstream.
+ *   - Currently only needed for ClientSessionSyncProcessor.
+ *   - Note: Confirmation of an event is stronger than acknowledgment of an event and can only be done by the
+ *     absolute authority in the sync hierarchy (i.e. the sync backend in our case).
  *
  * Payloads:
  * - `PayloadUpstreamRebase`: Upstream has performed a rebase, so downstream must roll back to the specified event
@@ -37,12 +40,19 @@ import * as MutationEvent from '../schema/MutationEvent.js'
  * 2. **Head Ordering**: Upstream Head ≤ Local Head.
  * 3. **ID Sequence**: Must follow the pattern (1,0)→(1,1)→(1,2)→(2,0).
  *
- * The `updateSyncState` function processes updates to the sync state based on incoming payloads,
+ * A few further notes to help form an intuition:
+ * - The goal is to keep the pending events as small as possible (i.e. to have synced with the next upstream node)
+ * - There are 2 cases for rebasing:
+ *   - The conflicting event only conflicts with the pending events -> only (some of) the pending events need to be rolled back
+ *   - The conflicting event conflicts even with the rollback tail (additionally to the pending events) -> events from both need to be rolled back
+ *
+ * The `merge` function processes updates to the sync state based on incoming payloads,
  * handling cases such as upstream rebase, advance, local push, and rollback tail trimming.
  */
 export class SyncState extends Schema.Class<SyncState>('SyncState')({
   pending: Schema.Array(MutationEvent.EncodedWithMeta),
   rollbackTail: Schema.Array(MutationEvent.EncodedWithMeta),
+  /** What this node expects the next upstream node to have as its own local head */
   upstreamHead: EventId.EventId,
   localHead: EventId.EventId,
 }) {
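
To make the invariants in the updated doc comment concrete, here is a small standalone sketch of the `(global, client)` id pairs and the head-ordering rule. The types and the comparison helper are simplified stand-ins for illustration only, not the actual `EventId`/`SyncState` exports of this package.

// Illustrative stand-ins only (not the real @livestore/common exports).
type EventIdLike = { global: number; client: number }

// Invariant 3 (ID sequence): client ids increase within one global id,
// and each new global id starts again at client 0: (1,0) -> (1,1) -> (1,2) -> (2,0).
const idSequence: EventIdLike[] = [
  { global: 1, client: 0 },
  { global: 1, client: 1 },
  { global: 1, client: 2 },
  { global: 2, client: 0 },
]

// Invariant 2 (head ordering): the upstream head may never overtake the local head.
const isGreaterThan = (a: EventIdLike, b: EventIdLike): boolean =>
  a.global > b.global || (a.global === b.global && a.client > b.client)

const upstreamHead: EventIdLike = { global: 1, client: 2 } // last event confirmed by the next upstream node
const localHead: EventIdLike = idSequence.at(-1)! // (2,0), also counts still-pending local events
console.log(isGreaterThan(upstreamHead, localHead)) // false, so "Upstream Head <= Local Head" holds
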
@@ -108,7 +118,7 @@ export class UpdateContext extends Schema.Class<UpdateContext>('UpdateContext')(
   }
 }
 
-export class UpdateResultAdvance extends Schema.Class<UpdateResultAdvance>('UpdateResultAdvance')({
+export class MergeResultAdvance extends Schema.Class<MergeResultAdvance>('MergeResultAdvance')({
   _tag: Schema.Literal('advance'),
   newSyncState: SyncState,
   /** Events which weren't pending before the update */
@@ -125,7 +135,7 @@ export class UpdateResultAdvance extends Schema.Class<UpdateResultAdvance>('Upda
   }
 }
 
-export class UpdateResultRebase extends Schema.Class<UpdateResultRebase>('UpdateResultRebase')({
+export class MergeResultRebase extends Schema.Class<MergeResultRebase>('MergeResultRebase')({
   _tag: Schema.Literal('rebase'),
   newSyncState: SyncState,
   /** Events which weren't pending before the update */
@@ -144,7 +154,7 @@ export class UpdateResultRebase extends Schema.Class<UpdateResultRebase>('Update
   }
 }
 
-export class UpdateResultReject extends Schema.Class<UpdateResultReject>('UpdateResultReject')({
+export class MergeResultReject extends Schema.Class<MergeResultReject>('MergeResultReject')({
   _tag: Schema.Literal('reject'),
   /** The minimum id that the new events must have */
   expectedMinimumId: EventId.EventId,
@@ -159,40 +169,40 @@ export class UpdateResultReject extends Schema.Class<UpdateResultReject>('Update
   }
 }
 
-export class UpdateResultUnexpectedError extends Schema.Class<UpdateResultUnexpectedError>(
-  'UpdateResultUnexpectedError',
-)({
+export class MergeResultUnexpectedError extends Schema.Class<MergeResultUnexpectedError>('MergeResultUnexpectedError')({
   _tag: Schema.Literal('unexpected-error'),
   cause: UnexpectedError,
 }) {}
 
-export class UpdateResult extends Schema.Union(
-  UpdateResultAdvance,
-  UpdateResultRebase,
-  UpdateResultReject,
-  UpdateResultUnexpectedError,
+export class MergeResult extends Schema.Union(
+  MergeResultAdvance,
+  MergeResultRebase,
+  MergeResultReject,
+  MergeResultUnexpectedError,
 ) {}
 
-const unexpectedError = (cause: unknown): UpdateResultUnexpectedError =>
-  UpdateResultUnexpectedError.make({
+const unexpectedError = (cause: unknown): MergeResultUnexpectedError =>
+  MergeResultUnexpectedError.make({
     _tag: 'unexpected-error',
     cause: new UnexpectedError({ cause }),
   })
 
-export const updateSyncState = ({
+export const merge = ({
   syncState,
   payload,
-  isLocalEvent,
+  isClientEvent,
   isEqualEvent,
-  ignoreLocalEvents = false,
+  ignoreClientEvents = false,
 }: {
   syncState: SyncState
   payload: typeof Payload.Type
-  isLocalEvent: (event: MutationEvent.EncodedWithMeta) => boolean
+  isClientEvent: (event: MutationEvent.EncodedWithMeta) => boolean
   isEqualEvent: (a: MutationEvent.EncodedWithMeta, b: MutationEvent.EncodedWithMeta) => boolean
-  /** This is used in the leader which should ignore local events when receiving an upstream-advance payload */
-  ignoreLocalEvents?: boolean
-}): typeof UpdateResult.Type => {
+  /** This is used in the leader which should ignore client events when receiving an upstream-advance payload */
+  ignoreClientEvents?: boolean
+}): typeof MergeResult.Type => {
+  validateSyncState(syncState)
+
   const trimRollbackTail = (
     rollbackTail: ReadonlyArray<MutationEvent.EncodedWithMeta>,
   ): ReadonlyArray<MutationEvent.EncodedWithMeta> => {
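
Because `updateSyncState` is now exported as `merge` and the `isLocalEvent` / `ignoreLocalEvents` options became `isClientEvent` / `ignoreClientEvents`, call sites need to follow the rename. The sketch below shows roughly how an updated caller could look, assuming the exports shown above are in scope; the `{ _tag: 'upstream-advance', newEvents }` payload literal is an assumption read off this diff and may need to go through the actual `Payload` constructors instead.

// Rough caller sketch against the 0.3.0-dev.26 surface shown above (shapes assumed, not verified).
declare const syncState: SyncState
declare const incomingEvents: ReadonlyArray<MutationEvent.EncodedWithMeta>
declare const isClientEvent: (event: MutationEvent.EncodedWithMeta) => boolean
declare const isEqualEvent: (a: MutationEvent.EncodedWithMeta, b: MutationEvent.EncodedWithMeta) => boolean

const result = merge({
  syncState,
  payload: { _tag: 'upstream-advance', newEvents: incomingEvents }, // assumed payload shape
  isClientEvent, // previously `isLocalEvent`
  isEqualEvent,
  ignoreClientEvents: true, // previously `ignoreLocalEvents`; used by the leader on upstream-advance
})

switch (result._tag) {
  case 'advance': // apply `result.newEvents` on top of the current state
    break
  case 'rebase': // roll back, then re-apply the rebased `result.newSyncState.pending`
    break
  case 'reject': // only produced for local-push payloads; retry from `result.expectedMinimumId`
    break
  case 'unexpected-error': // surface `result.cause`
    break
}
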
@@ -226,10 +236,10 @@ export const updateSyncState = ({
       const rebasedPending = rebaseEvents({
         events: syncState.pending,
         baseEventId: newUpstreamHead,
-        isLocalEvent,
+        isClientEvent,
       })
 
-      return UpdateResultRebase.make({
+      return MergeResultRebase.make({
         _tag: 'rebase',
         newSyncState: new SyncState({
           pending: rebasedPending,
@@ -243,9 +253,10 @@ export const updateSyncState = ({
       })
     }
 
+    // #region upstream-advance
     case 'upstream-advance': {
       if (payload.newEvents.length === 0) {
-        return UpdateResultAdvance.make({
+        return MergeResultAdvance.make({
           _tag: 'advance',
           newSyncState: new SyncState({
             pending: syncState.pending,
@@ -268,9 +279,23 @@ export const updateSyncState = ({
       }
 
       // Validate that incoming events are larger than upstream head
-      if (EventId.isGreaterThan(syncState.upstreamHead, payload.newEvents[0]!.id)) {
+      if (
+        EventId.isGreaterThan(syncState.upstreamHead, payload.newEvents[0]!.id) ||
+        EventId.isEqual(syncState.upstreamHead, payload.newEvents[0]!.id)
+      ) {
         return unexpectedError(
-          `Incoming events must be greater than upstream head. Expected greater than: [${syncState.upstreamHead.global},${syncState.upstreamHead.client}]. Received: [${payload.newEvents.map((e) => `(${e.id.global},${e.id.client})`).join(', ')}]`,
+          `Incoming events must be greater than upstream head. Expected greater than: (${syncState.upstreamHead.global},${syncState.upstreamHead.client}). Received: [${payload.newEvents.map((e) => `(${e.id.global},${e.id.client})`).join(', ')}]`,
+        )
+      }
+
+      // Validate that the parent id of the first incoming event is known
+      const knownEventGlobalIds = [...syncState.rollbackTail, ...syncState.pending].map((e) => e.id.global)
+      knownEventGlobalIds.push(syncState.upstreamHead.global)
+      const firstNewEvent = payload.newEvents[0]!
+      const hasUnknownParentId = knownEventGlobalIds.includes(firstNewEvent.parentId.global) === false
+      if (hasUnknownParentId) {
+        return unexpectedError(
+          `Incoming events must have a known parent id. Received: [${payload.newEvents.map((e) => `(${e.id.global},${e.id.client})`).join(', ')}]`,
         )
       }
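
This hunk tightens the upstream-advance validation in two ways: a batch whose first event merely equals the upstream head is now rejected as well (previously only strictly smaller ids were caught), and the first incoming event must reference a parent that this node already knows about (the upstream head, the rollback tail, or a pending event). A small illustration with stand-in ids; the object shapes are simplified, not the real `MutationEvent.EncodedWithMeta`:

// Simplified stand-in ids for illustration; the real checks go through EventId.isGreaterThan/isEqual.
type Id = { global: number; client: number }

const upstreamHead: Id = { global: 5, client: 0 }

// Now rejected: the first incoming event equals the upstream head (a duplicate of an already-merged event).
const duplicate = { id: { global: 5, client: 0 }, parentId: { global: 4, client: 0 } }

// Rejected: parent global id 9 is neither the upstream head nor in the rollback tail / pending chain,
// i.e. the upstream skipped events this node has never seen.
const gapped = { id: { global: 10, client: 0 }, parentId: { global: 9, client: 0 } }

// Accepted: strictly greater than the upstream head, and its parent is the upstream head itself.
const ok = { id: { global: 6, client: 0 }, parentId: { global: 5, client: 0 } }
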
@@ -280,10 +305,11 @@ export const updateSyncState = ({
         existingEvents: syncState.pending,
         incomingEvents: payload.newEvents,
         isEqualEvent,
-        isLocalEvent,
-        ignoreLocalEvents,
+        isClientEvent,
+        ignoreClientEvents,
       })
 
+      // No divergent pending events, thus we can just advance (some of) the pending events
       if (divergentPendingIndex === -1) {
         const pendingEventIds = new Set(syncState.pending.map((e) => `${e.id.global},${e.id.client}`))
         const newEvents = payload.newEvents.filter((e) => !pendingEventIds.has(`${e.id.global},${e.id.client}`))
@@ -292,17 +318,17 @@ export const updateSyncState = ({
         // we need to split the pending events into two groups:
         // - pendingMatching: The pending events up to point where they match the incoming events
         // - pendingRemaining: The pending events after the point where they match the incoming events
-        // The `localIndexOffset` is used to account for the local events that are being ignored
-        let localIndexOffset = 0
+        // The `clientIndexOffset` is used to account for the client events that are being ignored
+        let clientIndexOffset = 0
         const [pendingMatching, pendingRemaining] = ReadonlyArray.splitWhere(
           syncState.pending,
           (pendingEvent, index) => {
-            if (ignoreLocalEvents && isLocalEvent(pendingEvent)) {
-              localIndexOffset++
+            if (ignoreClientEvents && isClientEvent(pendingEvent)) {
+              clientIndexOffset++
               return false
             }
 
-            const newEvent = payload.newEvents.at(index - localIndexOffset)
+            const newEvent = payload.newEvents.at(index - clientIndexOffset)
             if (!newEvent) {
               return true
             }
@@ -320,7 +346,7 @@ export const updateSyncState = ({
             return true
           })
 
-        return UpdateResultAdvance.make({
+        return MergeResultAdvance.make({
           _tag: 'advance',
           newSyncState: new SyncState({
             pending: pendingRemaining,
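
The `clientIndexOffset` bookkeeping above exists because, once the leader ignores client-only events, the pending list and the incoming upstream events are no longer index-aligned, so the offset re-aligns the comparison. A rough walk-through under assumed data (this assumes the upstream advance carries only global events, which is why the leader skips its own client events here):

// Assumed pending list, where c1 is a client-only event the leader ignores:
//   pending:            [e1, c1, e2, e3]
//   payload.newEvents:   [e1,     e2]
//
// splitWhere walk with ignoreClientEvents = true:
//   index 0: e1 -> compare with newEvents[0 - 0] = e1 -> equal, keep going
//   index 1: c1 -> client event, clientIndexOffset becomes 1, keep going
//   index 2: e2 -> compare with newEvents[2 - 1] = e2 -> equal, keep going
//   index 3: e3 -> newEvents[3 - 1] is undefined       -> split here
//
// Result: pendingMatching = [e1, c1, e2] (advanced past), pendingRemaining = [e3] (stays pending).
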
@@ -336,18 +362,18 @@ export const updateSyncState = ({
       const rebasedPending = rebaseEvents({
         events: divergentPending,
         baseEventId: newUpstreamHead,
-        isLocalEvent,
+        isClientEvent,
       })
 
       const divergentNewEventsIndex = findDivergencePoint({
         existingEvents: payload.newEvents,
         incomingEvents: syncState.pending,
         isEqualEvent,
-        isLocalEvent,
-        ignoreLocalEvents,
+        isClientEvent,
+        ignoreClientEvents,
       })
 
-      return UpdateResultRebase.make({
+      return MergeResultRebase.make({
         _tag: 'rebase',
         newSyncState: new SyncState({
           pending: rebasedPending,
@@ -361,10 +387,11 @@ export const updateSyncState = ({
         })
       }
     }
+    // #endregion
 
     case 'local-push': {
       if (payload.newEvents.length === 0) {
-        return UpdateResultAdvance.make({
+        return MergeResultAdvance.make({
           _tag: 'advance',
           newSyncState: syncState,
           newEvents: [],
@@ -377,13 +404,13 @@ export const updateSyncState = ({
 
       if (invalidEventId) {
         const expectedMinimumId = EventId.nextPair(syncState.localHead, true).id
-        return UpdateResultReject.make({
+        return MergeResultReject.make({
          _tag: 'reject',
          expectedMinimumId,
          updateContext,
        })
      } else {
-        return UpdateResultAdvance.make({
+        return MergeResultAdvance.make({
          _tag: 'advance',
          newSyncState: new SyncState({
            pending: [...syncState.pending, ...payload.newEvents],
@@ -407,32 +434,32 @@ export const updateSyncState = ({
  * Gets the index relative to `existingEvents` where the divergence point is
  * by comparing each event in `existingEvents` to the corresponding event in `incomingEvents`
  */
-const findDivergencePoint = ({
+export const findDivergencePoint = ({
   existingEvents,
   incomingEvents,
   isEqualEvent,
-  isLocalEvent,
-  ignoreLocalEvents,
+  isClientEvent,
+  ignoreClientEvents,
 }: {
   existingEvents: ReadonlyArray<MutationEvent.EncodedWithMeta>
   incomingEvents: ReadonlyArray<MutationEvent.EncodedWithMeta>
   isEqualEvent: (a: MutationEvent.EncodedWithMeta, b: MutationEvent.EncodedWithMeta) => boolean
-  isLocalEvent: (event: MutationEvent.EncodedWithMeta) => boolean
-  ignoreLocalEvents: boolean
+  isClientEvent: (event: MutationEvent.EncodedWithMeta) => boolean
+  ignoreClientEvents: boolean
 }): number => {
-  if (ignoreLocalEvents) {
-    const filteredExistingEvents = existingEvents.filter((event) => !isLocalEvent(event))
-    const divergencePointWithoutLocalEvents = findDivergencePoint({
+  if (ignoreClientEvents) {
+    const filteredExistingEvents = existingEvents.filter((event) => !isClientEvent(event))
+    const divergencePointWithoutClientEvents = findDivergencePoint({
      existingEvents: filteredExistingEvents,
      incomingEvents,
      isEqualEvent,
-      isLocalEvent,
-      ignoreLocalEvents: false,
+      isClientEvent,
+      ignoreClientEvents: false,
    })
 
-    if (divergencePointWithoutLocalEvents === -1) return -1
+    if (divergencePointWithoutClientEvents === -1) return -1
 
-    const divergencePointEventId = existingEvents[divergencePointWithoutLocalEvents]!.id
+    const divergencePointEventId = existingEvents[divergencePointWithoutClientEvents]!.id
    // Now find the divergence point in the original array
    return existingEvents.findIndex((event) => EventId.isEqual(event.id, divergencePointEventId))
  }
@@ -447,15 +474,15 @@ const findDivergencePoint = ({
 const rebaseEvents = ({
   events,
   baseEventId,
-  isLocalEvent,
+  isClientEvent,
 }: {
   events: ReadonlyArray<MutationEvent.EncodedWithMeta>
   baseEventId: EventId.EventId
-  isLocalEvent: (event: MutationEvent.EncodedWithMeta) => boolean
+  isClientEvent: (event: MutationEvent.EncodedWithMeta) => boolean
 }): ReadonlyArray<MutationEvent.EncodedWithMeta> => {
   let prevEventId = baseEventId
   return events.map((event) => {
-    const isLocal = isLocalEvent(event)
+    const isLocal = isClientEvent(event)
     const newEvent = event.rebase(prevEventId, isLocal)
     prevEventId = newEvent.id
     return newEvent
@@ -469,4 +496,57 @@ const rebaseEvents = ({
  * it could make sense to "flatten" update results into a single update result which the client session
  * can process more efficiently which avoids push-threshing
  */
-const _flattenUpdateResults = (_updateResults: ReadonlyArray<UpdateResult>) => {}
+const _flattenMergeResults = (_updateResults: ReadonlyArray<MergeResult>) => {}
+
+const validateSyncState = (syncState: SyncState) => {
+  // Validate that the rollback tail and pending events together form a continuous chain of events / linked list via the parentId
+  const chain = [...syncState.rollbackTail, ...syncState.pending]
+  for (let i = 0; i < chain.length; i++) {
+    const event = chain[i]!
+    const nextEvent = chain[i + 1]
+    if (nextEvent === undefined) break // Reached end of chain
+
+    if (EventId.isGreaterThan(event.id, nextEvent.id)) {
+      shouldNeverHappen('Events must be sorted in ascending order by eventId', chain, {
+        event,
+        nextEvent,
+      })
+    }
+
+    // If the global id has increased, then the client id must be 0
+    const globalIdHasIncreased = nextEvent.id.global > event.id.global
+    if (globalIdHasIncreased) {
+      if (nextEvent.id.client !== 0) {
+        shouldNeverHappen(
+          `New global events must point to clientId 0 in the parentId. Received: (${nextEvent.id.global},${nextEvent.id.client})`,
+          chain,
+          {
+            event,
+            nextEvent,
+          },
+        )
+      }
+    } else {
+      // Otherwise, the parentId must be the same as the previous event's id
+      if (EventId.isEqual(nextEvent.parentId, event.id) === false) {
+        shouldNeverHappen('Events must be linked in a continuous chain via the parentId', chain, {
+          event,
+          nextEvent,
+        })
+      }
+    }
+  }
+
+  // TODO double check this
+  // const globalRollbackTail = syncState.rollbackTail.filter((event) => event.id.client === 0)
+  // // The parent of the first global rollback tail event ("oldest event") must be the upstream head (if there is a rollback tail)
+  // if (globalRollbackTail.length > 0) {
+  //   const firstRollbackTailEvent = globalRollbackTail[0]!
+  //   if (EventId.isEqual(firstRollbackTailEvent.parentId, syncState.upstreamHead) === false) {
+  //     shouldNeverHappen('The parent of the first rollback tail event must be the upstream head', chain, {
+  //       event: firstRollbackTailEvent,
+  //       upstreamHead: syncState.upstreamHead,
+  //     })
+  //   }
+  // }
+}
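
The new `validateSyncState` helper (called at the top of `merge`) asserts that rollback tail plus pending events form one contiguous, parent-linked chain. The standalone illustration below shows a chain that passes and two that would trip `shouldNeverHappen`; the shapes are simplified stand-ins, not the real `MutationEvent.EncodedWithMeta`:

// Simplified stand-ins for illustration only.
type ChainId = { global: number; client: number }
type ChainEvent = { id: ChainId; parentId: ChainId }

// Passes: ascending ids, each event's parentId is the previous event's id,
// and the jump to a new global id starts at client 0.
const valid: ChainEvent[] = [
  { id: { global: 1, client: 1 }, parentId: { global: 1, client: 0 } },
  { id: { global: 1, client: 2 }, parentId: { global: 1, client: 1 } },
  { id: { global: 2, client: 0 }, parentId: { global: 1, client: 2 } },
]

// Trips "Events must be linked in a continuous chain via the parentId":
// (1,2) is missing, so the second event's parent pointer does not match the previous event.
const brokenLink: ChainEvent[] = [
  { id: { global: 1, client: 1 }, parentId: { global: 1, client: 0 } },
  { id: { global: 1, client: 3 }, parentId: { global: 1, client: 2 } },
]

// Trips the "New global events must point to clientId 0 ..." check:
// the first event of global id 2 starts at client 1 instead of 0.
const badGlobalStart: ChainEvent[] = [
  { id: { global: 1, client: 1 }, parentId: { global: 1, client: 0 } },
  { id: { global: 2, client: 1 }, parentId: { global: 1, client: 1 } },
]
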
package/src/version.ts CHANGED
@@ -2,7 +2,7 @@
 // import packageJson from '../package.json' with { type: 'json' }
 // export const liveStoreVersion = packageJson.version
 
-export const liveStoreVersion = '0.3.0-dev.24' as const
+export const liveStoreVersion = '0.3.0-dev.26' as const
 
 /**
  * This version number is incremented whenever the internal storage format changes in a breaking way.
package/tmp/pack.tgz CHANGED
Binary file