@livestore/common 0.3.0-dev.25 → 0.3.0-dev.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/adapter-types.d.ts +13 -12
  3. package/dist/adapter-types.d.ts.map +1 -1
  4. package/dist/adapter-types.js +5 -6
  5. package/dist/adapter-types.js.map +1 -1
  6. package/dist/devtools/devtools-messages-client-session.d.ts +21 -21
  7. package/dist/devtools/devtools-messages-common.d.ts +13 -6
  8. package/dist/devtools/devtools-messages-common.d.ts.map +1 -1
  9. package/dist/devtools/devtools-messages-common.js +6 -0
  10. package/dist/devtools/devtools-messages-common.js.map +1 -1
  11. package/dist/devtools/devtools-messages-leader.d.ts +25 -25
  12. package/dist/devtools/devtools-messages-leader.d.ts.map +1 -1
  13. package/dist/devtools/devtools-messages-leader.js +1 -2
  14. package/dist/devtools/devtools-messages-leader.js.map +1 -1
  15. package/dist/leader-thread/LeaderSyncProcessor.d.ts +15 -6
  16. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  17. package/dist/leader-thread/LeaderSyncProcessor.js +229 -207
  18. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  19. package/dist/leader-thread/apply-mutation.d.ts +14 -9
  20. package/dist/leader-thread/apply-mutation.d.ts.map +1 -1
  21. package/dist/leader-thread/apply-mutation.js +43 -36
  22. package/dist/leader-thread/apply-mutation.js.map +1 -1
  23. package/dist/leader-thread/leader-worker-devtools.d.ts +1 -1
  24. package/dist/leader-thread/leader-worker-devtools.d.ts.map +1 -1
  25. package/dist/leader-thread/leader-worker-devtools.js +4 -5
  26. package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
  27. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  28. package/dist/leader-thread/make-leader-thread-layer.js +21 -33
  29. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  30. package/dist/leader-thread/mod.d.ts +1 -1
  31. package/dist/leader-thread/mod.d.ts.map +1 -1
  32. package/dist/leader-thread/mod.js +1 -1
  33. package/dist/leader-thread/mod.js.map +1 -1
  34. package/dist/leader-thread/mutationlog.d.ts +19 -3
  35. package/dist/leader-thread/mutationlog.d.ts.map +1 -1
  36. package/dist/leader-thread/mutationlog.js +105 -12
  37. package/dist/leader-thread/mutationlog.js.map +1 -1
  38. package/dist/leader-thread/pull-queue-set.d.ts +1 -1
  39. package/dist/leader-thread/pull-queue-set.d.ts.map +1 -1
  40. package/dist/leader-thread/pull-queue-set.js +6 -16
  41. package/dist/leader-thread/pull-queue-set.js.map +1 -1
  42. package/dist/leader-thread/recreate-db.d.ts.map +1 -1
  43. package/dist/leader-thread/recreate-db.js +4 -3
  44. package/dist/leader-thread/recreate-db.js.map +1 -1
  45. package/dist/leader-thread/types.d.ts +34 -19
  46. package/dist/leader-thread/types.d.ts.map +1 -1
  47. package/dist/leader-thread/types.js.map +1 -1
  48. package/dist/rehydrate-from-mutationlog.d.ts +5 -4
  49. package/dist/rehydrate-from-mutationlog.d.ts.map +1 -1
  50. package/dist/rehydrate-from-mutationlog.js +7 -9
  51. package/dist/rehydrate-from-mutationlog.js.map +1 -1
  52. package/dist/schema/EventId.d.ts +9 -0
  53. package/dist/schema/EventId.d.ts.map +1 -1
  54. package/dist/schema/EventId.js +17 -2
  55. package/dist/schema/EventId.js.map +1 -1
  56. package/dist/schema/MutationEvent.d.ts +78 -25
  57. package/dist/schema/MutationEvent.d.ts.map +1 -1
  58. package/dist/schema/MutationEvent.js +25 -12
  59. package/dist/schema/MutationEvent.js.map +1 -1
  60. package/dist/schema/schema.js +1 -1
  61. package/dist/schema/schema.js.map +1 -1
  62. package/dist/schema/system-tables.d.ts +67 -0
  63. package/dist/schema/system-tables.d.ts.map +1 -1
  64. package/dist/schema/system-tables.js +12 -1
  65. package/dist/schema/system-tables.js.map +1 -1
  66. package/dist/sync/ClientSessionSyncProcessor.d.ts +9 -1
  67. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  68. package/dist/sync/ClientSessionSyncProcessor.js +49 -43
  69. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  70. package/dist/sync/sync.d.ts +6 -5
  71. package/dist/sync/sync.d.ts.map +1 -1
  72. package/dist/sync/sync.js.map +1 -1
  73. package/dist/sync/syncstate.d.ts +60 -84
  74. package/dist/sync/syncstate.d.ts.map +1 -1
  75. package/dist/sync/syncstate.js +127 -136
  76. package/dist/sync/syncstate.js.map +1 -1
  77. package/dist/sync/syncstate.test.js +205 -276
  78. package/dist/sync/syncstate.test.js.map +1 -1
  79. package/dist/version.d.ts +1 -1
  80. package/dist/version.js +1 -1
  81. package/package.json +2 -2
  82. package/src/adapter-types.ts +11 -13
  83. package/src/devtools/devtools-messages-common.ts +9 -0
  84. package/src/devtools/devtools-messages-leader.ts +1 -2
  85. package/src/leader-thread/LeaderSyncProcessor.ts +399 -364
  86. package/src/leader-thread/apply-mutation.ts +81 -71
  87. package/src/leader-thread/leader-worker-devtools.ts +5 -7
  88. package/src/leader-thread/make-leader-thread-layer.ts +26 -41
  89. package/src/leader-thread/mod.ts +1 -1
  90. package/src/leader-thread/mutationlog.ts +166 -13
  91. package/src/leader-thread/recreate-db.ts +4 -3
  92. package/src/leader-thread/types.ts +33 -23
  93. package/src/rehydrate-from-mutationlog.ts +12 -12
  94. package/src/schema/EventId.ts +20 -2
  95. package/src/schema/MutationEvent.ts +32 -16
  96. package/src/schema/schema.ts +1 -1
  97. package/src/schema/system-tables.ts +20 -1
  98. package/src/sync/ClientSessionSyncProcessor.ts +59 -47
  99. package/src/sync/sync.ts +6 -9
  100. package/src/sync/syncstate.test.ts +239 -315
  101. package/src/sync/syncstate.ts +191 -188
  102. package/src/version.ts +1 -1
  103. package/tmp/pack.tgz +0 -0
  104. package/src/leader-thread/pull-queue-set.ts +0 -67
package/src/sync/syncstate.ts CHANGED
@@ -9,75 +9,65 @@ import * as MutationEvent from '../schema/MutationEvent.js'
  * SyncState represents the current sync state of a sync node relative to an upstream node.
  * Events flow from local to upstream, with each state maintaining its own event head.
  *
- * Event Chain Structure:
+ * Example:
  * ```
- * +-------------------------+------------------------+
- * | ROLLBACK TAIL | PENDING EVENTS |
- * +-------------------------+------------------------+
- * ▼ ▼
- * Upstream Head Local Head
- * Example: (0,0), (0,1), (1,0) (1,1), (1,2), (2,0)
+ * +------------------------+
+ * | PENDING EVENTS |
+ * +------------------------+
+ * ▼ ▼
+ * Upstream Head Local Head
+ * (1,0) (1,1), (1,2), (2,0)
  * ```
  *
- * State:
- * - **Pending Events**: Events awaiting acknowledgment from the upstream.
- * - Can be confirmed or rejected by the upstream.
- * - Subject to rebase if rejected.
- * - **Rollback Tail**: Events that are kept around temporarily for potential rollback until confirmed by upstream.
- * - Currently only needed for ClientSessionSyncProcessor.
- * - Note: Confirmation of an event is stronger than acknowledgment of an event and can only be done by the
- * absolute authority in the sync hierarchy (i.e. the sync backend in our case).
+ * **Pending Events**: Events awaiting acknowledgment from the upstream.
+ * - Can be confirmed or rejected by the upstream.
+ * - Subject to rebase if rejected.
  *
  * Payloads:
  * - `PayloadUpstreamRebase`: Upstream has performed a rebase, so downstream must roll back to the specified event
  * and rebase the pending events on top of the new events.
  * - `PayloadUpstreamAdvance`: Upstream has advanced, so downstream must rebase the pending events on top of the new events.
- * - `PayloadUpstreamTrimRollbackTail`: Upstream has advanced, so downstream can trim the rollback tail.
  * - `PayloadLocalPush`: Local push payload
  *
  * Invariants:
  * 1. **Chain Continuity**: Each event must reference its immediate parent.
  * 2. **Head Ordering**: Upstream Head ≤ Local Head.
- * 3. **ID Sequence**: Must follow the pattern (1,0)→(1,1)→(1,2)→(2,0).
+ * 3. **Event number sequence**: Must follow the pattern (1,0)→(1,1)→(1,2)→(2,0).
  *
  * A few further notes to help form an intuition:
  * - The goal is to keep the pending events as small as possible (i.e. to have synced with the next upstream node)
  * - There are 2 cases for rebasing:
  * - The conflicting event only conflicts with the pending events -> only (some of) the pending events need to be rolled back
- * - The conflicting event conflicts even with the rollback tail (additionally to the pending events) -> events from both need to be rolled back
  *
- * The `updateSyncState` function processes updates to the sync state based on incoming payloads,
- * handling cases such as upstream rebase, advance, local push, and rollback tail trimming.
+ * The `merge` function processes updates to the sync state based on incoming payloads,
+ * handling cases such as upstream rebase, advance and local push.
  */
  export class SyncState extends Schema.Class<SyncState>('SyncState')({
  pending: Schema.Array(MutationEvent.EncodedWithMeta),
- rollbackTail: Schema.Array(MutationEvent.EncodedWithMeta),
  /** What this node expects the next upstream node to have as its own local head */
  upstreamHead: EventId.EventId,
+ /** Equivalent to `pending.at(-1)?.id` if there are pending events */
  localHead: EventId.EventId,
  }) {
- toJSON = (): any => {
- return {
- pending: this.pending.map((e) => e.toJSON()),
- rollbackTail: this.rollbackTail.map((e) => e.toJSON()),
- upstreamHead: `(${this.upstreamHead.global},${this.upstreamHead.client})`,
- localHead: `(${this.localHead.global},${this.localHead.client})`,
- }
- }
+ toJSON = (): any => ({
+ pending: this.pending.map((e) => e.toJSON()),
+ upstreamHead: EventId.toString(this.upstreamHead),
+ localHead: EventId.toString(this.localHead),
+ })
  }

+ /**
+ * This payload propagates a rebase from the upstream node
+ */
  export class PayloadUpstreamRebase extends Schema.TaggedStruct('upstream-rebase', {
- /** Rollback until this event in the rollback tail (inclusive). Starting from the end of the rollback tail. */
- rollbackUntil: EventId.EventId,
+ /** Events which need to be rolled back */
+ rollbackEvents: Schema.Array(MutationEvent.EncodedWithMeta),
+ /** Events which need to be applied after the rollback (already rebased by the upstream node) */
  newEvents: Schema.Array(MutationEvent.EncodedWithMeta),
- /** Trim rollback tail up to this event (inclusive). */
- trimRollbackUntil: Schema.optional(EventId.EventId),
  }) {}

  export class PayloadUpstreamAdvance extends Schema.TaggedStruct('upstream-advance', {
  newEvents: Schema.Array(MutationEvent.EncodedWithMeta),
- /** Trim rollback tail up to this event (inclusive). */
- trimRollbackUntil: Schema.optional(EventId.EventId),
  }) {}

  export class PayloadLocalPush extends Schema.TaggedStruct('local-push', {
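To make the `(global, client)` numbering in the doc comment above concrete, here is a small self-contained TypeScript sketch; the `Id` type and `isGreaterThan` helper are illustrative stand-ins rather than this package's `EventId` API. It checks the diagram's example against invariants 2 and 3:

```ts
// Illustrative stand-in for an event id: a pair of a global and a client sequence number.
type Id = { global: number; client: number }

// Ids order lexicographically: first by the global part, then by the client part.
const isGreaterThan = (a: Id, b: Id): boolean =>
  a.global > b.global || (a.global === b.global && a.client > b.client)

// The example from the diagram: upstream head (1,0), pending events (1,1), (1,2), (2,0).
const upstreamHead: Id = { global: 1, client: 0 }
const pending: Id[] = [
  { global: 1, client: 1 },
  { global: 1, client: 2 },
  { global: 2, client: 0 }, // a new global event resets the client counter to 0
]
const localHead: Id = pending.at(-1)! // local head equals the last pending event's id

// Invariant 2 (head ordering): the upstream head may never be ahead of the local head.
console.assert(!isGreaterThan(upstreamHead, localHead))

// Invariant 3 (event number sequence): ids strictly increase along the chain.
const chain = [upstreamHead, ...pending]
for (let i = 1; i < chain.length; i++) {
  console.assert(isGreaterThan(chain[i]!, chain[i - 1]!))
}
```

A new global event such as (2,0) starts a fresh client counter, which is why (1,0)→(1,1)→(1,2)→(2,0) is strictly ascending under this ordering.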
@@ -86,12 +76,10 @@ export class PayloadLocalPush extends Schema.TaggedStruct('local-push', {

  export class Payload extends Schema.Union(PayloadUpstreamRebase, PayloadUpstreamAdvance, PayloadLocalPush) {}

- export const PayloadUpstream = Schema.Union(PayloadUpstreamRebase, PayloadUpstreamAdvance)
-
- export type PayloadUpstream = typeof PayloadUpstream.Type
+ export class PayloadUpstream extends Schema.Union(PayloadUpstreamRebase, PayloadUpstreamAdvance) {}

  /** Only used for debugging purposes */
- export class UpdateContext extends Schema.Class<UpdateContext>('UpdateContext')({
+ export class MergeContext extends Schema.Class<MergeContext>('MergeContext')({
  payload: Payload,
  syncState: SyncState,
  }) {
@@ -105,9 +93,10 @@ export class UpdateContext extends Schema.Class<UpdateContext>('UpdateContext')(
  _tag: 'upstream-advance',
  newEvents: this.payload.newEvents.map((e) => e.toJSON()),
  })),
- Match.tag('upstream-rebase', () => ({
+ Match.tag('upstream-rebase', (payload) => ({
  _tag: 'upstream-rebase',
- newEvents: this.payload.newEvents.map((e) => e.toJSON()),
+ newEvents: payload.newEvents.map((e) => e.toJSON()),
+ rollbackEvents: payload.rollbackEvents.map((e) => e.toJSON()),
  })),
  Match.exhaustive,
  )
@@ -118,78 +107,85 @@ export class UpdateContext extends Schema.Class<UpdateContext>('UpdateContext')(
  }
  }

- export class UpdateResultAdvance extends Schema.Class<UpdateResultAdvance>('UpdateResultAdvance')({
+ export class MergeResultAdvance extends Schema.Class<MergeResultAdvance>('MergeResultAdvance')({
  _tag: Schema.Literal('advance'),
  newSyncState: SyncState,
- /** Events which weren't pending before the update */
  newEvents: Schema.Array(MutationEvent.EncodedWithMeta),
- updateContext: UpdateContext,
+ /** Events which were previously pending but are now confirmed */
+ confirmedEvents: Schema.Array(MutationEvent.EncodedWithMeta),
+ mergeContext: MergeContext,
  }) {
  toJSON = (): any => {
  return {
  _tag: this._tag,
  newSyncState: this.newSyncState.toJSON(),
  newEvents: this.newEvents.map((e) => e.toJSON()),
- updateContext: this.updateContext.toJSON(),
+ confirmedEvents: this.confirmedEvents.map((e) => e.toJSON()),
+ mergeContext: this.mergeContext.toJSON(),
  }
  }
  }

- export class UpdateResultRebase extends Schema.Class<UpdateResultRebase>('UpdateResultRebase')({
+ export class MergeResultRebase extends Schema.Class<MergeResultRebase>('MergeResultRebase')({
  _tag: Schema.Literal('rebase'),
  newSyncState: SyncState,
- /** Events which weren't pending before the update */
  newEvents: Schema.Array(MutationEvent.EncodedWithMeta),
- eventsToRollback: Schema.Array(MutationEvent.EncodedWithMeta),
- updateContext: UpdateContext,
+ /** Events which need to be rolled back */
+ rollbackEvents: Schema.Array(MutationEvent.EncodedWithMeta),
+ mergeContext: MergeContext,
  }) {
  toJSON = (): any => {
  return {
  _tag: this._tag,
  newSyncState: this.newSyncState.toJSON(),
  newEvents: this.newEvents.map((e) => e.toJSON()),
- eventsToRollback: this.eventsToRollback.map((e) => e.toJSON()),
- updateContext: this.updateContext.toJSON(),
+ rollbackEvents: this.rollbackEvents.map((e) => e.toJSON()),
+ mergeContext: this.mergeContext.toJSON(),
  }
  }
  }

- export class UpdateResultReject extends Schema.Class<UpdateResultReject>('UpdateResultReject')({
+ export class MergeResultReject extends Schema.Class<MergeResultReject>('MergeResultReject')({
  _tag: Schema.Literal('reject'),
  /** The minimum id that the new events must have */
  expectedMinimumId: EventId.EventId,
- updateContext: UpdateContext,
+ mergeContext: MergeContext,
  }) {
  toJSON = (): any => {
  return {
  _tag: this._tag,
- expectedMinimumId: `(${this.expectedMinimumId.global},${this.expectedMinimumId.client})`,
- updateContext: this.updateContext.toJSON(),
+ expectedMinimumId: EventId.toString(this.expectedMinimumId),
+ mergeContext: this.mergeContext.toJSON(),
  }
  }
  }

- export class UpdateResultUnexpectedError extends Schema.Class<UpdateResultUnexpectedError>(
- 'UpdateResultUnexpectedError',
- )({
+ export class MergeResultUnexpectedError extends Schema.Class<MergeResultUnexpectedError>('MergeResultUnexpectedError')({
  _tag: Schema.Literal('unexpected-error'),
  cause: UnexpectedError,
  }) {}

- export class UpdateResult extends Schema.Union(
- UpdateResultAdvance,
- UpdateResultRebase,
- UpdateResultReject,
- UpdateResultUnexpectedError,
+ export class MergeResult extends Schema.Union(
+ MergeResultAdvance,
+ MergeResultRebase,
+ MergeResultReject,
+ MergeResultUnexpectedError,
  ) {}

- const unexpectedError = (cause: unknown): UpdateResultUnexpectedError =>
- UpdateResultUnexpectedError.make({
+ const unexpectedError = (cause: unknown): MergeResultUnexpectedError =>
+ MergeResultUnexpectedError.make({
  _tag: 'unexpected-error',
  cause: new UnexpectedError({ cause }),
  })

- export const updateSyncState = ({
+ // TODO Idea: call merge recursively through hierarchy levels
+ /*
+ Idea: have a map that maps from `globalEventId` to Array<ClientEvents>
+ The same applies to even further hierarchy levels
+
+ TODO: possibly even keep the client events in a separate table in the client leader
+ */
+ export const merge = ({
  syncState,
  payload,
  isClientEvent,
@@ -202,34 +198,14 @@ export const updateSyncState = ({
  isEqualEvent: (a: MutationEvent.EncodedWithMeta, b: MutationEvent.EncodedWithMeta) => boolean
  /** This is used in the leader which should ignore client events when receiving an upstream-advance payload */
  ignoreClientEvents?: boolean
- }): typeof UpdateResult.Type => {
+ }): typeof MergeResult.Type => {
  validateSyncState(syncState)

- const trimRollbackTail = (
- rollbackTail: ReadonlyArray<MutationEvent.EncodedWithMeta>,
- ): ReadonlyArray<MutationEvent.EncodedWithMeta> => {
- const trimRollbackUntil = payload._tag === 'local-push' ? undefined : payload.trimRollbackUntil
- if (trimRollbackUntil === undefined) return rollbackTail
- const index = rollbackTail.findIndex((event) => EventId.isEqual(event.id, trimRollbackUntil))
- if (index === -1) return []
- return rollbackTail.slice(index + 1)
- }
-
- const updateContext = UpdateContext.make({ payload, syncState })
+ const mergeContext = MergeContext.make({ payload, syncState })

  switch (payload._tag) {
  case 'upstream-rebase': {
- // Find the index of the rollback event in the rollback tail
- const rollbackIndex = syncState.rollbackTail.findIndex((event) =>
- EventId.isEqual(event.id, payload.rollbackUntil),
- )
- if (rollbackIndex === -1) {
- return unexpectedError(
- `Rollback event not found in rollback tail. Rollback until: [${payload.rollbackUntil.global},${payload.rollbackUntil.client}]. Rollback tail: [${syncState.rollbackTail.map((e) => e.toString()).join(', ')}]`,
- )
- }
-
- const eventsToRollback = [...syncState.rollbackTail.slice(rollbackIndex), ...syncState.pending]
+ const rollbackEvents = [...payload.rollbackEvents, ...syncState.pending]

  // Get the last new event's ID as the new upstream head
  const newUpstreamHead = payload.newEvents.at(-1)?.id ?? syncState.upstreamHead
@@ -241,41 +217,44 @@ export const updateSyncState = ({
  isClientEvent,
  })

- return UpdateResultRebase.make({
- _tag: 'rebase',
- newSyncState: new SyncState({
- pending: rebasedPending,
- rollbackTail: trimRollbackTail([...syncState.rollbackTail.slice(0, rollbackIndex), ...payload.newEvents]),
- upstreamHead: newUpstreamHead,
- localHead: rebasedPending.at(-1)?.id ?? newUpstreamHead,
+ return validateMergeResult(
+ MergeResultRebase.make({
+ _tag: 'rebase',
+ newSyncState: new SyncState({
+ pending: rebasedPending,
+ upstreamHead: newUpstreamHead,
+ localHead: rebasedPending.at(-1)?.id ?? newUpstreamHead,
+ }),
+ newEvents: [...payload.newEvents, ...rebasedPending],
+ rollbackEvents,
+ mergeContext,
  }),
- newEvents: [...payload.newEvents, ...rebasedPending],
- eventsToRollback,
- updateContext,
- })
+ )
  }

  // #region upstream-advance
  case 'upstream-advance': {
  if (payload.newEvents.length === 0) {
- return UpdateResultAdvance.make({
- _tag: 'advance',
- newSyncState: new SyncState({
- pending: syncState.pending,
- rollbackTail: trimRollbackTail(syncState.rollbackTail),
- upstreamHead: syncState.upstreamHead,
- localHead: syncState.localHead,
+ return validateMergeResult(
+ MergeResultAdvance.make({
+ _tag: 'advance',
+ newSyncState: new SyncState({
+ pending: syncState.pending,
+ upstreamHead: syncState.upstreamHead,
+ localHead: syncState.localHead,
+ }),
+ newEvents: [],
+ confirmedEvents: [],
+ mergeContext: mergeContext,
  }),
- newEvents: [],
- updateContext,
- })
+ )
  }

  // Validate that newEvents are sorted in ascending order by eventId
  for (let i = 1; i < payload.newEvents.length; i++) {
  if (EventId.isGreaterThan(payload.newEvents[i - 1]!.id, payload.newEvents[i]!.id)) {
  return unexpectedError(
- `Events must be sorted in ascending order by eventId. Received: [${payload.newEvents.map((e) => `(${e.id.global},${e.id.client})`).join(', ')}]`,
+ `Events must be sorted in ascending order by eventId. Received: [${payload.newEvents.map((e) => EventId.toString(e.id)).join(', ')}]`,
  )
  }
  }
@@ -286,18 +265,18 @@ export const updateSyncState = ({
  EventId.isEqual(syncState.upstreamHead, payload.newEvents[0]!.id)
  ) {
  return unexpectedError(
- `Incoming events must be greater than upstream head. Expected greater than: (${syncState.upstreamHead.global},${syncState.upstreamHead.client}). Received: [${payload.newEvents.map((e) => `(${e.id.global},${e.id.client})`).join(', ')}]`,
+ `Incoming events must be greater than upstream head. Expected greater than: ${EventId.toString(syncState.upstreamHead)}. Received: [${payload.newEvents.map((e) => EventId.toString(e.id)).join(', ')}]`,
  )
  }

  // Validate that the parent id of the first incoming event is known
- const knownEventGlobalIds = [...syncState.rollbackTail, ...syncState.pending].map((e) => e.id.global)
+ const knownEventGlobalIds = [...syncState.pending].flatMap((e) => [e.id.global, e.parentId.global])
  knownEventGlobalIds.push(syncState.upstreamHead.global)
  const firstNewEvent = payload.newEvents[0]!
  const hasUnknownParentId = knownEventGlobalIds.includes(firstNewEvent.parentId.global) === false
  if (hasUnknownParentId) {
  return unexpectedError(
- `Incoming events must have a known parent id. Received: [${payload.newEvents.map((e) => `(${e.id.global},${e.id.client})`).join(', ')}]`,
+ `Incoming events must have a known parent id. Received: [${payload.newEvents.map((e) => EventId.toString(e.id)).join(', ')}]`,
  )
  }

@@ -338,27 +317,19 @@ export const updateSyncState = ({
  },
  )

- const seenEventIds = new Set<string>()
- const pendingAndNewEvents = [...pendingMatching, ...payload.newEvents].filter((event) => {
- const eventIdStr = `${event.id.global},${event.id.client}`
- if (seenEventIds.has(eventIdStr)) {
- return false
- }
- seenEventIds.add(eventIdStr)
- return true
- })
-
- return UpdateResultAdvance.make({
- _tag: 'advance',
- newSyncState: new SyncState({
- pending: pendingRemaining,
- rollbackTail: trimRollbackTail([...syncState.rollbackTail, ...pendingAndNewEvents]),
- upstreamHead: newUpstreamHead,
- localHead: pendingRemaining.at(-1)?.id ?? newUpstreamHead,
+ return validateMergeResult(
+ MergeResultAdvance.make({
+ _tag: 'advance',
+ newSyncState: new SyncState({
+ pending: pendingRemaining,
+ upstreamHead: newUpstreamHead,
+ localHead: pendingRemaining.at(-1)?.id ?? EventId.max(syncState.localHead, newUpstreamHead),
+ }),
+ newEvents,
+ confirmedEvents: pendingMatching,
+ mergeContext: mergeContext,
  }),
- newEvents,
- updateContext,
- })
+ )
  } else {
  const divergentPending = syncState.pending.slice(divergentPendingIndex)
  const rebasedPending = rebaseEvents({
@@ -375,30 +346,35 @@ export const updateSyncState = ({
  ignoreClientEvents,
  })

- return UpdateResultRebase.make({
- _tag: 'rebase',
- newSyncState: new SyncState({
- pending: rebasedPending,
- rollbackTail: trimRollbackTail([...syncState.rollbackTail, ...payload.newEvents]),
- upstreamHead: newUpstreamHead,
- localHead: rebasedPending.at(-1)!.id,
+ return validateMergeResult(
+ MergeResultRebase.make({
+ _tag: 'rebase',
+ newSyncState: new SyncState({
+ pending: rebasedPending,
+ upstreamHead: newUpstreamHead,
+ localHead: rebasedPending.at(-1)!.id,
+ }),
+ newEvents: [...payload.newEvents.slice(divergentNewEventsIndex), ...rebasedPending],
+ rollbackEvents: divergentPending,
+ mergeContext,
  }),
- newEvents: [...payload.newEvents.slice(divergentNewEventsIndex), ...rebasedPending],
- eventsToRollback: [...syncState.rollbackTail, ...divergentPending],
- updateContext,
- })
+ )
  }
  }
  // #endregion

+ // This is the same as what's running in the sync backend
  case 'local-push': {
  if (payload.newEvents.length === 0) {
- return UpdateResultAdvance.make({
- _tag: 'advance',
- newSyncState: syncState,
- newEvents: [],
- updateContext,
- })
+ return validateMergeResult(
+ MergeResultAdvance.make({
+ _tag: 'advance',
+ newSyncState: syncState,
+ newEvents: [],
+ confirmedEvents: [],
+ mergeContext: mergeContext,
+ }),
+ )
  }

  const newEventsFirst = payload.newEvents.at(0)!
@@ -406,23 +382,27 @@ export const updateSyncState = ({

  if (invalidEventId) {
  const expectedMinimumId = EventId.nextPair(syncState.localHead, true).id
- return UpdateResultReject.make({
- _tag: 'reject',
- expectedMinimumId,
- updateContext,
- })
+ return validateMergeResult(
+ MergeResultReject.make({
+ _tag: 'reject',
+ expectedMinimumId,
+ mergeContext,
+ }),
+ )
  } else {
- return UpdateResultAdvance.make({
- _tag: 'advance',
- newSyncState: new SyncState({
- pending: [...syncState.pending, ...payload.newEvents],
- rollbackTail: syncState.rollbackTail,
- upstreamHead: syncState.upstreamHead,
- localHead: payload.newEvents.at(-1)!.id,
+ return validateMergeResult(
+ MergeResultAdvance.make({
+ _tag: 'advance',
+ newSyncState: new SyncState({
+ pending: [...syncState.pending, ...payload.newEvents],
+ upstreamHead: syncState.upstreamHead,
+ localHead: payload.newEvents.at(-1)!.id,
+ }),
+ newEvents: payload.newEvents,
+ confirmedEvents: [],
+ mergeContext: mergeContext,
  }),
- newEvents: payload.newEvents,
- updateContext,
- })
+ )
  }
  }

@@ -498,18 +478,16 @@ const rebaseEvents = ({
  * it could make sense to "flatten" update results into a single update result which the client session
  * can process more efficiently which avoids push-threshing
  */
- const _flattenUpdateResults = (_updateResults: ReadonlyArray<UpdateResult>) => {}
+ const _flattenMergeResults = (_updateResults: ReadonlyArray<MergeResult>) => {}

  const validateSyncState = (syncState: SyncState) => {
- // Validate that the rollback tail and pending events together form a continuous chain of events / linked list via the parentId
- const chain = [...syncState.rollbackTail, ...syncState.pending]
- for (let i = 0; i < chain.length; i++) {
- const event = chain[i]!
- const nextEvent = chain[i + 1]
+ for (let i = 0; i < syncState.pending.length; i++) {
+ const event = syncState.pending[i]!
+ const nextEvent = syncState.pending[i + 1]
  if (nextEvent === undefined) break // Reached end of chain

  if (EventId.isGreaterThan(event.id, nextEvent.id)) {
- shouldNeverHappen('Events must be sorted in ascending order by eventId', chain, {
+ shouldNeverHappen('Events must be sorted in ascending order by eventId', {
  event,
  nextEvent,
  })
@@ -520,8 +498,8 @@ const validateSyncState = (syncState: SyncState) => {
  if (globalIdHasIncreased) {
  if (nextEvent.id.client !== 0) {
  shouldNeverHappen(
- `New global events must point to clientId 0 in the parentId. Received: (${nextEvent.id.global},${nextEvent.id.client})`,
- chain,
+ `New global events must point to clientId 0 in the parentId. Received: (${EventId.toString(nextEvent.id)})`,
+ syncState.pending,
  {
  event,
  nextEvent,
@@ -531,24 +509,49 @@ const validateSyncState = (syncState: SyncState) => {
  } else {
  // Otherwise, the parentId must be the same as the previous event's id
  if (EventId.isEqual(nextEvent.parentId, event.id) === false) {
- shouldNeverHappen('Events must be linked in a continuous chain via the parentId', chain, {
+ shouldNeverHappen('Events must be linked in a continuous chain via the parentId', syncState.pending, {
  event,
  nextEvent,
  })
  }
  }
  }
+ }
+
+ const validateMergeResult = (mergeResult: typeof MergeResult.Type) => {
+ if (mergeResult._tag === 'unexpected-error' || mergeResult._tag === 'reject') return mergeResult
+
+ // Ensure local head is always greater than or equal to upstream head
+ if (EventId.isGreaterThan(mergeResult.newSyncState.upstreamHead, mergeResult.newSyncState.localHead)) {
+ shouldNeverHappen('Local head must be greater than or equal to upstream head', {
+ localHead: mergeResult.newSyncState.localHead,
+ upstreamHead: mergeResult.newSyncState.upstreamHead,
+ })
+ }
+
+ // Ensure new local head is greater than or equal to the previous local head
+ if (
+ EventId.isGreaterThanOrEqual(mergeResult.newSyncState.localHead, mergeResult.mergeContext.syncState.localHead) ===
+ false
+ ) {
+ shouldNeverHappen('New local head must be greater than or equal to the previous local head', {
+ localHead: mergeResult.newSyncState.localHead,
+ previousLocalHead: mergeResult.mergeContext.syncState.localHead,
+ })
+ }
+
+ // Ensure new upstream head is greater than or equal to the previous upstream head
+ if (
+ EventId.isGreaterThanOrEqual(
+ mergeResult.newSyncState.upstreamHead,
+ mergeResult.mergeContext.syncState.upstreamHead,
+ ) === false
+ ) {
+ shouldNeverHappen('New upstream head must be greater than or equal to the previous upstream head', {
+ upstreamHead: mergeResult.newSyncState.upstreamHead,
+ previousUpstreamHead: mergeResult.mergeContext.syncState.upstreamHead,
+ })
+ }

- // TODO double check this
- // const globalRollbackTail = syncState.rollbackTail.filter((event) => event.id.client === 0)
- // // The parent of the first global rollback tail event ("oldest event") must be the upstream head (if there is a rollback tail)
- // if (globalRollbackTail.length > 0) {
- // const firstRollbackTailEvent = globalRollbackTail[0]!
- // if (EventId.isEqual(firstRollbackTailEvent.parentId, syncState.upstreamHead) === false) {
- // shouldNeverHappen('The parent of the first rollback tail event must be the upstream head', chain, {
- // event: firstRollbackTailEvent,
- // upstreamHead: syncState.upstreamHead,
- // })
- // }
- // }
+ return mergeResult
  }
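For orientation, the sketch below shows how a downstream consumer might branch on the four merge result variants defined in this file. It is a minimal illustration based only on the result shapes visible in this diff; the result type and the `applyToDb`, `rollbackFromDb`, and `retryWithRenumberedEvents` helpers are hypothetical stand-ins, not exports of `@livestore/common`.

```ts
// Stand-in shapes mirroring MergeResultAdvance / MergeResultRebase / MergeResultReject / MergeResultUnexpectedError.
type Id = { global: number; client: number }
type Event = { id: Id }
type MergeResultSketch =
  | { _tag: 'advance'; newEvents: ReadonlyArray<Event>; confirmedEvents: ReadonlyArray<Event> }
  | { _tag: 'rebase'; newEvents: ReadonlyArray<Event>; rollbackEvents: ReadonlyArray<Event> }
  | { _tag: 'reject'; expectedMinimumId: Id }
  | { _tag: 'unexpected-error'; cause: unknown }

// Hypothetical side effects; a real sync processor would touch its database / push queue here.
const applyToDb = (events: ReadonlyArray<Event>) => console.log('apply', events.length)
const rollbackFromDb = (events: ReadonlyArray<Event>) => console.log('rollback', events.length)
const retryWithRenumberedEvents = (minId: Id) => console.log('renumber from', minId)

const handleMergeResult = (result: MergeResultSketch): void => {
  switch (result._tag) {
    case 'advance':
      // Upstream agreed with local history: apply the events that were not already pending locally.
      // `confirmedEvents` were pending (and thus already applied locally); they only need bookkeeping.
      applyToDb(result.newEvents)
      break
    case 'rebase':
      // Local pending events diverged: undo `rollbackEvents`, then apply the upstream events
      // together with the rebased pending events (both are included in `newEvents`).
      rollbackFromDb(result.rollbackEvents)
      applyToDb(result.newEvents)
      break
    case 'reject':
      // A local push used event ids that were not ahead of the current local head:
      // renumber starting at `expectedMinimumId` and push again.
      retryWithRenumberedEvents(result.expectedMinimumId)
      break
    case 'unexpected-error':
      throw result.cause
  }
}

// Example: an empty advance (as produced for an empty upstream-advance payload) is a no-op apply.
handleMergeResult({ _tag: 'advance', newEvents: [], confirmedEvents: [] })
```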
package/src/version.ts CHANGED
@@ -2,7 +2,7 @@
  // import packageJson from '../package.json' with { type: 'json' }
  // export const liveStoreVersion = packageJson.version

- export const liveStoreVersion = '0.3.0-dev.25' as const
+ export const liveStoreVersion = '0.3.0-dev.27' as const

  /**
  * This version number is incremented whenever the internal storage format changes in a breaking way.
package/tmp/pack.tgz CHANGED
Binary file