@livestore/common 0.3.0-dev.26 → 0.3.0-dev.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/.tsbuildinfo +1 -1
- package/dist/adapter-types.d.ts +13 -12
- package/dist/adapter-types.d.ts.map +1 -1
- package/dist/adapter-types.js +5 -6
- package/dist/adapter-types.js.map +1 -1
- package/dist/devtools/devtools-messages-client-session.d.ts +21 -21
- package/dist/devtools/devtools-messages-common.d.ts +13 -6
- package/dist/devtools/devtools-messages-common.d.ts.map +1 -1
- package/dist/devtools/devtools-messages-common.js +6 -0
- package/dist/devtools/devtools-messages-common.js.map +1 -1
- package/dist/devtools/devtools-messages-leader.d.ts +25 -25
- package/dist/devtools/devtools-messages-leader.d.ts.map +1 -1
- package/dist/devtools/devtools-messages-leader.js +1 -2
- package/dist/devtools/devtools-messages-leader.js.map +1 -1
- package/dist/leader-thread/LeaderSyncProcessor.d.ts +29 -7
- package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
- package/dist/leader-thread/LeaderSyncProcessor.js +259 -199
- package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
- package/dist/leader-thread/apply-mutation.d.ts +14 -9
- package/dist/leader-thread/apply-mutation.d.ts.map +1 -1
- package/dist/leader-thread/apply-mutation.js +43 -36
- package/dist/leader-thread/apply-mutation.js.map +1 -1
- package/dist/leader-thread/leader-worker-devtools.d.ts +1 -1
- package/dist/leader-thread/leader-worker-devtools.d.ts.map +1 -1
- package/dist/leader-thread/leader-worker-devtools.js +4 -5
- package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
- package/dist/leader-thread/make-leader-thread-layer.d.ts +15 -3
- package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
- package/dist/leader-thread/make-leader-thread-layer.js +29 -34
- package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
- package/dist/leader-thread/mod.d.ts +1 -1
- package/dist/leader-thread/mod.d.ts.map +1 -1
- package/dist/leader-thread/mod.js +1 -1
- package/dist/leader-thread/mod.js.map +1 -1
- package/dist/leader-thread/mutationlog.d.ts +19 -3
- package/dist/leader-thread/mutationlog.d.ts.map +1 -1
- package/dist/leader-thread/mutationlog.js +105 -12
- package/dist/leader-thread/mutationlog.js.map +1 -1
- package/dist/leader-thread/pull-queue-set.d.ts +1 -1
- package/dist/leader-thread/pull-queue-set.d.ts.map +1 -1
- package/dist/leader-thread/pull-queue-set.js +6 -16
- package/dist/leader-thread/pull-queue-set.js.map +1 -1
- package/dist/leader-thread/recreate-db.d.ts.map +1 -1
- package/dist/leader-thread/recreate-db.js +4 -3
- package/dist/leader-thread/recreate-db.js.map +1 -1
- package/dist/leader-thread/types.d.ts +34 -19
- package/dist/leader-thread/types.d.ts.map +1 -1
- package/dist/leader-thread/types.js.map +1 -1
- package/dist/rehydrate-from-mutationlog.d.ts +5 -4
- package/dist/rehydrate-from-mutationlog.d.ts.map +1 -1
- package/dist/rehydrate-from-mutationlog.js +7 -9
- package/dist/rehydrate-from-mutationlog.js.map +1 -1
- package/dist/schema/EventId.d.ts +9 -0
- package/dist/schema/EventId.d.ts.map +1 -1
- package/dist/schema/EventId.js +22 -2
- package/dist/schema/EventId.js.map +1 -1
- package/dist/schema/MutationEvent.d.ts +78 -25
- package/dist/schema/MutationEvent.d.ts.map +1 -1
- package/dist/schema/MutationEvent.js +25 -12
- package/dist/schema/MutationEvent.js.map +1 -1
- package/dist/schema/schema.js +1 -1
- package/dist/schema/schema.js.map +1 -1
- package/dist/schema/system-tables.d.ts +67 -0
- package/dist/schema/system-tables.d.ts.map +1 -1
- package/dist/schema/system-tables.js +12 -1
- package/dist/schema/system-tables.js.map +1 -1
- package/dist/sync/ClientSessionSyncProcessor.d.ts +9 -1
- package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
- package/dist/sync/ClientSessionSyncProcessor.js +25 -19
- package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
- package/dist/sync/sync.d.ts +6 -5
- package/dist/sync/sync.d.ts.map +1 -1
- package/dist/sync/sync.js.map +1 -1
- package/dist/sync/syncstate.d.ts +47 -71
- package/dist/sync/syncstate.d.ts.map +1 -1
- package/dist/sync/syncstate.js +136 -139
- package/dist/sync/syncstate.js.map +1 -1
- package/dist/sync/syncstate.test.js +203 -284
- package/dist/sync/syncstate.test.js.map +1 -1
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +2 -2
- package/src/adapter-types.ts +11 -13
- package/src/devtools/devtools-messages-common.ts +9 -0
- package/src/devtools/devtools-messages-leader.ts +1 -2
- package/src/leader-thread/LeaderSyncProcessor.ts +457 -351
- package/src/leader-thread/apply-mutation.ts +81 -71
- package/src/leader-thread/leader-worker-devtools.ts +5 -7
- package/src/leader-thread/make-leader-thread-layer.ts +60 -53
- package/src/leader-thread/mod.ts +1 -1
- package/src/leader-thread/mutationlog.ts +166 -13
- package/src/leader-thread/recreate-db.ts +4 -3
- package/src/leader-thread/types.ts +33 -23
- package/src/rehydrate-from-mutationlog.ts +12 -12
- package/src/schema/EventId.ts +26 -2
- package/src/schema/MutationEvent.ts +32 -16
- package/src/schema/schema.ts +1 -1
- package/src/schema/system-tables.ts +20 -1
- package/src/sync/ClientSessionSyncProcessor.ts +35 -23
- package/src/sync/sync.ts +6 -9
- package/src/sync/syncstate.test.ts +228 -315
- package/src/sync/syncstate.ts +202 -187
- package/src/version.ts +1 -1
- package/tmp/pack.tgz +0 -0
- package/src/leader-thread/pull-queue-set.ts +0 -67
package/src/sync/syncstate.ts
CHANGED
@@ -1,4 +1,4 @@
-import { casesHandled, shouldNeverHappen } from '@livestore/utils'
+import { casesHandled, LS_DEV, shouldNeverHappen } from '@livestore/utils'
 import { Match, ReadonlyArray, Schema } from '@livestore/utils/effect'
 
 import { UnexpectedError } from '../adapter-types.js'
@@ -9,75 +9,65 @@ import * as MutationEvent from '../schema/MutationEvent.js'
  * SyncState represents the current sync state of a sync node relative to an upstream node.
  * Events flow from local to upstream, with each state maintaining its own event head.
  *
- *
+ * Example:
  * ```
- *
- * |
- *
- *
- *
- *
+ * +------------------------+
+ * |     PENDING EVENTS     |
+ * +------------------------+
+ *       ▼              ▼
+ * Upstream Head    Local Head
+ * (1,0)     (1,1), (1,2), (2,0)
 * ```
 *
- *
- * -
- *
- * - Subject to rebase if rejected.
- * - **Rollback Tail**: Events that are kept around temporarily for potential rollback until confirmed by upstream.
- * - Currently only needed for ClientSessionSyncProcessor.
- * - Note: Confirmation of an event is stronger than acknowledgment of an event and can only be done by the
- *   absolute authority in the sync hierarchy (i.e. the sync backend in our case).
+ * **Pending Events**: Events awaiting acknowledgment from the upstream.
+ * - Can be confirmed or rejected by the upstream.
+ * - Subject to rebase if rejected.
 *
 * Payloads:
 * - `PayloadUpstreamRebase`: Upstream has performed a rebase, so downstream must roll back to the specified event
 *   and rebase the pending events on top of the new events.
 * - `PayloadUpstreamAdvance`: Upstream has advanced, so downstream must rebase the pending events on top of the new events.
- * - `PayloadUpstreamTrimRollbackTail`: Upstream has advanced, so downstream can trim the rollback tail.
 * - `PayloadLocalPush`: Local push payload
 *
 * Invariants:
 * 1. **Chain Continuity**: Each event must reference its immediate parent.
 * 2. **Head Ordering**: Upstream Head ≤ Local Head.
- * 3. **
+ * 3. **Event number sequence**: Must follow the pattern (1,0)→(1,1)→(1,2)→(2,0).
 *
 * A few further notes to help form an intuition:
 * - The goal is to keep the pending events as small as possible (i.e. to have synced with the next upstream node)
 * - There are 2 cases for rebasing:
 *   - The conflicting event only conflicts with the pending events -> only (some of) the pending events need to be rolled back
- *   - The conflicting event conflicts even with the rollback tail (additionally to the pending events) -> events from both need to be rolled back
 *
 * The `merge` function processes updates to the sync state based on incoming payloads,
- * handling cases such as upstream rebase, advance
+ * handling cases such as upstream rebase, advance and local push.
 */
 export class SyncState extends Schema.Class<SyncState>('SyncState')({
   pending: Schema.Array(MutationEvent.EncodedWithMeta),
-  rollbackTail: Schema.Array(MutationEvent.EncodedWithMeta),
   /** What this node expects the next upstream node to have as its own local head */
   upstreamHead: EventId.EventId,
+  /** Equivalent to `pending.at(-1)?.id` if there are pending events */
   localHead: EventId.EventId,
 }) {
-  toJSON = (): any => {
-
-
-
-
-    localHead: `(${this.localHead.global},${this.localHead.client})`,
-  }
-  }
+  toJSON = (): any => ({
+    pending: this.pending.map((e) => e.toJSON()),
+    upstreamHead: EventId.toString(this.upstreamHead),
+    localHead: EventId.toString(this.localHead),
+  })
 }
 
+/**
+ * This payload propagates a rebase from the upstream node
+ */
 export class PayloadUpstreamRebase extends Schema.TaggedStruct('upstream-rebase', {
-  /**
-
+  /** Events which need to be rolled back */
+  rollbackEvents: Schema.Array(MutationEvent.EncodedWithMeta),
+  /** Events which need to be applied after the rollback (already rebased by the upstream node) */
   newEvents: Schema.Array(MutationEvent.EncodedWithMeta),
-  /** Trim rollback tail up to this event (inclusive). */
-  trimRollbackUntil: Schema.optional(EventId.EventId),
 }) {}
 
 export class PayloadUpstreamAdvance extends Schema.TaggedStruct('upstream-advance', {
   newEvents: Schema.Array(MutationEvent.EncodedWithMeta),
-  /** Trim rollback tail up to this event (inclusive). */
-  trimRollbackUntil: Schema.optional(EventId.EventId),
 }) {}
 
 export class PayloadLocalPush extends Schema.TaggedStruct('local-push', {
@@ -86,12 +76,10 @@ export class PayloadLocalPush extends Schema.TaggedStruct('local-push', {
 
 export class Payload extends Schema.Union(PayloadUpstreamRebase, PayloadUpstreamAdvance, PayloadLocalPush) {}
 
-export
-
-export type PayloadUpstream = typeof PayloadUpstream.Type
+export class PayloadUpstream extends Schema.Union(PayloadUpstreamRebase, PayloadUpstreamAdvance) {}
 
 /** Only used for debugging purposes */
-export class
+export class MergeContext extends Schema.Class<MergeContext>('MergeContext')({
   payload: Payload,
   syncState: SyncState,
 }) {
@@ -105,9 +93,10 @@ export class UpdateContext extends Schema.Class<UpdateContext>('UpdateContext')(
       _tag: 'upstream-advance',
       newEvents: this.payload.newEvents.map((e) => e.toJSON()),
     })),
-    Match.tag('upstream-rebase', () => ({
+    Match.tag('upstream-rebase', (payload) => ({
      _tag: 'upstream-rebase',
-      newEvents:
+      newEvents: payload.newEvents.map((e) => e.toJSON()),
+      rollbackEvents: payload.rollbackEvents.map((e) => e.toJSON()),
    })),
    Match.exhaustive,
  )
@@ -121,16 +110,18 @@ export class UpdateContext extends Schema.Class<UpdateContext>('UpdateContext')(
 export class MergeResultAdvance extends Schema.Class<MergeResultAdvance>('MergeResultAdvance')({
   _tag: Schema.Literal('advance'),
   newSyncState: SyncState,
-  /** Events which weren't pending before the update */
   newEvents: Schema.Array(MutationEvent.EncodedWithMeta),
-
+  /** Events which were previously pending but are now confirmed */
+  confirmedEvents: Schema.Array(MutationEvent.EncodedWithMeta),
+  mergeContext: MergeContext,
 }) {
   toJSON = (): any => {
     return {
       _tag: this._tag,
       newSyncState: this.newSyncState.toJSON(),
       newEvents: this.newEvents.map((e) => e.toJSON()),
-
+      confirmedEvents: this.confirmedEvents.map((e) => e.toJSON()),
+      mergeContext: this.mergeContext.toJSON(),
     }
   }
 }
@@ -138,18 +129,18 @@ export class MergeResultAdvance extends Schema.Class<MergeResultAdvance>('MergeR
 export class MergeResultRebase extends Schema.Class<MergeResultRebase>('MergeResultRebase')({
   _tag: Schema.Literal('rebase'),
   newSyncState: SyncState,
-  /** Events which weren't pending before the update */
   newEvents: Schema.Array(MutationEvent.EncodedWithMeta),
-
-
+  /** Events which need to be rolled back */
+  rollbackEvents: Schema.Array(MutationEvent.EncodedWithMeta),
+  mergeContext: MergeContext,
 }) {
   toJSON = (): any => {
     return {
       _tag: this._tag,
       newSyncState: this.newSyncState.toJSON(),
       newEvents: this.newEvents.map((e) => e.toJSON()),
-
-
+      rollbackEvents: this.rollbackEvents.map((e) => e.toJSON()),
+      mergeContext: this.mergeContext.toJSON(),
     }
   }
 }
@@ -158,13 +149,13 @@ export class MergeResultReject extends Schema.Class<MergeResultReject>('MergeRes
   _tag: Schema.Literal('reject'),
   /** The minimum id that the new events must have */
   expectedMinimumId: EventId.EventId,
-
+  mergeContext: MergeContext,
 }) {
   toJSON = (): any => {
     return {
       _tag: this._tag,
-      expectedMinimumId:
-
+      expectedMinimumId: EventId.toString(this.expectedMinimumId),
+      mergeContext: this.mergeContext.toJSON(),
     }
   }
 }
@@ -181,12 +172,24 @@ export class MergeResult extends Schema.Union(
   MergeResultUnexpectedError,
 ) {}
 
-const unexpectedError = (cause: unknown): MergeResultUnexpectedError =>
-
+const unexpectedError = (cause: unknown): MergeResultUnexpectedError => {
+  if (LS_DEV) {
+    debugger
+  }
+
+  return MergeResultUnexpectedError.make({
     _tag: 'unexpected-error',
     cause: new UnexpectedError({ cause }),
   })
+}
+
+// TODO Idea: call merge recursively through hierarchy levels
+/*
+Idea: have a map that maps from `globalEventId` to Array<ClientEvents>
+The same applies to even further hierarchy levels
 
+TODO: possibly even keep the client events in a separate table in the client leader
+*/
 export const merge = ({
   syncState,
   payload,
@@ -202,32 +205,13 @@ export const merge = ({
   ignoreClientEvents?: boolean
 }): typeof MergeResult.Type => {
   validateSyncState(syncState)
+  validatePayload(payload)
 
-  const
-    rollbackTail: ReadonlyArray<MutationEvent.EncodedWithMeta>,
-  ): ReadonlyArray<MutationEvent.EncodedWithMeta> => {
-    const trimRollbackUntil = payload._tag === 'local-push' ? undefined : payload.trimRollbackUntil
-    if (trimRollbackUntil === undefined) return rollbackTail
-    const index = rollbackTail.findIndex((event) => EventId.isEqual(event.id, trimRollbackUntil))
-    if (index === -1) return []
-    return rollbackTail.slice(index + 1)
-  }
-
-  const updateContext = UpdateContext.make({ payload, syncState })
+  const mergeContext = MergeContext.make({ payload, syncState })
 
   switch (payload._tag) {
    case 'upstream-rebase': {
-
-      const rollbackIndex = syncState.rollbackTail.findIndex((event) =>
-        EventId.isEqual(event.id, payload.rollbackUntil),
-      )
-      if (rollbackIndex === -1) {
-        return unexpectedError(
-          `Rollback event not found in rollback tail. Rollback until: [${payload.rollbackUntil.global},${payload.rollbackUntil.client}]. Rollback tail: [${syncState.rollbackTail.map((e) => e.toString()).join(', ')}]`,
-        )
-      }
-
-      const eventsToRollback = [...syncState.rollbackTail.slice(rollbackIndex), ...syncState.pending]
+      const rollbackEvents = [...payload.rollbackEvents, ...syncState.pending]
 
      // Get the last new event's ID as the new upstream head
      const newUpstreamHead = payload.newEvents.at(-1)?.id ?? syncState.upstreamHead
@@ -239,41 +223,44 @@ export const merge = ({
        isClientEvent,
      })
 
-      return
-
-
-
-
-
-
+      return validateMergeResult(
+        MergeResultRebase.make({
+          _tag: 'rebase',
+          newSyncState: new SyncState({
+            pending: rebasedPending,
+            upstreamHead: newUpstreamHead,
+            localHead: rebasedPending.at(-1)?.id ?? newUpstreamHead,
+          }),
+          newEvents: [...payload.newEvents, ...rebasedPending],
+          rollbackEvents,
+          mergeContext,
        }),
-
-        eventsToRollback,
-        updateContext,
-      })
+      )
    }
 
    // #region upstream-advance
    case 'upstream-advance': {
      if (payload.newEvents.length === 0) {
-        return
-
-
-
-
-
-
+        return validateMergeResult(
+          MergeResultAdvance.make({
+            _tag: 'advance',
+            newSyncState: new SyncState({
+              pending: syncState.pending,
+              upstreamHead: syncState.upstreamHead,
+              localHead: syncState.localHead,
+            }),
+            newEvents: [],
+            confirmedEvents: [],
+            mergeContext: mergeContext,
          }),
-
-        updateContext,
-      })
+        )
      }
 
      // Validate that newEvents are sorted in ascending order by eventId
      for (let i = 1; i < payload.newEvents.length; i++) {
        if (EventId.isGreaterThan(payload.newEvents[i - 1]!.id, payload.newEvents[i]!.id)) {
          return unexpectedError(
-            `Events must be sorted in ascending order by eventId. Received: [${payload.newEvents.map((e) =>
+            `Events must be sorted in ascending order by eventId. Received: [${payload.newEvents.map((e) => EventId.toString(e.id)).join(', ')}]`,
          )
        }
      }
@@ -284,18 +271,7 @@ export const merge = ({
        EventId.isEqual(syncState.upstreamHead, payload.newEvents[0]!.id)
      ) {
        return unexpectedError(
-          `Incoming events must be greater than upstream head. Expected greater than:
-        )
-      }
-
-      // Validate that the parent id of the first incoming event is known
-      const knownEventGlobalIds = [...syncState.rollbackTail, ...syncState.pending].map((e) => e.id.global)
-      knownEventGlobalIds.push(syncState.upstreamHead.global)
-      const firstNewEvent = payload.newEvents[0]!
-      const hasUnknownParentId = knownEventGlobalIds.includes(firstNewEvent.parentId.global) === false
-      if (hasUnknownParentId) {
-        return unexpectedError(
-          `Incoming events must have a known parent id. Received: [${payload.newEvents.map((e) => `(${e.id.global},${e.id.client})`).join(', ')}]`,
+          `Incoming events must be greater than upstream head. Expected greater than: ${EventId.toString(syncState.upstreamHead)}. Received: [${payload.newEvents.map((e) => EventId.toString(e.id)).join(', ')}]`,
        )
      }
 
@@ -336,27 +312,19 @@ export const merge = ({
        },
      )
 
-
-
-
-
-
-
-
-
-
-
-
-            _tag: 'advance',
-            newSyncState: new SyncState({
-              pending: pendingRemaining,
-              rollbackTail: trimRollbackTail([...syncState.rollbackTail, ...pendingAndNewEvents]),
-              upstreamHead: newUpstreamHead,
-              localHead: pendingRemaining.at(-1)?.id ?? newUpstreamHead,
+        return validateMergeResult(
+          MergeResultAdvance.make({
+            _tag: 'advance',
+            newSyncState: new SyncState({
+              pending: pendingRemaining,
+              upstreamHead: newUpstreamHead,
+              localHead: pendingRemaining.at(-1)?.id ?? EventId.max(syncState.localHead, newUpstreamHead),
+            }),
+            newEvents,
+            confirmedEvents: pendingMatching,
+            mergeContext: mergeContext,
          }),
-
-        updateContext,
-      })
+        )
      } else {
        const divergentPending = syncState.pending.slice(divergentPendingIndex)
        const rebasedPending = rebaseEvents({
@@ -373,30 +341,35 @@ export const merge = ({
          ignoreClientEvents,
        })
 
-        return
-
-
-
-
-
-
+        return validateMergeResult(
+          MergeResultRebase.make({
+            _tag: 'rebase',
+            newSyncState: new SyncState({
+              pending: rebasedPending,
+              upstreamHead: newUpstreamHead,
+              localHead: rebasedPending.at(-1)!.id,
+            }),
+            newEvents: [...payload.newEvents.slice(divergentNewEventsIndex), ...rebasedPending],
+            rollbackEvents: divergentPending,
+            mergeContext,
          }),
-
-          eventsToRollback: [...syncState.rollbackTail, ...divergentPending],
-          updateContext,
-        })
+        )
      }
    }
    // #endregion
 
+    // This is the same as what's running in the sync backend
    case 'local-push': {
      if (payload.newEvents.length === 0) {
-        return
-
-
-
-
-
+        return validateMergeResult(
+          MergeResultAdvance.make({
+            _tag: 'advance',
+            newSyncState: syncState,
+            newEvents: [],
+            confirmedEvents: [],
+            mergeContext: mergeContext,
+          }),
+        )
      }
 
      const newEventsFirst = payload.newEvents.at(0)!
@@ -404,23 +377,27 @@ export const merge = ({
 
      if (invalidEventId) {
        const expectedMinimumId = EventId.nextPair(syncState.localHead, true).id
-        return
-
-
-
-
+        return validateMergeResult(
+          MergeResultReject.make({
+            _tag: 'reject',
+            expectedMinimumId,
+            mergeContext,
+          }),
+        )
      } else {
-        return
-
-
-
-
-
-
+        return validateMergeResult(
+          MergeResultAdvance.make({
+            _tag: 'advance',
+            newSyncState: new SyncState({
+              pending: [...syncState.pending, ...payload.newEvents],
+              upstreamHead: syncState.upstreamHead,
+              localHead: payload.newEvents.at(-1)!.id,
+            }),
+            newEvents: payload.newEvents,
+            confirmedEvents: [],
+            mergeContext: mergeContext,
          }),
-
-        updateContext,
-      })
+        )
      }
    }
 
@@ -498,19 +475,30 @@ const rebaseEvents = ({
 */
 const _flattenMergeResults = (_updateResults: ReadonlyArray<MergeResult>) => {}
 
+const validatePayload = (payload: typeof Payload.Type) => {
+  for (let i = 1; i < payload.newEvents.length; i++) {
+    if (EventId.isGreaterThanOrEqual(payload.newEvents[i - 1]!.id, payload.newEvents[i]!.id)) {
+      return unexpectedError(
+        `Events must be ordered in monotonically ascending order by eventId. Received: [${payload.newEvents.map((e) => EventId.toString(e.id)).join(', ')}]`,
+      )
+    }
+  }
+}
+
 const validateSyncState = (syncState: SyncState) => {
-
-
-
-    const event = chain[i]!
-    const nextEvent = chain[i + 1]
+  for (let i = 0; i < syncState.pending.length; i++) {
+    const event = syncState.pending[i]!
+    const nextEvent = syncState.pending[i + 1]
    if (nextEvent === undefined) break // Reached end of chain
 
-    if (EventId.
-      shouldNeverHappen(
-
-
-
+    if (EventId.isGreaterThanOrEqual(event.id, nextEvent.id)) {
+      shouldNeverHappen(
+        `Events must be ordered in monotonically ascending order by eventId. Received: [${syncState.pending.map((e) => EventId.toString(e.id)).join(', ')}]`,
+        {
+          event,
+          nextEvent,
+        },
+      )
    }
 
    // If the global id has increased, then the client id must be 0
@@ -518,8 +506,8 @@ const validateSyncState = (syncState: SyncState) => {
    if (globalIdHasIncreased) {
      if (nextEvent.id.client !== 0) {
        shouldNeverHappen(
-          `New global events must point to clientId 0 in the parentId. Received: (${
-
+          `New global events must point to clientId 0 in the parentId. Received: (${EventId.toString(nextEvent.id)})`,
+          syncState.pending,
          {
            event,
            nextEvent,
@@ -529,24 +517,51 @@ const validateSyncState = (syncState: SyncState) => {
    } else {
      // Otherwise, the parentId must be the same as the previous event's id
      if (EventId.isEqual(nextEvent.parentId, event.id) === false) {
-        shouldNeverHappen('Events must be linked in a continuous chain via the parentId',
+        shouldNeverHappen('Events must be linked in a continuous chain via the parentId', syncState.pending, {
          event,
          nextEvent,
        })
      }
    }
  }
+}
+
+const validateMergeResult = (mergeResult: typeof MergeResult.Type) => {
+  if (mergeResult._tag === 'unexpected-error' || mergeResult._tag === 'reject') return mergeResult
+
+  validateSyncState(mergeResult.newSyncState)
+
+  // Ensure local head is always greater than or equal to upstream head
+  if (EventId.isGreaterThan(mergeResult.newSyncState.upstreamHead, mergeResult.newSyncState.localHead)) {
+    shouldNeverHappen('Local head must be greater than or equal to upstream head', {
+      localHead: mergeResult.newSyncState.localHead,
+      upstreamHead: mergeResult.newSyncState.upstreamHead,
+    })
+  }
+
+  // Ensure new local head is greater than or equal to the previous local head
+  if (
+    EventId.isGreaterThanOrEqual(mergeResult.newSyncState.localHead, mergeResult.mergeContext.syncState.localHead) ===
+    false
+  ) {
+    shouldNeverHappen('New local head must be greater than or equal to the previous local head', {
+      localHead: mergeResult.newSyncState.localHead,
+      previousLocalHead: mergeResult.mergeContext.syncState.localHead,
+    })
+  }
+
+  // Ensure new upstream head is greater than or equal to the previous upstream head
+  if (
+    EventId.isGreaterThanOrEqual(
+      mergeResult.newSyncState.upstreamHead,
+      mergeResult.mergeContext.syncState.upstreamHead,
+    ) === false
+  ) {
+    shouldNeverHappen('New upstream head must be greater than or equal to the previous upstream head', {
+      upstreamHead: mergeResult.newSyncState.upstreamHead,
+      previousUpstreamHead: mergeResult.mergeContext.syncState.upstreamHead,
+    })
+  }
 
-
-  // const globalRollbackTail = syncState.rollbackTail.filter((event) => event.id.client === 0)
-  // // The parent of the first global rollback tail event ("oldest event") must be the upstream head (if there is a rollback tail)
-  // if (globalRollbackTail.length > 0) {
-  //   const firstRollbackTailEvent = globalRollbackTail[0]!
-  //   if (EventId.isEqual(firstRollbackTailEvent.parentId, syncState.upstreamHead) === false) {
-  //     shouldNeverHappen('The parent of the first rollback tail event must be the upstream head', chain, {
-  //       event: firstRollbackTailEvent,
-  //       upstreamHead: syncState.upstreamHead,
-  //     })
-  //   }
-  // }
+  return mergeResult
 }
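The reworked SyncState JSDoc and `merge` function above are easier to follow with a small, self-contained sketch of the `upstream-advance` case. This is a simplified model under stated assumptions, not the package's API: `EventId` is reduced to a plain `(global, client)` pair, and `SyncStateLite`, `Event`, and `mergeUpstreamAdvance` are illustrative names. The rebase path for divergent pending events is omitted; the real `merge` also produces rebase/reject results and runs the `validateMergeResult` head-ordering checks shown in the diff.

```ts
type EventId = { global: number; client: number }
type Event = { id: EventId; parentId: EventId }

const isGreaterThan = (a: EventId, b: EventId): boolean =>
  a.global > b.global || (a.global === b.global && a.client > b.client)
const isEqual = (a: EventId, b: EventId): boolean => a.global === b.global && a.client === b.client

type SyncStateLite = { pending: Event[]; upstreamHead: EventId; localHead: EventId }

// Sketch of the `upstream-advance` case only: pending events that reappear in the
// incoming events are treated as confirmed, the rest stay pending. No rebase is modeled.
const mergeUpstreamAdvance = (state: SyncStateLite, newEvents: Event[]) => {
  if (newEvents.length === 0) return { _tag: 'advance' as const, newSyncState: state, confirmedEvents: [] as Event[] }

  // Incoming events must be strictly ascending and ahead of the upstream head
  for (let i = 1; i < newEvents.length; i++) {
    if (!isGreaterThan(newEvents[i]!.id, newEvents[i - 1]!.id)) throw new Error('events not ascending')
  }
  if (!isGreaterThan(newEvents[0]!.id, state.upstreamHead)) throw new Error('events must be ahead of upstream head')

  const confirmedEvents = state.pending.filter((p) => newEvents.some((n) => isEqual(n.id, p.id)))
  const pendingRemaining = state.pending.filter((p) => !newEvents.some((n) => isEqual(n.id, p.id)))

  const newUpstreamHead = newEvents[newEvents.length - 1]!.id
  const newSyncState: SyncStateLite = {
    pending: pendingRemaining,
    upstreamHead: newUpstreamHead,
    // Local head stays at the last pending event, otherwise it falls back to the new upstream head
    localHead: pendingRemaining.length > 0 ? pendingRemaining[pendingRemaining.length - 1]!.id : newUpstreamHead,
  }
  return { _tag: 'advance' as const, newSyncState, confirmedEvents }
}

// Usage: upstream head is (1,0); (1,1) and (1,2) are pending locally.
// The upstream confirms (1,1), so (1,2) remains pending afterwards.
const ev = (g: number, c: number, pg: number, pc: number): Event => ({
  id: { global: g, client: c },
  parentId: { global: pg, client: pc },
})
const result = mergeUpstreamAdvance(
  {
    pending: [ev(1, 1, 1, 0), ev(1, 2, 1, 1)],
    upstreamHead: { global: 1, client: 0 },
    localHead: { global: 1, client: 2 },
  },
  [ev(1, 1, 1, 0)],
)
console.log(result.confirmedEvents.length, result.newSyncState.pending.length) // 1 1
```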
package/src/version.ts
CHANGED
@@ -2,7 +2,7 @@
 // import packageJson from '../package.json' with { type: 'json' }
 // export const liveStoreVersion = packageJson.version
 
-export const liveStoreVersion = '0.3.0-dev.26' as const
+export const liveStoreVersion = '0.3.0-dev.28' as const
 
 /**
  * This version number is incremented whenever the internal storage format changes in a breaking way.
package/tmp/pack.tgz
CHANGED
Binary file