@livestore/common 0.3.0-dev.16 → 0.3.0-dev.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/adapter-types.d.ts +5 -4
  3. package/dist/adapter-types.d.ts.map +1 -1
  4. package/dist/adapter-types.js.map +1 -1
  5. package/dist/bounded-collections.d.ts +1 -1
  6. package/dist/bounded-collections.d.ts.map +1 -1
  7. package/dist/debug-info.d.ts.map +1 -1
  8. package/dist/derived-mutations.d.ts.map +1 -1
  9. package/dist/devtools/devtools-messages-client-session.d.ts +21 -21
  10. package/dist/devtools/devtools-messages-common.d.ts +6 -6
  11. package/dist/devtools/devtools-messages-common.d.ts.map +1 -1
  12. package/dist/devtools/devtools-messages-leader.d.ts +28 -28
  13. package/dist/devtools/index.d.ts.map +1 -1
  14. package/dist/init-singleton-tables.d.ts.map +1 -1
  15. package/dist/leader-thread/LeaderSyncProcessor.d.ts +3 -1
  16. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  17. package/dist/leader-thread/LeaderSyncProcessor.js +124 -43
  18. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  19. package/dist/leader-thread/apply-mutation.d.ts.map +1 -1
  20. package/dist/leader-thread/apply-mutation.js +8 -2
  21. package/dist/leader-thread/apply-mutation.js.map +1 -1
  22. package/dist/leader-thread/connection.d.ts.map +1 -1
  23. package/dist/leader-thread/leader-worker-devtools.d.ts.map +1 -1
  24. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  25. package/dist/leader-thread/make-leader-thread-layer.js +1 -0
  26. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  27. package/dist/leader-thread/mutationlog.d.ts.map +1 -1
  28. package/dist/leader-thread/pull-queue-set.d.ts +3 -3
  29. package/dist/leader-thread/pull-queue-set.d.ts.map +1 -1
  30. package/dist/leader-thread/pull-queue-set.js +9 -0
  31. package/dist/leader-thread/pull-queue-set.js.map +1 -1
  32. package/dist/leader-thread/shutdown-channel.d.ts +2 -5
  33. package/dist/leader-thread/shutdown-channel.d.ts.map +1 -1
  34. package/dist/leader-thread/shutdown-channel.js +2 -4
  35. package/dist/leader-thread/shutdown-channel.js.map +1 -1
  36. package/dist/leader-thread/types.d.ts +7 -2
  37. package/dist/leader-thread/types.d.ts.map +1 -1
  38. package/dist/mutation.d.ts.map +1 -1
  39. package/dist/otel.d.ts.map +1 -1
  40. package/dist/query-builder/api.d.ts.map +1 -1
  41. package/dist/query-builder/impl.d.ts.map +1 -1
  42. package/dist/rehydrate-from-mutationlog.d.ts.map +1 -1
  43. package/dist/schema/EventId.d.ts +8 -0
  44. package/dist/schema/EventId.d.ts.map +1 -1
  45. package/dist/schema/EventId.js +14 -0
  46. package/dist/schema/EventId.js.map +1 -1
  47. package/dist/schema/MutationEvent.d.ts.map +1 -1
  48. package/dist/schema/MutationEvent.js +1 -1
  49. package/dist/schema/MutationEvent.js.map +1 -1
  50. package/dist/schema/db-schema/ast/sqlite.d.ts.map +1 -1
  51. package/dist/schema/db-schema/ast/validate.d.ts.map +1 -1
  52. package/dist/schema/db-schema/dsl/field-defs.d.ts.map +1 -1
  53. package/dist/schema/db-schema/dsl/field-defs.js.map +1 -1
  54. package/dist/schema/db-schema/dsl/mod.d.ts.map +1 -1
  55. package/dist/schema/db-schema/dsl/mod.js.map +1 -1
  56. package/dist/schema/db-schema/hash.d.ts.map +1 -1
  57. package/dist/schema/mutations.d.ts.map +1 -1
  58. package/dist/schema/schema-helpers.d.ts.map +1 -1
  59. package/dist/schema/schema.d.ts +3 -1
  60. package/dist/schema/schema.d.ts.map +1 -1
  61. package/dist/schema/table-def.d.ts +1 -8
  62. package/dist/schema/table-def.d.ts.map +1 -1
  63. package/dist/schema-management/common.d.ts.map +1 -1
  64. package/dist/schema-management/migrations.d.ts.map +1 -1
  65. package/dist/schema-management/validate-mutation-defs.d.ts.map +1 -1
  66. package/dist/sql-queries/misc.d.ts.map +1 -1
  67. package/dist/sql-queries/sql-queries.d.ts.map +1 -1
  68. package/dist/sql-queries/sql-query-builder.d.ts.map +1 -1
  69. package/dist/sql-queries/types.d.ts.map +1 -1
  70. package/dist/sync/ClientSessionSyncProcessor.d.ts +11 -1
  71. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  72. package/dist/sync/ClientSessionSyncProcessor.js +48 -14
  73. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  74. package/dist/sync/next/compact-events.d.ts.map +1 -1
  75. package/dist/sync/next/facts.d.ts.map +1 -1
  76. package/dist/sync/next/history-dag.d.ts.map +1 -1
  77. package/dist/sync/next/rebase-events.d.ts.map +1 -1
  78. package/dist/sync/next/test/mutation-fixtures.d.ts.map +1 -1
  79. package/dist/sync/sync.d.ts +14 -9
  80. package/dist/sync/sync.d.ts.map +1 -1
  81. package/dist/sync/sync.js +7 -3
  82. package/dist/sync/sync.js.map +1 -1
  83. package/dist/sync/syncstate.d.ts +132 -21
  84. package/dist/sync/syncstate.d.ts.map +1 -1
  85. package/dist/sync/syncstate.js +129 -41
  86. package/dist/sync/syncstate.js.map +1 -1
  87. package/dist/sync/syncstate.test.js +19 -7
  88. package/dist/sync/syncstate.test.js.map +1 -1
  89. package/dist/sync/validate-push-payload.d.ts.map +1 -1
  90. package/dist/util.d.ts.map +1 -1
  91. package/dist/version.d.ts +1 -1
  92. package/dist/version.js +1 -1
  93. package/package.json +2 -2
  94. package/src/adapter-types.ts +5 -4
  95. package/src/leader-thread/LeaderSyncProcessor.ts +164 -54
  96. package/src/leader-thread/apply-mutation.ts +17 -2
  97. package/src/leader-thread/make-leader-thread-layer.ts +1 -0
  98. package/src/leader-thread/pull-queue-set.ts +10 -1
  99. package/src/leader-thread/shutdown-channel.ts +2 -4
  100. package/src/leader-thread/types.ts +8 -2
  101. package/src/schema/EventId.ts +16 -0
  102. package/src/schema/MutationEvent.ts +1 -1
  103. package/src/schema/db-schema/dsl/field-defs.ts +1 -2
  104. package/src/schema/db-schema/dsl/mod.ts +1 -1
  105. package/src/sync/ClientSessionSyncProcessor.ts +78 -13
  106. package/src/sync/sync.ts +7 -4
  107. package/src/sync/syncstate.test.ts +32 -14
  108. package/src/sync/syncstate.ts +145 -60
  109. package/src/version.ts +1 -1
@@ -1,4 +1,4 @@
- import { isNotUndefined, LS_DEV, shouldNeverHappen, TRACE_VERBOSE } from '@livestore/utils'
+ import { casesHandled, isNotUndefined, LS_DEV, shouldNeverHappen, TRACE_VERBOSE } from '@livestore/utils'
  import type { HttpClient, Scope, Tracer } from '@livestore/utils/effect'
  import {
  BucketQueue,
@@ -18,6 +18,7 @@ import type * as otel from '@opentelemetry/api'

  import type { SqliteDb } from '../adapter-types.js'
  import { UnexpectedError } from '../adapter-types.js'
+ import { clientId } from '../devtools/devtools-messages-common.js'
  import type { LiveStoreSchema, SessionChangesetMetaRow } from '../schema/mod.js'
  import {
  EventId,
@@ -27,7 +28,7 @@ import {
  SESSION_CHANGESET_META_TABLE,
  } from '../schema/mod.js'
  import { updateRows } from '../sql-queries/index.js'
- import { InvalidPushError } from '../sync/sync.js'
+ import { InvalidPushError, LeaderAheadError } from '../sync/sync.js'
  import * as SyncState from '../sync/syncstate.js'
  import { sql } from '../util.js'
  import { makeApplyMutation } from './apply-mutation.js'
@@ -36,9 +37,12 @@ import { getBackendHeadFromDb, getClientHeadFromDb, getMutationEventsSince, upda
  import type { InitialBlockingSyncContext, InitialSyncInfo, LeaderSyncProcessor } from './types.js'
  import { LeaderThreadCtx } from './types.js'

- type PushQueueItem = [
+ export const BACKEND_PUSH_BATCH_SIZE = 50
+
+ type LocalPushQueueItem = [
  mutationEvent: MutationEvent.EncodedWithMeta,
- deferred: Deferred.Deferred<void, InvalidPushError> | undefined,
+ deferred: Deferred.Deferred<void, LeaderAheadError> | undefined,
+ generation: number,
  ]

  /**
@@ -68,12 +72,14 @@ export const makeLeaderSyncProcessor = ({
  schema,
  dbMissing,
  dbMutationLog,
+ clientId,
  initialBlockingSyncContext,
  }: {
  schema: LiveStoreSchema
  /** Only used to know whether we can safely query dbMutationLog during setup execution */
  dbMissing: boolean
  dbMutationLog: SqliteDb
+ clientId: string
  initialBlockingSyncContext: InitialBlockingSyncContext
  }): Effect.Effect<LeaderSyncProcessor, UnexpectedError, Scope.Scope> =>
  Effect.gen(function* () {
@@ -86,6 +92,13 @@ export const makeLeaderSyncProcessor = ({
  return mutationDef.options.clientOnly
  }

+ /**
+ * Tracks generations of queued local push events.
+ * If a batch is rejected, all subsequent push queue items with the same generation are also rejected,
+ * even if they would be valid on their own.
+ */
+ const currentLocalPushGenerationRef = { current: 0 }
+
  // This context depends on data from `boot`, we should find a better implementation to avoid this ref indirection.
  const ctxRef = {
  current: undefined as
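
The currentLocalPushGenerationRef introduced above drives a generation-based invalidation scheme: every queued local push records the generation that was current when it was enqueued, and a rejection bumps the generation so that already-queued items from the stale generation can be rejected or dropped wholesale (see the later hunks in this file). A minimal standalone sketch of the idea, in plain TypeScript with hypothetical names, not the actual LiveStore implementation:

// Hypothetical, simplified model of generation-based invalidation.
type QueuedPush = { event: unknown; generation: number }

const generationRef = { current: 0 }
const queue: QueuedPush[] = []

const enqueue = (event: unknown): void => {
  // Each item remembers the generation that was current when it was enqueued.
  queue.push({ event, generation: generationRef.current })
}

const rejectCurrentGeneration = (): void => {
  // Bumping the generation marks everything enqueued before the rejection as stale.
  generationRef.current++
}

const takeBatch = (max: number): QueuedPush[] => {
  // Stale items are silently dropped when a batch is taken for processing.
  const batch: QueuedPush[] = []
  while (queue.length > 0 && batch.length < max) {
    const item = queue.shift()!
    if (item.generation === generationRef.current) batch.push(item)
  }
  return batch
}
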
@@ -97,7 +110,7 @@ export const makeLeaderSyncProcessor = ({
  },
  }

- const localPushesQueue = yield* BucketQueue.make<PushQueueItem>()
+ const localPushesQueue = yield* BucketQueue.make<LocalPushQueueItem>()
  const localPushesLatch = yield* Effect.makeLatch(true)
  const pullLatch = yield* Effect.makeLatch(true)

@@ -106,20 +119,36 @@
  // TODO validate batch
  if (newEvents.length === 0) return

+ // if (options.generation < currentLocalPushGenerationRef.current) {
+ // debugger
+ // // We can safely drop this batch as it's from a previous push generation
+ // return
+ // }
+
+ if (clientId === 'client-b') {
+ // console.log(
+ // 'push from client session',
+ // newEvents.map((item) => item.toJSON()),
+ // )
+ }
+
  const waitForProcessing = options?.waitForProcessing ?? false
+ const generation = currentLocalPushGenerationRef.current

  if (waitForProcessing) {
- const deferreds = yield* Effect.forEach(newEvents, () => Deferred.make<void, InvalidPushError>())
+ const deferreds = yield* Effect.forEach(newEvents, () => Deferred.make<void, LeaderAheadError>())

  const items = newEvents.map(
- (mutationEventEncoded, i) => [mutationEventEncoded, deferreds[i]] as PushQueueItem,
+ (mutationEventEncoded, i) => [mutationEventEncoded, deferreds[i], generation] as LocalPushQueueItem,
  )

  yield* BucketQueue.offerAll(localPushesQueue, items)

  yield* Effect.all(deferreds)
  } else {
- const items = newEvents.map((mutationEventEncoded) => [mutationEventEncoded, undefined] as PushQueueItem)
+ const items = newEvents.map(
+ (mutationEventEncoded) => [mutationEventEncoded, undefined, generation] as LocalPushQueueItem,
+ )
  yield* BucketQueue.offerAll(localPushesQueue, items)
  }
  }).pipe(
@@ -153,14 +182,14 @@
  })

  yield* push([mutationEventEncoded])
- }).pipe(Effect.catchTag('InvalidPushError', Effect.orDie))
+ }).pipe(Effect.catchTag('LeaderAheadError', Effect.orDie))

  // Starts various background loops
  const boot: LeaderSyncProcessor['boot'] = ({ dbReady }) =>
  Effect.gen(function* () {
  const span = yield* Effect.currentSpan.pipe(Effect.orDie)
  const otelSpan = yield* OtelTracer.currentOtelSpan.pipe(Effect.catchAll(() => Effect.succeed(undefined)))
- const { devtools } = yield* LeaderThreadCtx
+ const { devtools, shutdownChannel } = yield* LeaderThreadCtx

  ctxRef.current = {
  otelSpan,
@@ -205,6 +234,12 @@
  yield* BucketQueue.offerAll(syncBackendQueue, filteredBatch)
  }

+ const shutdownOnError = (cause: unknown) =>
+ Effect.gen(function* () {
+ yield* shutdownChannel.send(UnexpectedError.make({ cause }))
+ yield* Effect.die(cause)
+ })
+
  yield* backgroundApplyLocalPushes({
  localPushesLatch,
  localPushesQueue,
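
The shutdownOnError helper added in the hunk above is threaded through each background fiber below via Effect.catchAllCause, so a failing loop broadcasts an UnexpectedError on the leader's shutdown channel before the fiber dies instead of failing silently. A hedged sketch of that supervision pattern; the channel shape is assumed here, not the real WebChannel type:

import { Effect } from '@livestore/utils/effect'

// Assumed shape of the shutdown channel; the real one carries the `All` schema union.
declare const shutdownChannel: { send: (msg: unknown) => Effect.Effect<void> }
declare const backgroundLoop: Effect.Effect<void>

const shutdownOnError = (cause: unknown) =>
  Effect.gen(function* () {
    // Broadcast the failure to listeners (e.g. client sessions) before dying.
    yield* shutdownChannel.send({ _tag: 'UnexpectedError', cause })
    return yield* Effect.die(cause)
  })

// Any defect or failure in the forked loop is surfaced via the channel first.
const supervised = backgroundLoop.pipe(Effect.catchAllCause(shutdownOnError), Effect.forkScoped)
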
@@ -214,7 +249,8 @@
  schema,
  isLocalEvent,
  otelSpan,
- }).pipe(Effect.tapCauseLogPretty, Effect.forkScoped)
+ currentLocalPushGenerationRef,
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped)

  const backendPushingFiberHandle = yield* FiberHandle.make()

@@ -225,7 +261,7 @@
  syncBackendQueue,
  otelSpan,
  devtoolsLatch: ctxRef.current?.devtoolsLatch,
- }).pipe(Effect.tapCauseLogPretty),
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError)),
  )

  yield* backgroundBackendPulling({
@@ -249,7 +285,7 @@
  syncBackendQueue,
  otelSpan,
  devtoolsLatch: ctxRef.current?.devtoolsLatch,
- }).pipe(Effect.tapCauseLogPretty),
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError)),
  )
  }),
  syncStateSref,
@@ -258,7 +294,7 @@
  otelSpan,
  initialBlockingSyncContext,
  devtoolsLatch: ctxRef.current?.devtoolsLatch,
- }).pipe(Effect.tapCauseLogPretty, Effect.forkScoped)
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped)

  return { initialLeaderHead: initialLocalHead }
  }).pipe(Effect.withSpanScoped('@livestore/common:leader-thread:syncing'))
@@ -287,32 +323,48 @@ const backgroundApplyLocalPushes = ({
  schema,
  isLocalEvent,
  otelSpan,
+ currentLocalPushGenerationRef,
  }: {
  pullLatch: Effect.Latch
  localPushesLatch: Effect.Latch
- localPushesQueue: BucketQueue.BucketQueue<PushQueueItem>
+ localPushesQueue: BucketQueue.BucketQueue<LocalPushQueueItem>
  syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
  syncBackendQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
  schema: LiveStoreSchema
  isLocalEvent: (mutationEventEncoded: MutationEvent.EncodedWithMeta) => boolean
  otelSpan: otel.Span | undefined
+ currentLocalPushGenerationRef: { current: number }
  }) =>
  Effect.gen(function* () {
- const { connectedClientSessionPullQueues } = yield* LeaderThreadCtx
+ const { connectedClientSessionPullQueues, clientId } = yield* LeaderThreadCtx

  const applyMutationItems = yield* makeApplyMutationItems

  while (true) {
  // TODO make batch size configurable
  const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, 10)
- const [newEvents, deferreds] = ReadonlyArray.unzip(batchItems)

  // Wait for the backend pulling to finish
  yield* localPushesLatch.await

- // Prevent the backend pulling from starting until this local push is finished
+ // Prevent backend pull processing until this local push is finished
  yield* pullLatch.close

+ // Since the generation might have changed since enqueuing, we need to filter out items with older generation
+ // It's important that we filter after we got localPushesLatch, otherwise we might filter with the old generation
+ const filteredBatchItems = batchItems
+ .filter(([_1, _2, generation]) => generation === currentLocalPushGenerationRef.current)
+ .map(([mutationEventEncoded, deferred]) => [mutationEventEncoded, deferred] as const)
+
+ if (filteredBatchItems.length === 0) {
+ // console.log('dropping old-gen batch', currentLocalPushGenerationRef.current)
+ // Allow the backend pulling to start
+ yield* pullLatch.open
+ continue
+ }
+
+ const [newEvents, deferreds] = ReadonlyArray.unzip(filteredBatchItems)
+
  const syncState = yield* syncStateSref
  if (syncState === undefined) return shouldNeverHappen('Not initialized')

@@ -323,42 +375,82 @@
  isEqualEvent: MutationEvent.isEqualEncoded,
  })

- if (updateResult._tag === 'rebase') {
- return shouldNeverHappen('The leader thread should never have to rebase due to a local push')
- } else if (updateResult._tag === 'reject') {
- otelSpan?.addEvent('local-push:reject', {
- batchSize: newEvents.length,
- updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
- })
+ switch (updateResult._tag) {
+ case 'unexpected-error': {
+ otelSpan?.addEvent('local-push:unexpected-error', {
+ batchSize: newEvents.length,
+ newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
+ })
+ return yield* Effect.fail(updateResult.cause)
+ }
+ case 'rebase': {
+ return shouldNeverHappen('The leader thread should never have to rebase due to a local push')
+ }
+ case 'reject': {
+ otelSpan?.addEvent('local-push:reject', {
+ batchSize: newEvents.length,
+ updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
+ })
+
+ /*
+
+ TODO: how to test this?
+ */
+ currentLocalPushGenerationRef.current++
+
+ const nextGeneration = currentLocalPushGenerationRef.current

- const providedId = newEvents.at(0)!.id
- const remainingEvents = yield* BucketQueue.takeAll(localPushesQueue)
- const allDeferreds = [...deferreds, ...remainingEvents.map(([_, deferred]) => deferred)].filter(isNotUndefined)
- yield* Effect.forEach(allDeferreds, (deferred) =>
- Deferred.fail(
- deferred,
- InvalidPushError.make({
- // TODO improve error handling so it differentiates between a push being rejected
- // because of itself or because of another push
- reason: {
- _tag: 'LeaderAhead',
+ const providedId = newEvents.at(0)!.id
+ // All subsequent pushes with same generation should be rejected as well
+ // We're also handling the case where the localPushQueue already contains events
+ // from the next generation which we preserve in the queue
+ const remainingEventsMatchingGeneration = yield* BucketQueue.takeSplitWhere(
+ localPushesQueue,
+ (item) => item[2] >= nextGeneration,
+ )
+
+ if ((yield* BucketQueue.size(localPushesQueue)) > 0) {
+ console.log('localPushesQueue is not empty', yield* BucketQueue.size(localPushesQueue))
+ debugger
+ }
+
+ const allDeferredsToReject = [
+ ...deferreds,
+ ...remainingEventsMatchingGeneration.map(([_, deferred]) => deferred),
+ ].filter(isNotUndefined)
+
+ yield* Effect.forEach(allDeferredsToReject, (deferred) =>
+ Deferred.fail(
+ deferred,
+ LeaderAheadError.make({
  minimumExpectedId: updateResult.expectedMinimumId,
  providedId,
- },
- }),
- ),
- )
+ // nextGeneration,
+ }),
+ ),
+ )

- // Allow the backend pulling to start
- yield* pullLatch.open
+ // Allow the backend pulling to start
+ yield* pullLatch.open

- // In this case we're skipping state update and down/upstream processing
- // We've cleared the local push queue and are now waiting for new local pushes / backend pulls
- continue
+ // In this case we're skipping state update and down/upstream processing
+ // We've cleared the local push queue and are now waiting for new local pushes / backend pulls
+ continue
+ }
+ case 'advance': {
+ break
+ }
+ default: {
+ casesHandled(updateResult)
+ }
  }

  yield* SubscriptionRef.set(syncStateSref, updateResult.newSyncState)

+ if (clientId === 'client-b') {
+ // yield* Effect.log('offer upstream-advance due to local-push')
+ // debugger
+ }
  yield* connectedClientSessionPullQueues.offer({
  payload: { _tag: 'upstream-advance', newEvents: updateResult.newEvents },
  remaining: 0,
@@ -387,7 +479,7 @@ const backgroundApplyLocalPushes = ({
  type ApplyMutationItems = (_: {
  batchItems: ReadonlyArray<MutationEvent.EncodedWithMeta>
  /** Indexes are aligned with `batchItems` */
- deferreds: ReadonlyArray<Deferred.Deferred<void, InvalidPushError> | undefined> | undefined
+ deferreds: ReadonlyArray<Deferred.Deferred<void, LeaderAheadError> | undefined> | undefined
  }) => Effect.Effect<void, UnexpectedError>

  // TODO how to handle errors gracefully
@@ -466,6 +558,7 @@ const backgroundBackendPulling = ({
  dbMutationLog,
  connectedClientSessionPullQueues,
  schema,
+ clientId,
  } = yield* LeaderThreadCtx

  if (syncBackend === undefined) return
@@ -503,6 +596,12 @@

  if (updateResult._tag === 'reject') {
  return shouldNeverHappen('The leader thread should never reject upstream advances')
+ } else if (updateResult._tag === 'unexpected-error') {
+ otelSpan?.addEvent('backend-pull:unexpected-error', {
+ newEventsCount: newEvents.length,
+ newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
+ })
+ return yield* Effect.fail(updateResult.cause)
  }

  const newBackendHead = newEvents.at(-1)!.id
@@ -542,6 +641,9 @@
  updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
  })

+ if (clientId === 'client-b') {
+ // yield* Effect.log('offer upstream-advance due to pull')
+ }
  yield* connectedClientSessionPullQueues.offer({
  payload: { _tag: 'upstream-advance', newEvents: updateResult.newEvents, trimRollbackUntil },
  remaining,
@@ -617,15 +719,23 @@ const rollback = ({
  }
  }

- // Delete the changeset rows
- db.execute(
- sql`DELETE FROM ${SESSION_CHANGESET_META_TABLE} WHERE (idGlobal, idClient) IN (${eventIdsToRollback.map((id) => `(${id.global}, ${id.client})`).join(', ')})`,
+ const eventIdPairChunks = ReadonlyArray.chunksOf(100)(
+ eventIdsToRollback.map((id) => `(${id.global}, ${id.client})`),
  )

+ // Delete the changeset rows
+ for (const eventIdPairChunk of eventIdPairChunks) {
+ db.execute(
+ sql`DELETE FROM ${SESSION_CHANGESET_META_TABLE} WHERE (idGlobal, idClient) IN (${eventIdPairChunk.join(', ')})`,
+ )
+ }
+
  // Delete the mutation log rows
- dbMutationLog.execute(
- sql`DELETE FROM ${MUTATION_LOG_META_TABLE} WHERE (idGlobal, idClient) IN (${eventIdsToRollback.map((id) => `(${id.global}, ${id.client})`).join(', ')})`,
- )
+ for (const eventIdPairChunk of eventIdPairChunks) {
+ dbMutationLog.execute(
+ sql`DELETE FROM ${MUTATION_LOG_META_TABLE} WHERE (idGlobal, idClient) IN (${eventIdPairChunk.join(', ')})`,
+ )
+ }
  }).pipe(
  Effect.withSpan('@livestore/common:leader-thread:syncing:rollback', {
  attributes: { count: eventIdsToRollback.length },
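
The rollback above now splits the (idGlobal, idClient) pairs into chunks of 100 via ReadonlyArray.chunksOf before building the DELETE ... IN (...) statements, which keeps individual SQL statements bounded for large rollbacks. A standalone sketch of the same chunking idea, with a hypothetical execute helper standing in for db.execute / dbMutationLog.execute:

// Sketch: delete rows referenced by composite ids in bounded-size statements.
type EventId = { global: number; client: number }

// Hypothetical stand-in for the real database execute helpers.
declare const execute: (sqlText: string) => void

const chunksOf = <T>(size: number, items: ReadonlyArray<T>): T[][] => {
  const out: T[][] = []
  for (let i = 0; i < items.length; i += size) out.push(items.slice(i, i + size))
  return out
}

const deleteByEventIds = (table: string, ids: ReadonlyArray<EventId>): void => {
  const pairs = ids.map((id) => `(${id.global}, ${id.client})`)
  for (const chunk of chunksOf(100, pairs)) {
    execute(`DELETE FROM ${table} WHERE (idGlobal, idClient) IN (${chunk.join(', ')})`)
  }
}
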
@@ -675,7 +785,7 @@ const backgroundBackendPushing = ({
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)

  // TODO make batch size configurable
- const queueItems = yield* BucketQueue.takeBetween(syncBackendQueue, 1, 50)
+ const queueItems = yield* BucketQueue.takeBetween(syncBackendQueue, 1, BACKEND_PUSH_BATCH_SIZE)

  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)

@@ -693,7 +803,7 @@

  if (pushResult._tag === 'Left') {
  if (LS_DEV) {
- yield* Effect.logDebug('backend-push-error', { error: pushResult.left.toString() })
+ yield* Effect.logDebug('handled backend-push-error', { error: pushResult.left.toString() })
  }
  otelSpan?.addEvent('backend-push-error', { error: pushResult.left.toString() })
  // wait for interrupt caused by background pulling which will then restart pushing
@@ -1,10 +1,11 @@
- import { memoizeByRef, shouldNeverHappen } from '@livestore/utils'
+ import { LS_DEV, memoizeByRef, shouldNeverHappen } from '@livestore/utils'
  import type { Scope } from '@livestore/utils/effect'
  import { Effect, Option, Schema } from '@livestore/utils/effect'

- import type { SqliteDb, SqliteError, UnexpectedError } from '../index.js'
+ import type { PreparedBindValues, SqliteDb, SqliteError, UnexpectedError } from '../index.js'
  import { getExecArgsFromMutation } from '../mutation.js'
  import {
+ EventId,
  type LiveStoreSchema,
  MUTATION_LOG_META_TABLE,
  type MutationEvent,
@@ -127,6 +128,20 @@ const insertIntoMutationLog = (
  const mutationDefSchemaHash =
  mutationDefSchemaHashMap.get(mutationName) ?? shouldNeverHappen(`Unknown mutation: ${mutationName}`)

+ if (LS_DEV && mutationEventEncoded.parentId.global !== EventId.ROOT.global) {
+ const parentMutationExists =
+ dbMutationLog.select<{ count: number }>(
+ `SELECT COUNT(*) as count FROM ${MUTATION_LOG_META_TABLE} WHERE idGlobal = ? AND idClient = ?`,
+ [mutationEventEncoded.parentId.global, mutationEventEncoded.parentId.client] as any as PreparedBindValues,
+ )[0]!.count === 1
+
+ if (parentMutationExists === false) {
+ shouldNeverHappen(
+ `Parent mutation ${mutationEventEncoded.parentId.global},${mutationEventEncoded.parentId.client} does not exist`,
+ )
+ }
+ }
+
  // TODO use prepared statements
  yield* execSql(
  dbMutationLog,
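
The new LS_DEV guard in insertIntoMutationLog asserts that the parent event is already present in the mutation log before a child event is inserted. A simplified standalone sketch of that kind of dev-only integrity check; the selectCount helper, table name, and ROOT sentinel are hypothetical, not the LiveStore API:

// Sketch: dev-only referential check before appending a child event to the log.
type EventId = { global: number; client: number }

declare const DEV: boolean // stand-in for LS_DEV
// Hypothetical query helper; the real code goes through dbMutationLog.select with bind values.
declare const selectCount: (sqlText: string, bindValues: ReadonlyArray<number>) => number

const ROOT_GLOBAL = -1 // assumed sentinel standing in for EventId.ROOT.global

const assertParentExists = (parentId: EventId): void => {
  if (!DEV || parentId.global === ROOT_GLOBAL) return
  const count = selectCount(
    'SELECT COUNT(*) as count FROM mutation_log WHERE idGlobal = ? AND idClient = ?',
    [parentId.global, parentId.client],
  )
  if (count !== 1) {
    throw new Error(`Parent mutation ${parentId.global},${parentId.client} does not exist`)
  }
}
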
@@ -65,6 +65,7 @@ export const makeLeaderThreadLayer = ({
  dbMissing,
  dbMutationLog,
  initialBlockingSyncContext,
+ clientId,
  })

  const extraIncomingMessagesQueue = yield* Queue.unbounded<Devtools.Leader.MessageToApp>().pipe(
@@ -2,7 +2,7 @@ import { Effect, Queue } from '@livestore/utils/effect'

  import * as MutationEvent from '../schema/MutationEvent.js'
  import { getMutationEventsSince } from './mutationlog.js'
- import { type PullQueueItem, type PullQueueSet } from './types.js'
+ import { LeaderThreadCtx, type PullQueueItem, type PullQueueSet } from './types.js'

  export const makePullQueueSet = Effect.gen(function* () {
  const set = new Set<Queue.Queue<PullQueueItem>>()
@@ -46,6 +46,15 @@ export const makePullQueueSet = Effect.gen(function* () {
  return
  }

+ const { clientId } = yield* LeaderThreadCtx
+ if (clientId === 'client-b') {
+ // console.log(
+ // 'offer',
+ // item.payload._tag,
+ // item.payload.newEvents.map((_) => _.toJSON()),
+ // )
+ }
+
  for (const queue of set) {
  yield* Queue.offer(queue, item)
  }
@@ -1,11 +1,9 @@
  import type { WebChannel } from '@livestore/utils/effect'
  import { Schema } from '@livestore/utils/effect'

- import { IntentionalShutdownCause } from '../index.js'
+ import { IntentionalShutdownCause, UnexpectedError } from '../index.js'

- export class DedicatedWorkerDisconnectBroadcast extends Schema.TaggedStruct('DedicatedWorkerDisconnectBroadcast', {}) {}
-
- export class All extends Schema.Union(IntentionalShutdownCause, DedicatedWorkerDisconnectBroadcast) {}
+ export class All extends Schema.Union(IntentionalShutdownCause, UnexpectedError) {}

  /**
  * Used internally by an adapter to shutdown gracefully.
@@ -14,7 +14,7 @@ import { Context, Schema } from '@livestore/utils/effect'
  import type {
  BootStatus,
  Devtools,
- InvalidPushError,
+ LeaderAheadError,
  MakeSqliteDb,
  MigrationsReport,
  PersistenceInfo,
@@ -126,13 +126,19 @@ export interface LeaderSyncProcessor {
  /** `batch` needs to follow the same rules as `batch` in `SyncBackend.push` */
  batch: ReadonlyArray<MutationEvent.EncodedWithMeta>,
  options?: {
+ /**
+ * This generation number is used to automatically reject subsequent pushes
+ * of a previously rejected push from a client session. This might occur in
+ * certain concurrent scenarios.
+ */
+ // generation: number
  /**
  * If true, the effect will only finish when the local push has been processed (i.e. succeeded or was rejected).
  * @default false
  */
  waitForProcessing?: boolean
  },
- ) => Effect.Effect<void, InvalidPushError>
+ ) => Effect.Effect<void, LeaderAheadError>

  pushPartial: (args: {
  mutationEvent: MutationEvent.PartialAnyEncoded
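
LeaderSyncProcessor.push now fails with the narrower LeaderAheadError instead of InvalidPushError, and the commented-out generation option points at the rejection-propagation scheme described earlier. A hedged usage sketch of the waitForProcessing flag; the processor value and its minimal shape are assumed here, and the error handling is purely illustrative:

import { Effect } from '@livestore/utils/effect'

// Assumed minimal shape of the processor; the real interface lives in leader-thread/types.ts.
declare const syncProcessor: {
  push: (
    batch: ReadonlyArray<unknown>,
    options?: { waitForProcessing?: boolean },
  ) => Effect.Effect<void, { readonly _tag: 'LeaderAheadError' }>
}
declare const batch: ReadonlyArray<unknown>

// With waitForProcessing: true the effect only completes once the leader has accepted
// or rejected the batch, so a LeaderAheadError surfaces right here.
const program = syncProcessor
  .push(batch, { waitForProcessing: true })
  .pipe(Effect.catchTag('LeaderAheadError', () => Effect.logWarning('leader is ahead; client session must rebase')))
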
@@ -33,6 +33,22 @@ export const compare = (a: EventId, b: EventId) => {
  return a.client - b.client
  }

+ /**
+ * Convert an event id to a string representation.
+ */
+ export const toString = (id: EventId) => `(${id.global},${id.client})`
+
+ /**
+ * Convert a string representation of an event id to an event id.
+ */
+ export const fromString = (str: string): EventId => {
+ const [global, client] = str.slice(1, -1).split(',').map(Number)
+ if (global === undefined || client === undefined) {
+ throw new Error('Invalid event id string')
+ }
+ return { global, client } as EventId
+ }
+
  export const isEqual = (a: EventId, b: EventId) => a.global === b.global && a.client === b.client

  export type EventIdPair = { id: EventId; parentId: EventId }
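
The new toString/fromString helpers give event ids a (global,client) textual form, with fromString assuming well-formed input. A small round-trip sketch, written as a standalone reimplementation for illustration (with an extra NaN guard), not the exported module:

type EventId = { global: number; client: number }

const toString = (id: EventId): string => `(${id.global},${id.client})`

const fromString = (str: string): EventId => {
  // Strip the surrounding parentheses, then parse both components.
  const [global, client] = str.slice(1, -1).split(',').map(Number)
  if (global === undefined || client === undefined || Number.isNaN(global) || Number.isNaN(client)) {
    throw new Error('Invalid event id string')
  }
  return { global, client }
}

// Round trip: '(12,3)' parses back to the same id.
console.log(fromString(toString({ global: 12, client: 3 }))) // { global: 12, client: 3 }
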
@@ -169,7 +169,7 @@ export class EncodedWithMeta extends Schema.Class<EncodedWithMeta>('MutationEven
  toJSON = (): any => {
  // Only used for logging/debugging
  // - More readable way to print the id + parentId
- // - not including `meta`
+ // - not including `meta`, `clientId`, `sessionId`
  return {
  id: `(${this.id.global},${this.id.client}) → (${this.parentId.global},${this.parentId.client})`,
  mutation: this.mutation,
@@ -53,8 +53,7 @@ export type ColDefFn<TColumnType extends FieldColumnType> = {
  const TNullable extends boolean = false,
  const TDefault extends TDecoded | SqlDefaultValue | NoDefault | (TNullable extends true ? null : never) = NoDefault,
  const TPrimaryKey extends boolean = false,
- >(args: {
- schema?: Schema.Schema<TDecoded, TEncoded>
+ >(args: { schema?: Schema.Schema<TDecoded, TEncoded>
  default?: TDefault
  nullable?: TNullable
  primaryKey?: TPrimaryKey
@@ -30,7 +30,7 @@ export type DbSchemaFromInputSchema<TSchemaInput extends DbSchemaInput> =
  export const makeDbSchema = <TDbSchemaInput extends DbSchemaInput>(
  schema: TDbSchemaInput,
  ): DbSchemaFromInputSchema<TDbSchemaInput> => {
- return Array.isArray(schema) ? Object.fromEntries(schema.map((_) => [_.name, _])) : schema
+ return Array.isArray(schema) ? Object.fromEntries(schema.map((_) => [_.name, _])) : schema as any
  }

  export const table = <TTableName extends string, TColumns extends Columns, TIndexes extends Index[]>(