@livestore/common 0.3.0-dev.10 → 0.3.0-dev.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/adapter-types.d.ts +22 -13
  3. package/dist/adapter-types.d.ts.map +1 -1
  4. package/dist/adapter-types.js.map +1 -1
  5. package/dist/devtools/devtool-message-leader.d.ts +2 -0
  6. package/dist/devtools/devtool-message-leader.d.ts.map +1 -0
  7. package/dist/devtools/devtool-message-leader.js +2 -0
  8. package/dist/devtools/devtool-message-leader.js.map +1 -0
  9. package/dist/devtools/devtools-messages-client-session.d.ts +297 -0
  10. package/dist/devtools/devtools-messages-client-session.d.ts.map +1 -0
  11. package/dist/devtools/devtools-messages-client-session.js +61 -0
  12. package/dist/devtools/devtools-messages-client-session.js.map +1 -0
  13. package/dist/devtools/devtools-messages-common.d.ts +65 -0
  14. package/dist/devtools/devtools-messages-common.d.ts.map +1 -0
  15. package/dist/devtools/devtools-messages-common.js +35 -0
  16. package/dist/devtools/devtools-messages-common.js.map +1 -0
  17. package/dist/devtools/devtools-messages-leader.d.ts +261 -0
  18. package/dist/devtools/devtools-messages-leader.d.ts.map +1 -0
  19. package/dist/devtools/devtools-messages-leader.js +85 -0
  20. package/dist/devtools/devtools-messages-leader.js.map +1 -0
  21. package/dist/devtools/devtools-messages.d.ts +3 -580
  22. package/dist/devtools/devtools-messages.d.ts.map +1 -1
  23. package/dist/devtools/devtools-messages.js +3 -174
  24. package/dist/devtools/devtools-messages.js.map +1 -1
  25. package/dist/init-singleton-tables.d.ts +2 -2
  26. package/dist/init-singleton-tables.d.ts.map +1 -1
  27. package/dist/init-singleton-tables.js.map +1 -1
  28. package/dist/leader-thread/LeaderSyncProcessor.d.ts +4 -4
  29. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  30. package/dist/leader-thread/LeaderSyncProcessor.js +50 -35
  31. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  32. package/dist/leader-thread/apply-mutation.d.ts.map +1 -1
  33. package/dist/leader-thread/apply-mutation.js +4 -4
  34. package/dist/leader-thread/apply-mutation.js.map +1 -1
  35. package/dist/leader-thread/connection.d.ts +4 -4
  36. package/dist/leader-thread/connection.d.ts.map +1 -1
  37. package/dist/leader-thread/connection.js +5 -5
  38. package/dist/leader-thread/connection.js.map +1 -1
  39. package/dist/leader-thread/leader-worker-devtools.js +17 -17
  40. package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
  41. package/dist/leader-thread/make-leader-thread-layer.d.ts +6 -6
  42. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  43. package/dist/leader-thread/make-leader-thread-layer.js +23 -10
  44. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  45. package/dist/leader-thread/mutationlog.d.ts +4 -4
  46. package/dist/leader-thread/mutationlog.d.ts.map +1 -1
  47. package/dist/leader-thread/mutationlog.js +6 -6
  48. package/dist/leader-thread/mutationlog.js.map +1 -1
  49. package/dist/leader-thread/pull-queue-set.d.ts.map +1 -1
  50. package/dist/leader-thread/recreate-db.js +19 -19
  51. package/dist/leader-thread/recreate-db.js.map +1 -1
  52. package/dist/leader-thread/types.d.ts +20 -8
  53. package/dist/leader-thread/types.d.ts.map +1 -1
  54. package/dist/leader-thread/types.js.map +1 -1
  55. package/dist/rehydrate-from-mutationlog.d.ts +3 -3
  56. package/dist/rehydrate-from-mutationlog.d.ts.map +1 -1
  57. package/dist/rehydrate-from-mutationlog.js.map +1 -1
  58. package/dist/schema-management/common.d.ts +3 -3
  59. package/dist/schema-management/common.d.ts.map +1 -1
  60. package/dist/schema-management/common.js.map +1 -1
  61. package/dist/schema-management/migrations.d.ts +4 -4
  62. package/dist/schema-management/migrations.d.ts.map +1 -1
  63. package/dist/schema-management/migrations.js.map +1 -1
  64. package/dist/sync/ClientSessionSyncProcessor.d.ts +5 -7
  65. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  66. package/dist/sync/ClientSessionSyncProcessor.js +13 -6
  67. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  68. package/dist/sync/next/test/mutation-fixtures.d.ts +7 -7
  69. package/dist/version.d.ts +1 -1
  70. package/dist/version.js +1 -1
  71. package/package.json +3 -3
  72. package/src/adapter-types.ts +18 -17
  73. package/src/devtools/devtools-messages-client-session.ts +109 -0
  74. package/src/devtools/devtools-messages-common.ts +52 -0
  75. package/src/devtools/devtools-messages-leader.ts +115 -0
  76. package/src/devtools/devtools-messages.ts +3 -246
  77. package/src/init-singleton-tables.ts +2 -2
  78. package/src/leader-thread/LeaderSyncProcessor.ts +78 -45
  79. package/src/leader-thread/apply-mutation.ts +5 -5
  80. package/src/leader-thread/connection.ts +7 -7
  81. package/src/leader-thread/leader-worker-devtools.ts +25 -18
  82. package/src/leader-thread/make-leader-thread-layer.ts +38 -17
  83. package/src/leader-thread/mutationlog.ts +9 -9
  84. package/src/leader-thread/recreate-db.ts +19 -19
  85. package/src/leader-thread/types.ts +24 -10
  86. package/src/rehydrate-from-mutationlog.ts +3 -3
  87. package/src/schema-management/common.ts +3 -3
  88. package/src/schema-management/migrations.ts +4 -4
  89. package/src/sync/ClientSessionSyncProcessor.ts +18 -13
  90. package/src/version.ts +1 -1

package/src/leader-thread/LeaderSyncProcessor.ts +78 -45

@@ -1,5 +1,5 @@
 import { isNotUndefined, shouldNeverHappen, TRACE_VERBOSE } from '@livestore/utils'
-import type { HttpClient, Scope } from '@livestore/utils/effect'
+import type { HttpClient, Scope, Tracer } from '@livestore/utils/effect'
 import {
   BucketQueue,
   Deferred,
@@ -16,7 +16,7 @@ import {
 } from '@livestore/utils/effect'
 import type * as otel from '@opentelemetry/api'

-import type { SynchronousDatabase } from '../adapter-types.js'
+import type { SqliteDb } from '../adapter-types.js'
 import { UnexpectedError } from '../adapter-types.js'
 import type { LiveStoreSchema, SessionChangesetMetaRow } from '../schema/mod.js'
 import {
@@ -67,13 +67,13 @@ type PushQueueItem = [
 export const makeLeaderSyncProcessor = ({
   schema,
   dbMissing,
-  dbLog,
+  dbMutationLog,
   initialBlockingSyncContext,
 }: {
   schema: LiveStoreSchema
-  /** Only used to know whether we can safely query dbLog during setup execution */
+  /** Only used to know whether we can safely query dbMutationLog during setup execution */
   dbMissing: boolean
-  dbLog: SynchronousDatabase
+  dbMutationLog: SqliteDb
   initialBlockingSyncContext: InitialBlockingSyncContext
 }): Effect.Effect<LeaderSyncProcessor, UnexpectedError, Scope.Scope> =>
   Effect.gen(function* () {
@@ -86,7 +86,17 @@ export const makeLeaderSyncProcessor = ({
       return mutationDef.options.localOnly
     }

-    const spanRef = { current: undefined as otel.Span | undefined }
+    // This context depends on data from `boot`, we should find a better implementation to avoid this ref indirection.
+    const ctxRef = {
+      current: undefined as
+        | undefined
+        | {
+            otelSpan: otel.Span | undefined
+            span: Tracer.Span
+            devtoolsPullLatch: Effect.Latch | undefined
+            devtoolsPushLatch: Effect.Latch | undefined
+          },
+    }

     const localPushesQueue = yield* BucketQueue.make<PushQueueItem>()
     const localPushesLatch = yield* Effect.makeLatch(true)
@@ -97,6 +107,10 @@
        // TODO validate batch
        if (newEvents.length === 0) return

+        if (ctxRef.current?.devtoolsPushLatch !== undefined) {
+          yield* ctxRef.current.devtoolsPushLatch.await
+        }
+
        const waitForProcessing = options?.waitForProcessing ?? false

        if (waitForProcessing) {
@@ -119,9 +133,7 @@
            batchSize: newEvents.length,
            batch: TRACE_VERBOSE ? newEvents : undefined,
          },
-          links: spanRef.current
-            ? [{ _tag: 'SpanLink', span: OtelTracer.makeExternalSpan(spanRef.current.spanContext()), attributes: {} }]
-            : undefined,
+          links: ctxRef.current?.span ? [{ _tag: 'SpanLink', span: ctxRef.current.span, attributes: {} }] : undefined,
        }),
      )

@@ -145,11 +157,19 @@
    // Starts various background loops
    const boot: LeaderSyncProcessor['boot'] = ({ dbReady }) =>
      Effect.gen(function* () {
-        const span = yield* OtelTracer.currentOtelSpan.pipe(Effect.catchAll(() => Effect.succeed(undefined)))
-        spanRef.current = span
+        const span = yield* Effect.currentSpan.pipe(Effect.orDie)
+        const otelSpan = yield* OtelTracer.currentOtelSpan.pipe(Effect.catchAll(() => Effect.succeed(undefined)))
+        const { devtools } = yield* LeaderThreadCtx

-        const initialBackendHead = dbMissing ? EventId.ROOT.global : getBackendHeadFromDb(dbLog)
-        const initialLocalHead = dbMissing ? EventId.ROOT : getLocalHeadFromDb(dbLog)
+        ctxRef.current = {
+          otelSpan,
+          span,
+          devtoolsPullLatch: devtools.enabled ? devtools.syncBackendPullLatch : undefined,
+          devtoolsPushLatch: devtools.enabled ? devtools.syncBackendPushLatch : undefined,
+        }
+
+        const initialBackendHead = dbMissing ? EventId.ROOT.global : getBackendHeadFromDb(dbMutationLog)
+        const initialLocalHead = dbMissing ? EventId.ROOT : getLocalHeadFromDb(dbMutationLog)

        if (initialBackendHead > initialLocalHead.global) {
          return shouldNeverHappen(
@@ -193,14 +213,14 @@
          syncBackendQueue,
          schema,
          isLocalEvent,
-          span,
+          otelSpan,
        }).pipe(Effect.tapCauseLogPretty, Effect.forkScoped)

        const backendPushingFiberHandle = yield* FiberHandle.make()

        yield* FiberHandle.run(
          backendPushingFiberHandle,
-          backgroundBackendPushing({ dbReady, syncBackendQueue, span }).pipe(Effect.tapCauseLogPretty),
+          backgroundBackendPushing({ dbReady, syncBackendQueue, otelSpan }).pipe(Effect.tapCauseLogPretty),
        )

        yield* backgroundBackendPulling({
@@ -219,14 +239,15 @@
              // Restart pushing fiber
              yield* FiberHandle.run(
                backendPushingFiberHandle,
-                backgroundBackendPushing({ dbReady, syncBackendQueue, span }).pipe(Effect.tapCauseLogPretty),
+                backgroundBackendPushing({ dbReady, syncBackendQueue, otelSpan }).pipe(Effect.tapCauseLogPretty),
              )
            }),
          syncStateSref,
          localPushesLatch,
          pullLatch,
-          span,
+          otelSpan,
          initialBlockingSyncContext,
+          devtoolsPullLatch: ctxRef.current?.devtoolsPullLatch,
        }).pipe(Effect.tapCauseLogPretty, Effect.forkScoped)
      }).pipe(Effect.withSpanScoped('@livestore/common:leader-thread:syncing'))

@@ -253,7 +274,7 @@ const backgroundApplyLocalPushes = ({
   syncBackendQueue,
   schema,
   isLocalEvent,
-  span,
+  otelSpan,
 }: {
   pullLatch: Effect.Latch
   localPushesLatch: Effect.Latch
@@ -262,7 +283,7 @@
   syncBackendQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
   schema: LiveStoreSchema
   isLocalEvent: (mutationEventEncoded: MutationEvent.EncodedWithMeta) => boolean
-  span: otel.Span | undefined
+  otelSpan: otel.Span | undefined
 }) =>
   Effect.gen(function* () {
     const { connectedClientSessionPullQueues } = yield* LeaderThreadCtx
@@ -293,7 +314,7 @@
      if (updateResult._tag === 'rebase') {
        return shouldNeverHappen('The leader thread should never have to rebase due to a local push')
      } else if (updateResult._tag === 'reject') {
-        span?.addEvent('local-push:reject', {
+        otelSpan?.addEvent('local-push:reject', {
          batchSize: newEvents.length,
          updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
        })
@@ -331,7 +352,7 @@
        remaining: 0,
      })

-      span?.addEvent('local-push', {
+      otelSpan?.addEvent('local-push', {
        batchSize: newEvents.length,
        updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
      })
@@ -361,14 +382,14 @@ type ApplyMutationItems = (_: {
 const makeApplyMutationItems: Effect.Effect<ApplyMutationItems, UnexpectedError, LeaderThreadCtx | Scope.Scope> =
   Effect.gen(function* () {
     const leaderThreadCtx = yield* LeaderThreadCtx
-    const { db, dbLog } = leaderThreadCtx
+    const { dbReadModel: db, dbMutationLog } = leaderThreadCtx

     const applyMutation = yield* makeApplyMutation

     return ({ batchItems, deferreds }) =>
       Effect.gen(function* () {
         db.execute('BEGIN TRANSACTION', undefined) // Start the transaction
-        dbLog.execute('BEGIN TRANSACTION', undefined) // Start the transaction
+        dbMutationLog.execute('BEGIN TRANSACTION', undefined) // Start the transaction

         yield* Effect.addFinalizer((exit) =>
           Effect.gen(function* () {
@@ -376,7 +397,7 @@ const makeApplyMutationItems: Effect.Effect<ApplyMutationItems, UnexpectedError,

            // Rollback in case of an error
            db.execute('ROLLBACK', undefined)
-            dbLog.execute('ROLLBACK', undefined)
+            dbMutationLog.execute('ROLLBACK', undefined)
          }),
        )

@@ -389,7 +410,7 @@ const makeApplyMutationItems: Effect.Effect<ApplyMutationItems, UnexpectedError,
        }

        db.execute('COMMIT', undefined) // Commit the transaction
-        dbLog.execute('COMMIT', undefined) // Commit the transaction
+        dbMutationLog.execute('COMMIT', undefined) // Commit the transaction
      }).pipe(
        Effect.uninterruptible,
        Effect.scoped,
@@ -406,10 +427,11 @@ const backgroundBackendPulling = ({
   initialBackendHead,
   isLocalEvent,
   restartBackendPushing,
-  span,
+  otelSpan,
   syncStateSref,
   localPushesLatch,
   pullLatch,
+  devtoolsPullLatch,
   initialBlockingSyncContext,
 }: {
   dbReady: Deferred.Deferred<void>
@@ -418,14 +440,21 @@
   restartBackendPushing: (
     filteredRebasedPending: ReadonlyArray<MutationEvent.EncodedWithMeta>,
   ) => Effect.Effect<void, UnexpectedError, LeaderThreadCtx | HttpClient.HttpClient>
-  span: otel.Span | undefined
+  otelSpan: otel.Span | undefined
   syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
   localPushesLatch: Effect.Latch
   pullLatch: Effect.Latch
+  devtoolsPullLatch: Effect.Latch | undefined
   initialBlockingSyncContext: InitialBlockingSyncContext
 }) =>
   Effect.gen(function* () {
-    const { syncBackend, db, dbLog, connectedClientSessionPullQueues, schema } = yield* LeaderThreadCtx
+    const {
+      syncBackend,
+      dbReadModel: db,
+      dbMutationLog,
+      connectedClientSessionPullQueues,
+      schema,
+    } = yield* LeaderThreadCtx

     if (syncBackend === undefined) return

@@ -437,6 +466,10 @@
      Effect.gen(function* () {
        if (newEvents.length === 0) return

+        if (devtoolsPullLatch !== undefined) {
+          yield* devtoolsPullLatch.await
+        }
+
        // Prevent more local pushes from being processed until this pull is finished
        yield* localPushesLatch.close

@@ -462,10 +495,10 @@

        const newBackendHead = newEvents.at(-1)!.id

-        updateBackendHead(dbLog, newBackendHead)
+        updateBackendHead(dbMutationLog, newBackendHead)

        if (updateResult._tag === 'rebase') {
-          span?.addEvent('backend-pull:rebase', {
+          otelSpan?.addEvent('backend-pull:rebase', {
            newEventsCount: newEvents.length,
            newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
            rollbackCount: updateResult.eventsToRollback.length,
@@ -479,7 +512,7 @@
          yield* restartBackendPushing(filteredRebasedPending)

          if (updateResult.eventsToRollback.length > 0) {
-            yield* rollback({ db, dbLog, eventIdsToRollback: updateResult.eventsToRollback.map((_) => _.id) })
+            yield* rollback({ db, dbMutationLog, eventIdsToRollback: updateResult.eventsToRollback.map((_) => _.id) })
          }

          yield* connectedClientSessionPullQueues.offer({
@@ -492,7 +525,7 @@
            remaining,
          })
        } else {
-          span?.addEvent('backend-pull:advance', {
+          otelSpan?.addEvent('backend-pull:advance', {
            newEventsCount: newEvents.length,
            updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
          })
@@ -549,11 +582,11 @@

 const rollback = ({
   db,
-  dbLog,
+  dbMutationLog,
   eventIdsToRollback,
 }: {
-  db: SynchronousDatabase
-  dbLog: SynchronousDatabase
+  db: SqliteDb
+  dbMutationLog: SqliteDb
   eventIdsToRollback: EventId.EventId[]
 }) =>
   Effect.gen(function* () {
@@ -578,7 +611,7 @@ const rollback = ({
    )

    // Delete the mutation log rows
-    dbLog.execute(
+    dbMutationLog.execute(
      sql`DELETE FROM ${MUTATION_LOG_META_TABLE} WHERE (idGlobal, idLocal) IN (${eventIdsToRollback.map((id) => `(${id.global}, ${id.local})`).join(', ')})`,
    )
  }).pipe(
@@ -589,7 +622,7 @@

 const getCursorInfo = (remoteHead: EventId.GlobalEventId) =>
   Effect.gen(function* () {
-    const { dbLog } = yield* LeaderThreadCtx
+    const { dbMutationLog } = yield* LeaderThreadCtx

     if (remoteHead === EventId.ROOT.global) return Option.none()

@@ -598,7 +631,7 @@ const getCursorInfo = (remoteHead: EventId.GlobalEventId) =>
    }).pipe(Schema.pluck('syncMetadataJson'), Schema.Array, Schema.head)

    const syncMetadataOption = yield* Effect.sync(() =>
-      dbLog.select<{ syncMetadataJson: string }>(
+      dbMutationLog.select<{ syncMetadataJson: string }>(
        sql`SELECT syncMetadataJson FROM ${MUTATION_LOG_META_TABLE} WHERE idGlobal = ${remoteHead} ORDER BY idLocal ASC LIMIT 1`,
      ),
    ).pipe(Effect.andThen(Schema.decode(MutationlogQuerySchema)), Effect.map(Option.flatten), Effect.orDie)
@@ -612,14 +645,14 @@
 const backgroundBackendPushing = ({
   dbReady,
   syncBackendQueue,
-  span,
+  otelSpan,
 }: {
   dbReady: Deferred.Deferred<void>
   syncBackendQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
-  span: otel.Span | undefined
+  otelSpan: otel.Span | undefined
 }) =>
   Effect.gen(function* () {
-    const { syncBackend, dbLog } = yield* LeaderThreadCtx
+    const { syncBackend, dbMutationLog } = yield* LeaderThreadCtx
     if (syncBackend === undefined) return

     yield* dbReady
@@ -632,7 +665,7 @@ const backgroundBackendPushing = ({

      yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)

-      span?.addEvent('backend-push', {
+      otelSpan?.addEvent('backend-push', {
        batchSize: queueItems.length,
        batch: TRACE_VERBOSE ? JSON.stringify(queueItems) : undefined,
      })
@@ -641,7 +674,7 @@
      const pushResult = yield* syncBackend.push(queueItems.map((_) => _.toGlobal())).pipe(Effect.either)

      if (pushResult._tag === 'Left') {
-        span?.addEvent('backend-push-error', { error: pushResult.left.toString() })
+        otelSpan?.addEvent('backend-push-error', { error: pushResult.left.toString() })
        // wait for interrupt caused by background pulling which will then restart pushing
        return yield* Effect.never
      }
@@ -652,7 +685,7 @@
      for (let i = 0; i < queueItems.length; i++) {
        const mutationEventEncoded = queueItems[i]!
        yield* execSql(
-          dbLog,
+          dbMutationLog,
          ...updateRows({
            tableName: MUTATION_LOG_META_TABLE,
            columns: mutationLogMetaTable.sqliteDef.columns,
@@ -664,7 +697,7 @@
    }
  }).pipe(Effect.interruptible, Effect.withSpan('@livestore/common:leader-thread:syncing:backend-pushing'))

-const trimChangesetRows = (db: SynchronousDatabase, newHead: EventId.EventId) => {
+const trimChangesetRows = (db: SqliteDb, newHead: EventId.EventId) => {
  // Since we're using the session changeset rows to query for the current head,
  // we're keeping at least one row for the current head, and thus are using `<` instead of `<=`
  db.execute(sql`DELETE FROM ${SESSION_CHANGESET_META_TABLE} WHERE idGlobal < ${newHead.global}`)

package/src/leader-thread/apply-mutation.ts +5 -5

@@ -2,7 +2,7 @@ import { memoizeByRef, shouldNeverHappen } from '@livestore/utils'
 import type { Scope } from '@livestore/utils/effect'
 import { Effect, Option, Schema } from '@livestore/utils/effect'

-import type { SqliteError, SynchronousDatabase, UnexpectedError } from '../index.js'
+import type { SqliteDb, SqliteError, UnexpectedError } from '../index.js'
 import { getExecArgsFromMutation } from '../mutation.js'
 import {
   type LiveStoreSchema,
@@ -38,7 +38,7 @@ export const makeApplyMutation: Effect.Effect<ApplyMutation, never, Scope.Scope

   return (mutationEventEncoded, options) =>
     Effect.gen(function* () {
-      const { schema, db, dbLog } = leaderThreadCtx
+      const { schema, dbReadModel: db, dbMutationLog } = leaderThreadCtx
       const skipMutationLog = options?.skipMutationLog ?? false

       const mutationName = mutationEventEncoded.mutation
@@ -92,7 +92,7 @@ export const makeApplyMutation: Effect.Effect<ApplyMutation, never, Scope.Scope
      // write to mutation_log
      const excludeFromMutationLog = shouldExcludeMutationFromLog(mutationName, mutationEventEncoded)
      if (skipMutationLog === false && excludeFromMutationLog === false) {
-        yield* insertIntoMutationLog(mutationEventEncoded, dbLog, mutationDefSchemaHashMap)
+        yield* insertIntoMutationLog(mutationEventEncoded, dbMutationLog, mutationDefSchemaHashMap)
      } else {
        // console.debug('[@livestore/common:leader-thread] skipping mutation log write', mutation, statementSql, bindValues)
      }
@@ -111,7 +111,7 @@ export const makeApplyMutation: Effect.Effect<ApplyMutation, never, Scope.Scope

 const insertIntoMutationLog = (
   mutationEventEncoded: MutationEvent.AnyEncoded,
-  dbLog: SynchronousDatabase,
+  dbMutationLog: SqliteDb,
   mutationDefSchemaHashMap: Map<string, number>,
 ) =>
   Effect.gen(function* () {
@@ -121,7 +121,7 @@ const insertIntoMutationLog = (

    // TODO use prepared statements
    yield* execSql(
-      dbLog,
+      dbMutationLog,
      ...insertRow({
        tableName: MUTATION_LOG_META_TABLE,
        columns: mutationLogMetaTable.sqliteDef.columns,

package/src/leader-thread/connection.ts +7 -7

@@ -1,7 +1,7 @@
 // import type { WaSqlite } from '@livestore/sqlite-wasm'
 import { Effect } from '@livestore/utils/effect'

-import type { SynchronousDatabase } from '../adapter-types.js'
+import type { SqliteDb } from '../adapter-types.js'
 import { SqliteError } from '../adapter-types.js'
 import type { BindValues } from '../sql-queries/index.js'
 import type { PreparedBindValues } from '../util.js'
@@ -12,9 +12,9 @@ namespace WaSqlite {
   export type SQLiteError = any
 }

-export const configureConnection = (syncDb: SynchronousDatabase, { fkEnabled }: { fkEnabled: boolean }) =>
+export const configureConnection = (sqliteDb: SqliteDb, { fkEnabled }: { fkEnabled: boolean }) =>
   execSql(
-    syncDb,
+    sqliteDb,
     sql`
     PRAGMA page_size=8192;
     PRAGMA journal_mode=MEMORY;
@@ -23,10 +23,10 @@ export const configureConnection = (syncDb: SynchronousDatabase, { fkEnabled }:
     {},
   )

-export const execSql = (syncDb: SynchronousDatabase, sql: string, bind: BindValues) => {
+export const execSql = (sqliteDb: SqliteDb, sql: string, bind: BindValues) => {
   const bindValues = prepareBindValues(bind, sql)
   return Effect.try({
-    try: () => syncDb.execute(sql, bindValues),
+    try: () => sqliteDb.execute(sql, bindValues),
     catch: (cause) =>
       new SqliteError({ cause, query: { bindValues, sql }, code: (cause as WaSqlite.SQLiteError).code }),
   }).pipe(
@@ -48,9 +48,9 @@ export const execSql = (syncDb: SynchronousDatabase, sql: string, bind: BindValu
 // }

 // TODO actually use prepared statements
-export const execSqlPrepared = (syncDb: SynchronousDatabase, sql: string, bindValues: PreparedBindValues) => {
+export const execSqlPrepared = (sqliteDb: SqliteDb, sql: string, bindValues: PreparedBindValues) => {
   return Effect.try({
-    try: () => syncDb.execute(sql, bindValues),
+    try: () => sqliteDb.execute(sql, bindValues),
     catch: (cause) =>
       new SqliteError({ cause, query: { bindValues, sql }, code: (cause as WaSqlite.SQLiteError).code }),
   }).pipe(

package/src/leader-thread/leader-worker-devtools.ts +25 -18

@@ -21,7 +21,7 @@ export const bootDevtools = (options: DevtoolsOptions) =>
       sendMessage: () => Effect.void,
     }).pipe(Effect.tapCauseLogPretty, Effect.forkScoped)

-    const { persistenceInfo, devtoolsWebChannel } = yield* options.makeContext
+    const { persistenceInfo, devtoolsWebChannel } = yield* options.makeBootContext

     const sendMessage: SendMessageToDevtools = (message) =>
       devtoolsWebChannel
@@ -71,8 +71,15 @@ const listenToDevtools = ({
   persistenceInfo?: PersistenceInfoPair
 }) =>
   Effect.gen(function* () {
-    const { syncBackend, makeSyncDb, db, dbLog, shutdownStateSubRef, shutdownChannel, syncProcessor } =
-      yield* LeaderThreadCtx
+    const {
+      syncBackend,
+      makeSqliteDb,
+      dbReadModel,
+      dbMutationLog,
+      shutdownStateSubRef,
+      shutdownChannel,
+      syncProcessor,
+    } = yield* LeaderThreadCtx

     type RequestId = string
     const subscriptionFiberMap = yield* FiberMap.make<RequestId>()
@@ -95,7 +102,7 @@
          return
        }
        case 'LSD.Leader.SnapshotReq': {
-          const snapshot = db.export()
+          const snapshot = dbReadModel.export()

          yield* sendMessage(Devtools.SnapshotRes.make({ snapshot, ...reqPayload }))

@@ -107,15 +114,15 @@
          let tableNames: Set<string>

          try {
-            const tmpSyncDb = yield* makeSyncDb({ _tag: 'in-memory' })
-            tmpSyncDb.import(data)
-            const tableNameResults = tmpSyncDb.select<{ name: string }>(
+            const tmpDb = yield* makeSqliteDb({ _tag: 'in-memory' })
+            tmpDb.import(data)
+            const tableNameResults = tmpDb.select<{ name: string }>(
              `select name from sqlite_master where type = 'table'`,
            )

            tableNames = new Set(tableNameResults.map((_) => _.name))

-            tmpSyncDb.close()
+            tmpDb.close()
          } catch (e) {
            yield* Effect.logError(`Error importing database file`, e)
            yield* sendMessage(Devtools.LoadDatabaseFileRes.make({ ...reqPayload, status: 'unsupported-file' }))
@@ -126,15 +133,15 @@
          if (tableNames.has(MUTATION_LOG_META_TABLE)) {
            yield* SubscriptionRef.set(shutdownStateSubRef, 'shutting-down')

-            dbLog.import(data)
+            dbMutationLog.import(data)

-            db.destroy()
+            dbReadModel.destroy()
          } else if (tableNames.has(SCHEMA_META_TABLE) && tableNames.has(SCHEMA_MUTATIONS_META_TABLE)) {
            yield* SubscriptionRef.set(shutdownStateSubRef, 'shutting-down')

-            db.import(data)
+            dbReadModel.import(data)

-            dbLog.destroy()
+            dbMutationLog.destroy()
          } else {
            yield* sendMessage(Devtools.LoadDatabaseFileRes.make({ ...reqPayload, status: 'unsupported-database' }))
            return
@@ -151,10 +158,10 @@

          yield* SubscriptionRef.set(shutdownStateSubRef, 'shutting-down')

-          db.destroy()
+          dbReadModel.destroy()

          if (mode === 'all-data') {
-            dbLog.destroy()
+            dbMutationLog.destroy()
          }

          yield* sendMessage(Devtools.ResetAllDataRes.make({ ...reqPayload }))
@@ -170,12 +177,12 @@
          }

          const dbSizeQuery = `SELECT page_count * page_size as size FROM pragma_page_count(), pragma_page_size();`
-          const dbFileSize = db.select<{ size: number }>(dbSizeQuery, undefined)[0]!.size
-          const mutationLogFileSize = dbLog.select<{ size: number }>(dbSizeQuery, undefined)[0]!.size
+          const dbFileSize = dbReadModel.select<{ size: number }>(dbSizeQuery, undefined)[0]!.size
+          const mutationLogFileSize = dbMutationLog.select<{ size: number }>(dbSizeQuery, undefined)[0]!.size

          yield* sendMessage(
            Devtools.DatabaseFileInfoRes.make({
-              db: { fileSize: dbFileSize, persistenceInfo: persistenceInfo.db },
+              readModel: { fileSize: dbFileSize, persistenceInfo: persistenceInfo.readModel },
              mutationLog: { fileSize: mutationLogFileSize, persistenceInfo: persistenceInfo.mutationLog },
              ...reqPayload,
            }),
@@ -184,7 +191,7 @@
          return
        }
        case 'LSD.Leader.MutationLogReq': {
-          const mutationLog = dbLog.export()
+          const mutationLog = dbMutationLog.export()

          yield* sendMessage(Devtools.MutationLogRes.make({ mutationLog, ...reqPayload }))