@livestore/common 0.4.0-dev.7 → 0.4.0-dev.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/devtools/devtools-messages-client-session.d.ts +21 -21
  3. package/dist/devtools/devtools-messages-common.d.ts +6 -6
  4. package/dist/devtools/devtools-messages-leader.d.ts +24 -24
  5. package/dist/leader-thread/LeaderSyncProcessor.d.ts +4 -1
  6. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  7. package/dist/leader-thread/LeaderSyncProcessor.js +40 -14
  8. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  9. package/dist/leader-thread/eventlog.js +1 -1
  10. package/dist/leader-thread/eventlog.js.map +1 -1
  11. package/dist/leader-thread/types.d.ts +1 -0
  12. package/dist/leader-thread/types.d.ts.map +1 -1
  13. package/dist/schema/state/sqlite/client-document-def.d.ts.map +1 -1
  14. package/dist/schema/state/sqlite/client-document-def.js +2 -2
  15. package/dist/schema/state/sqlite/client-document-def.js.map +1 -1
  16. package/dist/schema/state/sqlite/column-annotations.d.ts.map +1 -1
  17. package/dist/schema/state/sqlite/column-annotations.js +14 -6
  18. package/dist/schema/state/sqlite/column-annotations.js.map +1 -1
  19. package/dist/schema/state/sqlite/query-builder/impl.test.js +81 -0
  20. package/dist/schema/state/sqlite/query-builder/impl.test.js.map +1 -1
  21. package/dist/schema/state/sqlite/table-def.d.ts +4 -4
  22. package/dist/schema/state/sqlite/table-def.d.ts.map +1 -1
  23. package/dist/schema/state/sqlite/table-def.js +2 -2
  24. package/dist/schema/state/sqlite/table-def.js.map +1 -1
  25. package/dist/schema/state/sqlite/table-def.test.js +44 -0
  26. package/dist/schema/state/sqlite/table-def.test.js.map +1 -1
  27. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  28. package/dist/sync/ClientSessionSyncProcessor.js +7 -3
  29. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  30. package/dist/sync/mock-sync-backend.d.ts +11 -2
  31. package/dist/sync/mock-sync-backend.d.ts.map +1 -1
  32. package/dist/sync/mock-sync-backend.js +59 -7
  33. package/dist/sync/mock-sync-backend.js.map +1 -1
  34. package/dist/testing/event-factory.d.ts +68 -0
  35. package/dist/testing/event-factory.d.ts.map +1 -0
  36. package/dist/testing/event-factory.js +80 -0
  37. package/dist/testing/event-factory.js.map +1 -0
  38. package/dist/testing/mod.d.ts +2 -0
  39. package/dist/testing/mod.d.ts.map +1 -0
  40. package/dist/testing/mod.js +2 -0
  41. package/dist/testing/mod.js.map +1 -0
  42. package/dist/version.d.ts +1 -1
  43. package/dist/version.js +1 -1
  44. package/package.json +6 -5
  45. package/src/leader-thread/LeaderSyncProcessor.ts +60 -17
  46. package/src/leader-thread/eventlog.ts +1 -1
  47. package/src/leader-thread/types.ts +1 -0
  48. package/src/schema/state/sqlite/client-document-def.ts +2 -2
  49. package/src/schema/state/sqlite/column-annotations.ts +16 -6
  50. package/src/schema/state/sqlite/query-builder/impl.test.ts +121 -0
  51. package/src/schema/state/sqlite/table-def.test.ts +53 -0
  52. package/src/schema/state/sqlite/table-def.ts +9 -8
  53. package/src/sync/ClientSessionSyncProcessor.ts +9 -3
  54. package/src/sync/mock-sync-backend.ts +104 -16
  55. package/src/testing/event-factory.ts +133 -0
  56. package/src/testing/mod.ts +1 -0
  57. package/src/version.ts +1 -1
  58. package/dist/schema-management/migrations.test.d.ts +0 -2
  59. package/dist/schema-management/migrations.test.d.ts.map +0 -1
  60. package/dist/schema-management/migrations.test.js +0 -52
  61. package/dist/schema-management/migrations.test.js.map +0 -1
@@ -4,6 +4,7 @@ import {
  BucketQueue,
  Cause,
  Deferred,
+ Duration,
  Effect,
  Exit,
  FiberHandle,
@@ -13,6 +14,7 @@ import {
  pipe,
  Queue,
  ReadonlyArray,
+ Schedule,
  Stream,
  Subscribable,
  SubscriptionRef,
@@ -102,7 +104,10 @@ export const makeLeaderSyncProcessor = ({
  */
  backendPushBatchSize?: number
  }
- /** * Whether the sync backend should reactively pull new events from the sync backend */
+ /**
+ * Whether the sync backend should reactively pull new events from the sync backend
+ * When `false`, the sync processor will only do an initial pull
+ */
  livePull: boolean
  testing: {
  delays?: {
@@ -281,7 +286,7 @@ export const makeLeaderSyncProcessor = ({
  },
  }).pipe(Effect.catchAllCause(maybeShutdownOnError), Effect.forkScoped)

- const backendPushingFiberHandle = yield* FiberHandle.make<undefined, never>()
+ const backendPushingFiberHandle = yield* FiberHandle.make<void, never>()
  const backendPushingEffect = backgroundBackendPushing({
  syncBackendPushQueue,
  otelSpan,
@@ -686,6 +691,13 @@ const backgroundBackendPulling = ({
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
  })

+ // Ensure push fiber is active after advance by restarting with current pending (non-client) events
+ const globalPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
+ const { eventDef } = getEventDef(schema, event.name)
+ return eventDef.options.clientOnly === false
+ })
+ yield* restartBackendPushing(globalPendingEvents)
+
  yield* connectedClientSessionPullQueues.offer({
  payload: SyncState.payloadFromMergeResult(mergeResult),
  leaderHead: mergeResult.newSyncState.localHead,
@@ -756,6 +768,9 @@ const backgroundBackendPulling = ({
  Stream.runDrain,
  Effect.interruptible,
  )
+
+ // Should only ever happen when livePull is false
+ yield* Effect.logDebug('backend-pulling finished', { livePull })
  }).pipe(Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pulling'))

  const backgroundBackendPushing = ({
@@ -789,25 +804,53 @@ const backgroundBackendPushing = ({
  batch: TRACE_VERBOSE ? JSON.stringify(queueItems) : undefined,
  })

- // TODO handle push errors (should only happen during concurrent pull+push)
- const pushResult = yield* syncBackend.push(queueItems.map((_) => _.toGlobal())).pipe(Effect.either)
+ // Push with declarative retry/backoff using Effect schedules
+ // - Exponential backoff starting at 1s and doubling (1s, 2s, 4s, 8s, 16s, 30s ...)
+ // - Delay clamped at 30s (continues retrying at 30s)
+ // - Resets automatically after successful push
+ // TODO(metrics): expose counters/gauges for retry attempts and queue health via devtools/metrics
+
+ // Only retry for transient UnexpectedError cases
+ const isRetryable = (err: InvalidPushError | IsOfflineError) =>
+ err._tag === 'InvalidPushError' && err.cause._tag === 'LiveStore.UnexpectedError'
+
+ // Input: InvalidPushError | IsOfflineError, Output: Duration
+ const retrySchedule: Schedule.Schedule<Duration.DurationInput, InvalidPushError | IsOfflineError> =
+ Schedule.exponential(Duration.seconds(1)).pipe(
+ Schedule.andThenEither(Schedule.spaced(Duration.seconds(30))), // clamp at 30 second intervals
+ Schedule.compose(Schedule.elapsed),
+ Schedule.whileInput(isRetryable),
+ )
+
+ yield* Effect.gen(function* () {
+ const iteration = yield* Schedule.CurrentIterationMetadata

- if (pushResult._tag === 'Left') {
- if (
- pushResult.left._tag === 'InvalidPushError' &&
- // server ahead errors are gracefully handled
- pushResult.left.cause._tag !== 'ServerAheadError'
- ) {
- return yield* pushResult.left
+ const pushResult = yield* syncBackend.push(queueItems.map((_) => _.toGlobal())).pipe(Effect.either)
+
+ const retries = iteration.recurrence
+ if (retries > 0 && pushResult._tag === 'Right') {
+ otelSpan?.addEvent('backend-push-retry-success', { retries, batchSize: queueItems.length })
  }

- if (LS_DEV) {
- yield* Effect.logDebug('handled backend-push-error', { error: pushResult.left.toString() })
+ if (pushResult._tag === 'Left') {
+ otelSpan?.addEvent('backend-push-error', {
+ error: pushResult.left.toString(),
+ retries,
+ batchSize: queueItems.length,
+ })
+ const error = pushResult.left
+ if (
+ error._tag === 'IsOfflineError' ||
+ (error._tag === 'InvalidPushError' && error.cause._tag === 'ServerAheadError')
+ ) {
+ // It's a core part of the sync protocol that the sync backend will emit a new pull chunk alongside the ServerAheadError
+ yield* Effect.logDebug('handled backend-push-error (waiting for interupt caused by pull)', { error })
+ return yield* Effect.never
+ }
+
+ return yield* error
  }

- otelSpan?.addEvent('backend-push-error', { error: pushResult.left.toString() })
- // wait for interrupt caused by background pulling which will then restart pushing
- return yield* Effect.never
- }
+ }).pipe(Effect.retry(retrySchedule))
  }
  }).pipe(Effect.interruptible, Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pushing'))
@@ -149,7 +149,7 @@ export const insertIntoEventlog = (

  if (parentEventExists === false) {
  shouldNeverHappen(
- `Parent mutation ${eventEncoded.parentSeqNum.global},${eventEncoded.parentSeqNum.client} does not exist`,
+ `Parent event ${eventEncoded.parentSeqNum.global},${eventEncoded.parentSeqNum.client} does not exist in eventlog`,
  )
  }
  }
@@ -150,6 +150,7 @@ export interface LeaderSyncProcessor {
  options?: {
  /**
  * If true, the effect will only finish when the local push has been processed (i.e. succeeded or was rejected).
+ * `true` doesn't mean the events have been pushed to the sync backend.
  * @default false
  */
  waitForProcessing?: boolean
@@ -1,6 +1,6 @@
  import { shouldNeverHappen } from '@livestore/utils'
  import type { Option, Types } from '@livestore/utils/effect'
- import { Schema, SchemaAST } from '@livestore/utils/effect'
+ import { Schema } from '@livestore/utils/effect'

  import { SessionIdSymbol } from '../../../adapter-types.ts'
  import { sql } from '../../../util.ts'
@@ -273,7 +273,7 @@ export const deriveEventAndMaterializer = ({
  }

  // Override the full value if it's not an object or no partial set is allowed
- const schemaProps = SchemaAST.getPropertySignatures(valueSchema.ast)
+ const schemaProps = Schema.getResolvedPropertySignatures(valueSchema)
  if (schemaProps.length === 0 || partialSet === false) {
  const valueColJsonSchema = Schema.parseJson(valueSchema)
  const encodedInsertValue = Schema.encodeSyncDebug(valueColJsonSchema)(value ?? defaultValue)
@@ -1,5 +1,5 @@
  import type { Schema } from '@livestore/utils/effect'
- import { dual } from '@livestore/utils/effect'
+ import { dual, Option, SchemaAST } from '@livestore/utils/effect'
  import type { SqliteDsl } from './db-schema/mod.ts'

  export const PrimaryKeyId = Symbol.for('livestore/state/sqlite/annotations/primary-key')
@@ -32,7 +32,7 @@ Here are the knobs you can turn per-column when you CREATE TABLE (or ALTER TABLE
  * Adds a primary key annotation to a schema.
  */
  export const withPrimaryKey = <T extends Schema.Schema.All>(schema: T) =>
- schema.annotations({ [PrimaryKeyId]: true }) as T
+ applyAnnotations(schema, { [PrimaryKeyId]: true })

  /**
  * Adds a column type annotation to a schema.
@@ -43,19 +43,19 @@ export const withColumnType: {
  <T extends Schema.Schema.All>(schema: T, type: SqliteDsl.FieldColumnType): T
  } = dual(2, <T extends Schema.Schema.All>(schema: T, type: SqliteDsl.FieldColumnType) => {
  validateSchemaColumnTypeCompatibility(schema, type)
- return schema.annotations({ [ColumnType]: type }) as T
+ return applyAnnotations(schema, { [ColumnType]: type })
  })

  /**
  * Adds an auto-increment annotation to a schema.
  */
  export const withAutoIncrement = <T extends Schema.Schema.All>(schema: T) =>
- schema.annotations({ [AutoIncrement]: true }) as T
+ applyAnnotations(schema, { [AutoIncrement]: true })

  /**
  * Adds a unique constraint annotation to a schema.
  */
- export const withUnique = <T extends Schema.Schema.All>(schema: T) => schema.annotations({ [Unique]: true }) as T
+ export const withUnique = <T extends Schema.Schema.All>(schema: T) => applyAnnotations(schema, { [Unique]: true })

  /**
  * Adds a default value annotation to a schema.
@@ -64,7 +64,7 @@ export const withDefault: {
  // TODO make type safe
  <T extends Schema.Schema.All>(schema: T, value: unknown): T
  (value: unknown): <T extends Schema.Schema.All>(schema: T) => T
- } = dual(2, <T extends Schema.Schema.All>(schema: T, value: unknown) => schema.annotations({ [Default]: value }) as T)
+ } = dual(2, <T extends Schema.Schema.All>(schema: T, value: unknown) => applyAnnotations(schema, { [Default]: value }))

  /**
  * Validates that a schema is compatible with the specified SQLite column type
@@ -75,3 +75,13 @@ const validateSchemaColumnTypeCompatibility = (
  ): void => {
  // TODO actually implement this
  }
+
+ const applyAnnotations = <T extends Schema.Schema.All>(schema: T, overrides: Record<PropertyKey, unknown>): T => {
+ const identifier = SchemaAST.getIdentifierAnnotation(schema.ast)
+ const shouldPreserveIdentifier = Option.isSome(identifier) && !(SchemaAST.IdentifierAnnotationId in overrides)
+ const annotations: Record<PropertyKey, unknown> = shouldPreserveIdentifier
+ ? { ...overrides, [SchemaAST.IdentifierAnnotationId]: identifier.value }
+ : overrides
+
+ return schema.annotations(annotations) as T
+ }
@@ -683,6 +683,127 @@ describe('query builder', () => {
  expect(pattern1).toEqual(pattern2)
  })
  })
+
+ describe('schema transforms', () => {
+ const Flat = Schema.Struct({
+ id: Schema.String.pipe(State.SQLite.withPrimaryKey),
+ contactFirstName: Schema.String,
+ contactLastName: Schema.String,
+ contactEmail: Schema.String.pipe(State.SQLite.withUnique),
+ })
+
+ const Nested = Schema.transform(
+ Flat,
+ Schema.Struct({
+ id: Schema.String,
+ contact: Schema.Struct({
+ firstName: Schema.String,
+ lastName: Schema.String,
+ email: Schema.String,
+ }),
+ }),
+ {
+ decode: ({ id, contactFirstName, contactLastName, contactEmail }) => ({
+ id,
+ contact: {
+ firstName: contactFirstName,
+ lastName: contactLastName,
+ email: contactEmail,
+ },
+ }),
+ encode: ({ id, contact }) => ({
+ id,
+ contactFirstName: contact.firstName,
+ contactLastName: contact.lastName,
+ contactEmail: contact.email,
+ }),
+ },
+ )
+
+ const makeContactsTable = () =>
+ State.SQLite.table({
+ name: 'contacts',
+ schema: Nested,
+ // schema: Flat,
+ })
+
+ it('exposes flattened insert type while schema type is nested', () => {
+ const contactsTable = makeContactsTable()
+
+ type InsertInput = Parameters<(typeof contactsTable)['insert']>[0]
+ type NestedType = Schema.Schema.Type<typeof Nested>
+
+ type Assert<T extends true> = T
+
+ type InsertKeys = keyof InsertInput
+ type NestedKeys = keyof NestedType
+
+ type _InsertHasFlattenedColumns = Assert<
+ 'contactFirstName' extends InsertKeys
+ ? 'contactLastName' extends InsertKeys
+ ? 'contactEmail' extends InsertKeys
+ ? true
+ : false
+ : false
+ : false
+ >
+
+ type _InsertDoesNotExposeNested = Assert<Extract<'contact', InsertKeys> extends never ? true : false>
+
+ type _SchemaTypeIsNested = Assert<'contact' extends NestedKeys ? true : false>
+
+ void contactsTable
+ })
+
+ it('fails to encode nested inserts because flat columns are required', () => {
+ const contactsTable = makeContactsTable()
+
+ expect(
+ contactsTable
+ // TODO in the future we should use decoded types here instead of encoded
+ .insert({
+ id: 'person-1',
+ contactFirstName: 'Ada',
+ contactLastName: 'Lovelace',
+ contactEmail: 'ada@example.com',
+ })
+ .asSql(),
+ ).toMatchInlineSnapshot(`
+ {
+ "bindValues": [
+ "person-1",
+ "Ada",
+ "Lovelace",
+ "ada@example.com",
+ ],
+ "query": "INSERT INTO 'contacts' (id, contactFirstName, contactLastName, contactEmail) VALUES (?, ?, ?, ?)",
+ "usedTables": Set {
+ "contacts",
+ },
+ }
+ `)
+ })
+
+ it('fails to encode nested inserts because flat columns are required', () => {
+ const contactsTable = makeContactsTable()
+
+ expect(() =>
+ contactsTable
+ .insert({
+ id: 'person-1',
+ // @ts-expect-error
+ contact: {
+ firstName: 'Ada',
+ lastName: 'Lovelace',
+ email: 'ada@example.com',
+ },
+ })
+ .asSql(),
+ ).toThrowErrorMatchingInlineSnapshot(`
+ [ParseError: contacts\n└─ ["contactFirstName"]\n └─ is missing]
+ `)
+ })
+ })
  })

  // TODO nested queries
@@ -178,6 +178,59 @@ describe('table function overloads', () => {
  expect(userTable.sqliteDef.columns.age.columnType).toBe('integer')
  })

+ it('should support schemas that transform flat columns into nested types', () => {
+ const Flat = Schema.Struct({
+ id: Schema.String.pipe(State.SQLite.withPrimaryKey),
+ contactFirstName: Schema.String,
+ contactLastName: Schema.String,
+ contactEmail: Schema.String.pipe(State.SQLite.withUnique),
+ })
+
+ const Nested = Schema.transform(
+ Flat,
+ Schema.Struct({
+ id: Schema.String,
+ contact: Schema.Struct({
+ firstName: Schema.String,
+ lastName: Schema.String,
+ email: Schema.String,
+ }),
+ }),
+ {
+ decode: ({ id, contactFirstName, contactLastName, contactEmail }) => ({
+ id,
+ contact: {
+ firstName: contactFirstName,
+ lastName: contactLastName,
+ email: contactEmail,
+ },
+ }),
+ encode: ({ id, contact }) => ({
+ id,
+ contactFirstName: contact.firstName,
+ contactLastName: contact.lastName,
+ contactEmail: contact.email,
+ }),
+ },
+ )
+
+ const contactsTable = State.SQLite.table({
+ name: 'contacts',
+ schema: Nested,
+ })
+
+ const columns = contactsTable.sqliteDef.columns
+
+ expect(Object.keys(columns)).toEqual(['id', 'contactFirstName', 'contactLastName', 'contactEmail'])
+ expect(columns.id.primaryKey).toBe(true)
+ expect(columns.contactEmail.columnType).toBe('text')
+ expect(contactsTable.sqliteDef.indexes).toContainEqual({
+ name: 'idx_contacts_contactEmail_unique',
+ columns: ['contactEmail'],
+ isUnique: true,
+ })
+ })
+
  it('should extract table name from Schema.Class identifier', () => {
  class TodoItem extends Schema.Class<TodoItem>('TodoItem')({
  id: Schema.String,
@@ -1,5 +1,5 @@
  import { type Nullable, shouldNeverHappen } from '@livestore/utils'
- import { Option, type Schema, SchemaAST, type Types } from '@livestore/utils/effect'
+ import { Option, Schema, SchemaAST, type Types } from '@livestore/utils/effect'

  import { getColumnDefForSchema, schemaFieldsToColumns } from './column-def.ts'
  import { SqliteDsl } from './db-schema/mod.ts'
@@ -221,7 +221,7 @@ export function table<
  ) as SqliteDsl.Columns
  additionalIndexes = []
  } else if ('schema' in args) {
- const result = schemaFieldsToColumns(SchemaAST.getPropertySignatures(args.schema.ast))
+ const result = schemaFieldsToColumns(Schema.getResolvedPropertySignatures(args.schema))
  columns = result.columns

  // We'll set tableName first, then use it for index names
@@ -381,12 +381,13 @@ export declare namespace SchemaToColumns {
  export type ColumnDefForType<TEncoded, TType> = SqliteDsl.ColumnDefinition<TEncoded, TType>

  // Create columns type from schema Type and Encoded
- export type FromTypes<TType, TEncoded> = TType extends Record<string, any>
- ? TEncoded extends Record<string, any>
- ? {
- [K in keyof TType & keyof TEncoded]: ColumnDefForType<TEncoded[K], TType[K]>
- }
- : SqliteDsl.Columns
+ export type FromTypes<TType, TEncoded> = TEncoded extends Record<string, any>
+ ? {
+ [K in keyof TEncoded]-?: ColumnDefForType<
+ TEncoded[K],
+ TType extends Record<string, any> ? (K extends keyof TType ? TType[K] : TEncoded[K]) : TEncoded[K]
+ >
+ }
  : SqliteDsl.Columns
  }
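
Note (illustration, not part of the diff): the FromTypes change above derives columns from the encoded keys of the schema, using the decoded field type where a matching key exists and falling back to the encoded type otherwise. A minimal standalone sketch, with a simplified stand-in for SqliteDsl.ColumnDefinition:

// Simplified stand-in, just to show the key mapping (not LiveStore's actual definition).
type ColumnDefForType<TEncoded, TType> = { readonly encoded: TEncoded; readonly decoded: TType }

type FromTypes<TType, TEncoded> = TEncoded extends Record<string, any>
  ? {
      [K in keyof TEncoded]-?: ColumnDefForType<
        TEncoded[K],
        TType extends Record<string, any> ? (K extends keyof TType ? TType[K] : TEncoded[K]) : TEncoded[K]
      >
    }
  : never

// With a flat-to-nested transform like the one in the tests above, the encoded side drives the columns:
type Encoded = { id: string; contactFirstName: string; contactEmail: string }
type Decoded = { id: string; contact: { firstName: string; email: string } }

type Columns = FromTypes<Decoded, Encoded>
// Columns has `id`, `contactFirstName`, and `contactEmail`, and no `contact` column.
// `contactFirstName` has no key in Decoded, so its decoded side falls back to the encoded `string`.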
 
@@ -94,7 +94,7 @@ export const makeClientSessionSyncProcessor = ({
  }),
  }

- /** Only used for debugging / observability, it's not relied upon for correctness of the sync processor. */
+ /** Only used for debugging / observability / testing, it's not relied upon for correctness of the sync processor. */
  const syncStateUpdateQueue = Queue.unbounded<SyncState.SyncState>().pipe(Effect.runSync)
  const isClientEvent = (eventEncoded: LiveStoreEvent.EncodedWithMeta) =>
  getEventDef(schema, eventEncoded.name).eventDef.options.clientOnly
@@ -240,7 +240,6 @@ export const makeClientSessionSyncProcessor = ({
  }

  syncStateRef.current = mergeResult.newSyncState
- yield* syncStateUpdateQueue.offer(mergeResult.newSyncState)

  if (mergeResult._tag === 'rebase') {
  span.addEvent('merge:pull:rebase', {
@@ -298,7 +297,11 @@ export const makeClientSessionSyncProcessor = ({
  debugInfo.advanceCount++
  }

- if (mergeResult.newEvents.length === 0) return
+ if (mergeResult.newEvents.length === 0) {
+ // If there are no new events, we need to update the sync state as well
+ yield* syncStateUpdateQueue.offer(mergeResult.newSyncState)
+ return
+ }

  const writeTables = new Set<string>()
  for (const event of mergeResult.newEvents) {
@@ -321,6 +324,9 @@ export const makeClientSessionSyncProcessor = ({
  }

  refreshTables(writeTables)
+
+ // We're only triggering the sync state update after all events have been materialized
+ yield* syncStateUpdateQueue.offer(mergeResult.newSyncState)
  }).pipe(
  Effect.tapCauseLogPretty,
  Effect.catchAllCause((cause) => clientSession.shutdown(Exit.failCause(cause))),
@@ -1,7 +1,8 @@
  import type { Schema, Scope } from '@livestore/utils/effect'
  import { Effect, Mailbox, Option, Queue, Stream, SubscriptionRef } from '@livestore/utils/effect'
- import type { UnexpectedError } from '../errors.ts'
+ import { UnexpectedError } from '../errors.ts'
  import { EventSequenceNumber, type LiveStoreEvent } from '../schema/mod.ts'
+ import { InvalidPushError } from './errors.ts'
  import * as SyncBackend from './sync-backend.ts'
  import { validatePushPayload } from './validate-push-payload.ts'

@@ -11,41 +12,118 @@ export interface MockSyncBackend {
  disconnect: Effect.Effect<void>
  makeSyncBackend: Effect.Effect<SyncBackend.SyncBackend, UnexpectedError, Scope.Scope>
  advance: (...batch: LiveStoreEvent.AnyEncodedGlobal[]) => Effect.Effect<void>
+ /** Fail the next N push calls with an InvalidPushError (or custom error) */
+ failNextPushes: (
+ count: number,
+ error?: (batch: ReadonlyArray<LiveStoreEvent.AnyEncodedGlobal>) => Effect.Effect<never, InvalidPushError>,
+ ) => Effect.Effect<void>
  }

- export const makeMockSyncBackend: Effect.Effect<MockSyncBackend, UnexpectedError, Scope.Scope> = Effect.gen(
- function* () {
+ export interface MockSyncBackendOptions {
+ /** Chunk size for non-live pulls; defaults to 100 */
+ nonLiveChunkSize?: number
+ /** Initial connected state; defaults to false */
+ startConnected?: boolean
+ // TODO add a "flaky" mode to simulate transient network / server failures for pull/push
+ }
+
+ export const makeMockSyncBackend = (
+ options?: MockSyncBackendOptions,
+ ): Effect.Effect<MockSyncBackend, UnexpectedError, Scope.Scope> =>
+ Effect.gen(function* () {
  const syncEventSequenceNumberRef = { current: EventSequenceNumber.ROOT.global }
  const syncPullQueue = yield* Queue.unbounded<LiveStoreEvent.AnyEncodedGlobal>()
  const pushedEventsQueue = yield* Mailbox.make<LiveStoreEvent.AnyEncodedGlobal>()
- const syncIsConnectedRef = yield* SubscriptionRef.make(true)
+ const syncIsConnectedRef = yield* SubscriptionRef.make(options?.startConnected ?? false)
+ const allEventsRef: { current: LiveStoreEvent.AnyEncodedGlobal[] } = { current: [] }

  const span = yield* Effect.currentSpan.pipe(Effect.orDie)

  const semaphore = yield* Effect.makeSemaphore(1)

+ // TODO improve the API and implementation of simulating errors
+ const failCounterRef = yield* SubscriptionRef.make(0)
+ const failEffectRef = yield* SubscriptionRef.make<
+ ((batch: ReadonlyArray<LiveStoreEvent.AnyEncodedGlobal>) => Effect.Effect<never, InvalidPushError>) | undefined
+ >(undefined)
+
  const makeSyncBackend = Effect.gen(function* () {
+ const nonLiveChunkSize = Math.max(1, options?.nonLiveChunkSize ?? 100)
+
+ // TODO consider making offline state actively error pull/push.
+ // Currently, offline only reflects in `isConnected`, while operations still succeed,
+ // mirroring how some real providers behave during transient disconnects.
  return SyncBackend.of<Schema.JsonValue>({
  isConnected: syncIsConnectedRef,
- connect: Effect.void,
+ connect: SubscriptionRef.set(syncIsConnectedRef, true),
  ping: Effect.void,
- pull: () =>
- Stream.fromQueue(syncPullQueue).pipe(
- Stream.chunks,
- Stream.map((chunk) => ({
- batch: [...chunk].map((eventEncoded) => ({ eventEncoded, metadata: Option.none() })),
- pageInfo: SyncBackend.pageInfoNoMore,
- })),
- Stream.withSpan('MockSyncBackend:pull', { parent: span }),
- ),
+ pull: (cursor, options) =>
+ (options?.live
+ ? Stream.concat(
+ Stream.make(SyncBackend.pullResItemEmpty()),
+ Stream.fromQueue(syncPullQueue).pipe(
+ Stream.chunks,
+ Stream.map((chunk) => ({
+ batch: [...chunk].map((eventEncoded) => ({ eventEncoded, metadata: Option.none() })),
+ pageInfo: SyncBackend.pageInfoNoMore,
+ })),
+ ),
+ )
+ : Stream.fromEffect(
+ Effect.sync(() => {
+ const lastSeen = cursor.pipe(
+ Option.match({
+ onNone: () => EventSequenceNumber.ROOT.global,
+ onSome: (_) => _.eventSequenceNumber,
+ }),
+ )
+ // All events with seqNum greater than lastSeen
+ const slice = allEventsRef.current.filter((e) => e.seqNum > lastSeen)
+ // Split into configured chunk size
+ const chunks: { events: LiveStoreEvent.AnyEncodedGlobal[]; remaining: number }[] = []
+ for (let i = 0; i < slice.length; i += nonLiveChunkSize) {
+ const end = Math.min(i + nonLiveChunkSize, slice.length)
+ const remaining = Math.max(slice.length - end, 0)
+ chunks.push({ events: slice.slice(i, end), remaining })
+ }
+ if (chunks.length === 0) {
+ chunks.push({ events: [], remaining: 0 })
+ }
+ return chunks
+ }),
+ ).pipe(
+ Stream.flatMap((chunks) =>
+ Stream.fromIterable(chunks).pipe(
+ Stream.map(({ events, remaining }) => ({
+ batch: events.map((eventEncoded) => ({ eventEncoded, metadata: Option.none() })),
+ pageInfo: remaining > 0 ? SyncBackend.pageInfoMoreKnown(remaining) : SyncBackend.pageInfoNoMore,
+ })),
+ ),
+ ),
+ )
+ ).pipe(Stream.withSpan('MockSyncBackend:pull', { parent: span })),
  push: (batch) =>
  Effect.gen(function* () {
  yield* validatePushPayload(batch, syncEventSequenceNumberRef.current)

+ const remaining = yield* SubscriptionRef.get(failCounterRef)
+ if (remaining > 0) {
+ const maybeFail = yield* SubscriptionRef.get(failEffectRef)
+ // decrement counter first
+ yield* SubscriptionRef.set(failCounterRef, remaining - 1)
+ if (maybeFail) {
+ return yield* maybeFail(batch)
+ }
+ return yield* new InvalidPushError({
+ cause: new UnexpectedError({ cause: new Error('MockSyncBackend: simulated push failure') }),
+ })
+ }
+
  yield* Effect.sleep(10).pipe(Effect.withSpan('MockSyncBackend:push:sleep')) // Simulate network latency

  yield* pushedEventsQueue.offerAll(batch)
  yield* syncPullQueue.offerAll(batch)
+ allEventsRef.current = allEventsRef.current.concat(batch)

  syncEventSequenceNumberRef.current = batch.at(-1)!.seqNum
  }).pipe(
@@ -71,6 +149,7 @@ export const makeMockSyncBackend: Effect.Effect<MockSyncBackend, UnexpectedError
  const advance = (...batch: LiveStoreEvent.AnyEncodedGlobal[]) =>
  Effect.gen(function* () {
  syncEventSequenceNumberRef.current = batch.at(-1)!.seqNum
+ allEventsRef.current = allEventsRef.current.concat(batch)
  yield* syncPullQueue.offerAll(batch)
  }).pipe(
  Effect.withSpan('MockSyncBackend:advance', {
@@ -83,6 +162,15 @@ export const makeMockSyncBackend: Effect.Effect<MockSyncBackend, UnexpectedError
  const connect = SubscriptionRef.set(syncIsConnectedRef, true)
  const disconnect = SubscriptionRef.set(syncIsConnectedRef, false)

+ const failNextPushes = (
+ count: number,
+ error?: (batch: ReadonlyArray<LiveStoreEvent.AnyEncodedGlobal>) => Effect.Effect<never, InvalidPushError>,
+ ) =>
+ Effect.gen(function* () {
+ yield* SubscriptionRef.set(failCounterRef, count)
+ yield* SubscriptionRef.set(failEffectRef, error)
+ })
+
  return {
  syncEventSequenceNumberRef,
  syncPullQueue,
@@ -91,6 +179,6 @@ export const makeMockSyncBackend: Effect.Effect<MockSyncBackend, UnexpectedError
  disconnect,
  makeSyncBackend,
  advance,
+ failNextPushes,
  }
- },
- ).pipe(Effect.withSpanScoped('MockSyncBackend'))
+ }).pipe(Effect.withSpanScoped('MockSyncBackend'))
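
Note (illustration, not part of the diff): a rough usage sketch of the extended mock backend in a test, based on the MockSyncBackend interface shown above. The import path and the surrounding harness are placeholders; the mock needs a Scope and a current span, hence Effect.scoped and Effect.withSpan.

import { Effect } from '@livestore/utils/effect'
import { makeMockSyncBackend } from './mock-sync-backend.ts' // placeholder import path

export const example = Effect.scoped(
  Effect.gen(function* () {
    const mock = yield* makeMockSyncBackend({ startConnected: true, nonLiveChunkSize: 10 })
    const syncBackend = yield* mock.makeSyncBackend

    // Fail the next two pushes with the default simulated InvalidPushError,
    // which is what exercises the new retry/backoff path in LeaderSyncProcessor.
    yield* mock.failNextPushes(2)

    // ... drive the sync processor under test against `syncBackend`, then simulate
    // remote events arriving via `mock.advance(...)`.
    void syncBackend
  }).pipe(Effect.withSpan('mock-sync-backend-example')),
)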