@livestore/common 0.0.0-snapshot-2ef046b02334f52613d31dbe06af53487685edc0 → 0.0.0-snapshot-8115ad48d5a57244358c943ecc92bb0a30274b87

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (272)
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/__tests__/fixture.d.ts +83 -221
  3. package/dist/__tests__/fixture.d.ts.map +1 -1
  4. package/dist/__tests__/fixture.js +33 -11
  5. package/dist/__tests__/fixture.js.map +1 -1
  6. package/dist/adapter-types.d.ts +34 -13
  7. package/dist/adapter-types.d.ts.map +1 -1
  8. package/dist/adapter-types.js +20 -2
  9. package/dist/adapter-types.js.map +1 -1
  10. package/dist/bounded-collections.d.ts +1 -1
  11. package/dist/bounded-collections.d.ts.map +1 -1
  12. package/dist/debug-info.d.ts.map +1 -1
  13. package/dist/debug-info.js +1 -0
  14. package/dist/debug-info.js.map +1 -1
  15. package/dist/devtools/devtools-messages-client-session.d.ts +21 -21
  16. package/dist/devtools/devtools-messages-common.d.ts +6 -6
  17. package/dist/devtools/devtools-messages-leader.d.ts +45 -45
  18. package/dist/devtools/devtools-messages-leader.d.ts.map +1 -1
  19. package/dist/devtools/devtools-messages-leader.js +11 -11
  20. package/dist/devtools/devtools-messages-leader.js.map +1 -1
  21. package/dist/index.d.ts +2 -5
  22. package/dist/index.d.ts.map +1 -1
  23. package/dist/index.js +2 -5
  24. package/dist/index.js.map +1 -1
  25. package/dist/leader-thread/LeaderSyncProcessor.d.ts +25 -12
  26. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  27. package/dist/leader-thread/LeaderSyncProcessor.js +146 -98
  28. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  29. package/dist/leader-thread/{apply-mutation.d.ts → apply-event.d.ts} +7 -7
  30. package/dist/leader-thread/apply-event.d.ts.map +1 -0
  31. package/dist/leader-thread/{apply-mutation.js → apply-event.js} +45 -45
  32. package/dist/leader-thread/apply-event.js.map +1 -0
  33. package/dist/leader-thread/eventlog.d.ts +27 -0
  34. package/dist/leader-thread/eventlog.d.ts.map +1 -0
  35. package/dist/leader-thread/eventlog.js +123 -0
  36. package/dist/leader-thread/eventlog.js.map +1 -0
  37. package/dist/leader-thread/leader-worker-devtools.d.ts.map +1 -1
  38. package/dist/leader-thread/leader-worker-devtools.js +21 -19
  39. package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
  40. package/dist/leader-thread/make-leader-thread-layer.d.ts +16 -4
  41. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  42. package/dist/leader-thread/make-leader-thread-layer.js +23 -16
  43. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  44. package/dist/leader-thread/mod.d.ts +1 -1
  45. package/dist/leader-thread/mod.d.ts.map +1 -1
  46. package/dist/leader-thread/mod.js +1 -1
  47. package/dist/leader-thread/mod.js.map +1 -1
  48. package/dist/leader-thread/recreate-db.d.ts.map +1 -1
  49. package/dist/leader-thread/recreate-db.js +6 -7
  50. package/dist/leader-thread/recreate-db.js.map +1 -1
  51. package/dist/leader-thread/types.d.ts +14 -15
  52. package/dist/leader-thread/types.d.ts.map +1 -1
  53. package/dist/materializer-helper.d.ts +23 -0
  54. package/dist/materializer-helper.d.ts.map +1 -0
  55. package/dist/materializer-helper.js +70 -0
  56. package/dist/materializer-helper.js.map +1 -0
  57. package/dist/query-builder/api.d.ts +56 -51
  58. package/dist/query-builder/api.d.ts.map +1 -1
  59. package/dist/query-builder/api.js +3 -5
  60. package/dist/query-builder/api.js.map +1 -1
  61. package/dist/query-builder/astToSql.d.ts.map +1 -1
  62. package/dist/query-builder/astToSql.js +59 -37
  63. package/dist/query-builder/astToSql.js.map +1 -1
  64. package/dist/query-builder/impl.d.ts +2 -3
  65. package/dist/query-builder/impl.d.ts.map +1 -1
  66. package/dist/query-builder/impl.js +48 -46
  67. package/dist/query-builder/impl.js.map +1 -1
  68. package/dist/query-builder/impl.test.d.ts +86 -1
  69. package/dist/query-builder/impl.test.d.ts.map +1 -1
  70. package/dist/query-builder/impl.test.js +223 -36
  71. package/dist/query-builder/impl.test.js.map +1 -1
  72. package/dist/rehydrate-from-eventlog.d.ts +15 -0
  73. package/dist/rehydrate-from-eventlog.d.ts.map +1 -0
  74. package/dist/{rehydrate-from-mutationlog.js → rehydrate-from-eventlog.js} +26 -25
  75. package/dist/rehydrate-from-eventlog.js.map +1 -0
  76. package/dist/schema/EventDef.d.ts +136 -0
  77. package/dist/schema/EventDef.d.ts.map +1 -0
  78. package/dist/schema/EventDef.js +58 -0
  79. package/dist/schema/EventDef.js.map +1 -0
  80. package/dist/schema/EventId.d.ts +7 -2
  81. package/dist/schema/EventId.d.ts.map +1 -1
  82. package/dist/schema/EventId.js +18 -3
  83. package/dist/schema/EventId.js.map +1 -1
  84. package/dist/schema/{MutationEvent.d.ts → LiveStoreEvent.d.ts} +56 -56
  85. package/dist/schema/LiveStoreEvent.d.ts.map +1 -0
  86. package/dist/schema/{MutationEvent.js → LiveStoreEvent.js} +25 -25
  87. package/dist/schema/LiveStoreEvent.js.map +1 -0
  88. package/dist/schema/client-document-def.d.ts +223 -0
  89. package/dist/schema/client-document-def.d.ts.map +1 -0
  90. package/dist/schema/client-document-def.js +170 -0
  91. package/dist/schema/client-document-def.js.map +1 -0
  92. package/dist/schema/client-document-def.test.d.ts +2 -0
  93. package/dist/schema/client-document-def.test.d.ts.map +1 -0
  94. package/dist/schema/client-document-def.test.js +201 -0
  95. package/dist/schema/client-document-def.test.js.map +1 -0
  96. package/dist/schema/db-schema/dsl/mod.d.ts.map +1 -1
  97. package/dist/schema/events.d.ts +2 -0
  98. package/dist/schema/events.d.ts.map +1 -0
  99. package/dist/schema/events.js +2 -0
  100. package/dist/schema/events.js.map +1 -0
  101. package/dist/schema/mod.d.ts +4 -3
  102. package/dist/schema/mod.d.ts.map +1 -1
  103. package/dist/schema/mod.js +4 -3
  104. package/dist/schema/mod.js.map +1 -1
  105. package/dist/schema/schema.d.ts +26 -22
  106. package/dist/schema/schema.d.ts.map +1 -1
  107. package/dist/schema/schema.js +45 -43
  108. package/dist/schema/schema.js.map +1 -1
  109. package/dist/schema/sqlite-state.d.ts +12 -0
  110. package/dist/schema/sqlite-state.d.ts.map +1 -0
  111. package/dist/schema/sqlite-state.js +36 -0
  112. package/dist/schema/sqlite-state.js.map +1 -0
  113. package/dist/schema/system-tables.d.ts +67 -98
  114. package/dist/schema/system-tables.d.ts.map +1 -1
  115. package/dist/schema/system-tables.js +62 -48
  116. package/dist/schema/system-tables.js.map +1 -1
  117. package/dist/schema/table-def.d.ts +26 -96
  118. package/dist/schema/table-def.d.ts.map +1 -1
  119. package/dist/schema/table-def.js +14 -64
  120. package/dist/schema/table-def.js.map +1 -1
  121. package/dist/schema/view.d.ts +3 -0
  122. package/dist/schema/view.d.ts.map +1 -0
  123. package/dist/schema/view.js +3 -0
  124. package/dist/schema/view.js.map +1 -0
  125. package/dist/schema-management/common.d.ts +4 -4
  126. package/dist/schema-management/common.d.ts.map +1 -1
  127. package/dist/schema-management/migrations.d.ts.map +1 -1
  128. package/dist/schema-management/migrations.js +6 -6
  129. package/dist/schema-management/migrations.js.map +1 -1
  130. package/dist/schema-management/validate-mutation-defs.d.ts +3 -3
  131. package/dist/schema-management/validate-mutation-defs.d.ts.map +1 -1
  132. package/dist/schema-management/validate-mutation-defs.js +17 -17
  133. package/dist/schema-management/validate-mutation-defs.js.map +1 -1
  134. package/dist/sync/ClientSessionSyncProcessor.d.ts +7 -7
  135. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  136. package/dist/sync/ClientSessionSyncProcessor.js +33 -30
  137. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  138. package/dist/sync/next/facts.d.ts +19 -19
  139. package/dist/sync/next/facts.d.ts.map +1 -1
  140. package/dist/sync/next/facts.js +2 -2
  141. package/dist/sync/next/facts.js.map +1 -1
  142. package/dist/sync/next/history-dag-common.d.ts +3 -3
  143. package/dist/sync/next/history-dag-common.d.ts.map +1 -1
  144. package/dist/sync/next/history-dag-common.js +1 -1
  145. package/dist/sync/next/history-dag-common.js.map +1 -1
  146. package/dist/sync/next/history-dag.js +1 -1
  147. package/dist/sync/next/history-dag.js.map +1 -1
  148. package/dist/sync/next/rebase-events.d.ts +7 -7
  149. package/dist/sync/next/rebase-events.d.ts.map +1 -1
  150. package/dist/sync/next/rebase-events.js +5 -5
  151. package/dist/sync/next/rebase-events.js.map +1 -1
  152. package/dist/sync/next/test/compact-events.calculator.test.js +38 -33
  153. package/dist/sync/next/test/compact-events.calculator.test.js.map +1 -1
  154. package/dist/sync/next/test/compact-events.test.js +71 -71
  155. package/dist/sync/next/test/compact-events.test.js.map +1 -1
  156. package/dist/sync/next/test/{mutation-fixtures.d.ts → event-fixtures.d.ts} +25 -25
  157. package/dist/sync/next/test/event-fixtures.d.ts.map +1 -0
  158. package/dist/sync/next/test/{mutation-fixtures.js → event-fixtures.js} +60 -25
  159. package/dist/sync/next/test/event-fixtures.js.map +1 -0
  160. package/dist/sync/next/test/mod.d.ts +1 -1
  161. package/dist/sync/next/test/mod.d.ts.map +1 -1
  162. package/dist/sync/next/test/mod.js +1 -1
  163. package/dist/sync/next/test/mod.js.map +1 -1
  164. package/dist/sync/sync.d.ts +3 -3
  165. package/dist/sync/sync.d.ts.map +1 -1
  166. package/dist/sync/syncstate.d.ts +30 -30
  167. package/dist/sync/syncstate.d.ts.map +1 -1
  168. package/dist/sync/syncstate.js +73 -40
  169. package/dist/sync/syncstate.js.map +1 -1
  170. package/dist/sync/syncstate.test.js +175 -184
  171. package/dist/sync/syncstate.test.js.map +1 -1
  172. package/dist/sync/validate-push-payload.d.ts +2 -2
  173. package/dist/sync/validate-push-payload.d.ts.map +1 -1
  174. package/dist/sync/validate-push-payload.js.map +1 -1
  175. package/dist/version.d.ts +1 -1
  176. package/dist/version.js +1 -1
  177. package/package.json +3 -3
  178. package/src/__tests__/fixture.ts +36 -15
  179. package/src/adapter-types.ts +33 -13
  180. package/src/debug-info.ts +1 -0
  181. package/src/devtools/devtools-messages-leader.ts +13 -13
  182. package/src/index.ts +2 -5
  183. package/src/leader-thread/LeaderSyncProcessor.ts +210 -138
  184. package/src/leader-thread/{apply-mutation.ts → apply-event.ts} +61 -61
  185. package/src/leader-thread/eventlog.ts +199 -0
  186. package/src/leader-thread/leader-worker-devtools.ts +22 -19
  187. package/src/leader-thread/make-leader-thread-layer.ts +51 -29
  188. package/src/leader-thread/mod.ts +1 -1
  189. package/src/leader-thread/recreate-db.ts +6 -8
  190. package/src/leader-thread/types.ts +15 -16
  191. package/src/materializer-helper.ts +110 -0
  192. package/src/query-builder/api.ts +77 -103
  193. package/src/query-builder/astToSql.ts +68 -39
  194. package/src/query-builder/impl.test.ts +239 -42
  195. package/src/query-builder/impl.ts +72 -56
  196. package/src/{rehydrate-from-mutationlog.ts → rehydrate-from-eventlog.ts} +35 -38
  197. package/src/schema/EventDef.ts +216 -0
  198. package/src/schema/EventId.ts +23 -4
  199. package/src/schema/{MutationEvent.ts → LiveStoreEvent.ts} +68 -69
  200. package/src/schema/client-document-def.test.ts +239 -0
  201. package/src/schema/client-document-def.ts +444 -0
  202. package/src/schema/db-schema/dsl/mod.ts +0 -1
  203. package/src/schema/events.ts +1 -0
  204. package/src/schema/mod.ts +4 -3
  205. package/src/schema/schema.ts +78 -68
  206. package/src/schema/sqlite-state.ts +62 -0
  207. package/src/schema/system-tables.ts +42 -53
  208. package/src/schema/table-def.ts +51 -209
  209. package/src/schema/view.ts +2 -0
  210. package/src/schema-management/common.ts +4 -4
  211. package/src/schema-management/migrations.ts +8 -9
  212. package/src/schema-management/validate-mutation-defs.ts +22 -24
  213. package/src/sync/ClientSessionSyncProcessor.ts +41 -36
  214. package/src/sync/next/facts.ts +31 -32
  215. package/src/sync/next/history-dag-common.ts +4 -4
  216. package/src/sync/next/history-dag.ts +1 -1
  217. package/src/sync/next/rebase-events.ts +13 -13
  218. package/src/sync/next/test/compact-events.calculator.test.ts +45 -45
  219. package/src/sync/next/test/compact-events.test.ts +73 -73
  220. package/src/sync/next/test/event-fixtures.ts +219 -0
  221. package/src/sync/next/test/mod.ts +1 -1
  222. package/src/sync/sync.ts +3 -3
  223. package/src/sync/syncstate.test.ts +180 -189
  224. package/src/sync/syncstate.ts +162 -100
  225. package/src/sync/validate-push-payload.ts +2 -2
  226. package/src/version.ts +1 -1
  227. package/tsconfig.json +1 -0
  228. package/dist/derived-mutations.d.ts +0 -109
  229. package/dist/derived-mutations.d.ts.map +0 -1
  230. package/dist/derived-mutations.js +0 -54
  231. package/dist/derived-mutations.js.map +0 -1
  232. package/dist/derived-mutations.test.d.ts +0 -2
  233. package/dist/derived-mutations.test.d.ts.map +0 -1
  234. package/dist/derived-mutations.test.js +0 -93
  235. package/dist/derived-mutations.test.js.map +0 -1
  236. package/dist/init-singleton-tables.d.ts +0 -4
  237. package/dist/init-singleton-tables.d.ts.map +0 -1
  238. package/dist/init-singleton-tables.js +0 -16
  239. package/dist/init-singleton-tables.js.map +0 -1
  240. package/dist/leader-thread/apply-mutation.d.ts.map +0 -1
  241. package/dist/leader-thread/apply-mutation.js.map +0 -1
  242. package/dist/leader-thread/mutationlog.d.ts +0 -27
  243. package/dist/leader-thread/mutationlog.d.ts.map +0 -1
  244. package/dist/leader-thread/mutationlog.js +0 -124
  245. package/dist/leader-thread/mutationlog.js.map +0 -1
  246. package/dist/mutation.d.ts +0 -20
  247. package/dist/mutation.d.ts.map +0 -1
  248. package/dist/mutation.js +0 -68
  249. package/dist/mutation.js.map +0 -1
  250. package/dist/query-info.d.ts +0 -41
  251. package/dist/query-info.d.ts.map +0 -1
  252. package/dist/query-info.js +0 -7
  253. package/dist/query-info.js.map +0 -1
  254. package/dist/rehydrate-from-mutationlog.d.ts +0 -15
  255. package/dist/rehydrate-from-mutationlog.d.ts.map +0 -1
  256. package/dist/rehydrate-from-mutationlog.js.map +0 -1
  257. package/dist/schema/MutationEvent.d.ts.map +0 -1
  258. package/dist/schema/MutationEvent.js.map +0 -1
  259. package/dist/schema/mutations.d.ts +0 -115
  260. package/dist/schema/mutations.d.ts.map +0 -1
  261. package/dist/schema/mutations.js +0 -42
  262. package/dist/schema/mutations.js.map +0 -1
  263. package/dist/sync/next/test/mutation-fixtures.d.ts.map +0 -1
  264. package/dist/sync/next/test/mutation-fixtures.js.map +0 -1
  265. package/src/derived-mutations.test.ts +0 -101
  266. package/src/derived-mutations.ts +0 -170
  267. package/src/init-singleton-tables.ts +0 -24
  268. package/src/leader-thread/mutationlog.ts +0 -202
  269. package/src/mutation.ts +0 -108
  270. package/src/query-info.ts +0 -83
  271. package/src/schema/mutations.ts +0 -193
  272. package/src/sync/next/test/mutation-fixtures.ts +0 -228
@@ -20,39 +20,36 @@ import { UnexpectedError } from '../adapter-types.js'
20
20
  import type { LiveStoreSchema } from '../schema/mod.js'
21
21
  import {
22
22
  EventId,
23
- getMutationDef,
23
+ getEventDef,
24
24
  LEADER_MERGE_COUNTER_TABLE,
25
- MutationEvent,
25
+ LiveStoreEvent,
26
26
  SESSION_CHANGESET_META_TABLE,
27
27
  } from '../schema/mod.js'
28
28
  import { LeaderAheadError } from '../sync/sync.js'
29
29
  import * as SyncState from '../sync/syncstate.js'
30
30
  import { sql } from '../util.js'
31
- import { rollback } from './apply-mutation.js'
32
- import * as Mutationlog from './mutationlog.js'
31
+ import { rollback } from './apply-event.js'
32
+ import * as Eventlog from './eventlog.js'
33
33
  import type { InitialBlockingSyncContext, LeaderSyncProcessor } from './types.js'
34
34
  import { LeaderThreadCtx } from './types.js'
35
35
 
36
- export const BACKEND_PUSH_BATCH_SIZE = 50
37
- export const LOCAL_PUSH_BATCH_SIZE = 10
38
-
39
36
  type LocalPushQueueItem = [
40
- mutationEvent: MutationEvent.EncodedWithMeta,
37
+ event: LiveStoreEvent.EncodedWithMeta,
41
38
  deferred: Deferred.Deferred<void, LeaderAheadError> | undefined,
42
39
  /** Used to determine whether the batch has become invalid due to a rejected local push batch */
43
40
  generation: number,
44
41
  ]
45
42
 
46
43
  /**
47
- * The LeaderSyncProcessor manages synchronization of mutations between
44
+ * The LeaderSyncProcessor manages synchronization of events between
48
45
  * the local state and the sync backend, ensuring efficient and orderly processing.
49
46
  *
50
47
  * In the LeaderSyncProcessor, pulling always has precedence over pushing.
51
48
  *
52
49
  * Responsibilities:
53
- * - Queueing incoming local mutations in a localPushesQueue.
54
- * - Broadcasting mutations to client sessions via pull queues.
55
- * - Pushing mutations to the sync backend.
50
+ * - Queueing incoming local events in a localPushesQueue.
51
+ * - Broadcasting events to client sessions via pull queues.
52
+ * - Pushing events to the sync backend.
56
53
  *
57
54
  * Notes:
58
55
  *
@@ -60,12 +57,12 @@ type LocalPushQueueItem = [
60
57
  * - localPushesQueue:
61
58
  * - Maintains events in ascending order.
62
59
  * - Uses `Deferred` objects to resolve/reject events based on application success.
63
- * - Processes events from the queue, applying mutations in batches.
60
+ * - Processes events from the queue, applying events in batches.
64
61
  * - Controlled by a `Latch` to manage execution flow.
65
62
  * - The latch closes on pull receipt and re-opens post-pull completion.
66
63
  * - Processes up to `maxBatchSize` events per cycle.
67
64
  *
68
- * Currently we're advancing the db read model and mutation log in lockstep, but we could also decouple this in the future
65
+ * Currently we're advancing the db read model and eventlog in lockstep, but we could also decouple this in the future
69
66
  *
70
67
  * Tricky concurrency scenarios:
71
68
  * - Queued local push batches becoming invalid due to a prior local push item being rejected.
@@ -74,31 +71,50 @@ type LocalPushQueueItem = [
74
71
  */
75
72
  export const makeLeaderSyncProcessor = ({
76
73
  schema,
77
- dbMutationLogMissing,
78
- dbMutationLog,
74
+ dbEventlogMissing,
75
+ dbEventlog,
79
76
  dbReadModel,
80
77
  dbReadModelMissing,
81
78
  initialBlockingSyncContext,
82
79
  onError,
80
+ params,
81
+ testing,
83
82
  }: {
84
83
  schema: LiveStoreSchema
85
- /** Only used to know whether we can safely query dbMutationLog during setup execution */
86
- dbMutationLogMissing: boolean
87
- dbMutationLog: SqliteDb
84
+ /** Only used to know whether we can safely query dbEventlog during setup execution */
85
+ dbEventlogMissing: boolean
86
+ dbEventlog: SqliteDb
88
87
  dbReadModel: SqliteDb
89
88
  /** Only used to know whether we can safely query dbReadModel during setup execution */
90
89
  dbReadModelMissing: boolean
91
90
  initialBlockingSyncContext: InitialBlockingSyncContext
92
91
  onError: 'shutdown' | 'ignore'
92
+ params: {
93
+ /**
94
+ * @default 10
95
+ */
96
+ localPushBatchSize?: number
97
+ /**
98
+ * @default 50
99
+ */
100
+ backendPushBatchSize?: number
101
+ }
102
+ testing: {
103
+ delays?: {
104
+ localPushProcessing?: Effect.Effect<void>
105
+ }
106
+ }
93
107
  }): Effect.Effect<LeaderSyncProcessor, UnexpectedError, Scope.Scope> =>
94
108
  Effect.gen(function* () {
95
- const syncBackendPushQueue = yield* BucketQueue.make<MutationEvent.EncodedWithMeta>()
109
+ const syncBackendPushQueue = yield* BucketQueue.make<LiveStoreEvent.EncodedWithMeta>()
110
+ const localPushBatchSize = params.localPushBatchSize ?? 10
111
+ const backendPushBatchSize = params.backendPushBatchSize ?? 50
96
112
 
97
113
  const syncStateSref = yield* SubscriptionRef.make<SyncState.SyncState | undefined>(undefined)
98
114
 
99
- const isClientEvent = (mutationEventEncoded: MutationEvent.EncodedWithMeta) => {
100
- const mutationDef = getMutationDef(schema, mutationEventEncoded.mutation)
101
- return mutationDef.options.clientOnly
115
+ const isClientEvent = (eventEncoded: LiveStoreEvent.EncodedWithMeta) => {
116
+ const eventDef = getEventDef(schema, eventEncoded.name)
117
+ return eventDef.eventDef.options.clientOnly
102
118
  }
103
119
 
104
120
  const connectedClientSessionPullQueues = yield* makePullQueueSet
@@ -108,10 +124,12 @@ export const makeLeaderSyncProcessor = ({
108
124
  * If a local-push batch is rejected, all subsequent push queue items with the same generation are also rejected,
109
125
  * even if they would be valid on their own.
110
126
  */
127
+ // TODO get rid of this in favour of the `mergeGeneration` event id field
111
128
  const currentLocalPushGenerationRef = { current: 0 }
112
129
 
130
+ type MergeCounter = number
113
131
  const mergeCounterRef = { current: dbReadModelMissing ? 0 : yield* getMergeCounterFromDb(dbReadModel) }
114
- const mergePayloads = new Map<number, typeof SyncState.PayloadUpstream.Type>()
132
+ const mergePayloads = new Map<MergeCounter, typeof SyncState.PayloadUpstream.Type>()
115
133
 
116
134
  // This context depends on data from `boot`, we should find a better implementation to avoid this ref indirection.
117
135
  const ctxRef = {
@@ -129,12 +147,29 @@ export const makeLeaderSyncProcessor = ({
129
147
  const localPushesLatch = yield* Effect.makeLatch(true)
130
148
  const pullLatch = yield* Effect.makeLatch(true)
131
149
 
150
+ /**
151
+ * Additionally to the `syncStateSref` we also need the `pushHeadRef` in order to prevent old/duplicate
152
+ * events from being pushed in a scenario like this:
153
+ * - client session A pushes e1
154
+ * - leader sync processor takes a bit and hasn't yet taken e1 from the localPushesQueue
155
+ * - client session B also pushes e1 (which should be rejected)
156
+ *
157
+ * Thus the purpose of the pushHeadRef is to guard the integrity of the local push queue
158
+ */
159
+ const pushHeadRef = { current: EventId.ROOT }
160
+ const advancePushHead = (eventId: EventId.EventId) => {
161
+ pushHeadRef.current = EventId.max(pushHeadRef.current, eventId)
162
+ }
163
+
132
164
  // NOTE: New events are only pushed to sync backend after successful local push processing
133
165
  const push: LeaderSyncProcessor['push'] = (newEvents, options) =>
134
166
  Effect.gen(function* () {
135
- // TODO validate batch
136
167
  if (newEvents.length === 0) return
137
168
 
169
+ yield* validatePushBatch(newEvents, pushHeadRef.current)
170
+
171
+ advancePushHead(newEvents.at(-1)!.id)
172
+
138
173
  const waitForProcessing = options?.waitForProcessing ?? false
139
174
  const generation = currentLocalPushGenerationRef.current
140
175
 
@@ -142,20 +177,18 @@ export const makeLeaderSyncProcessor = ({
142
177
  const deferreds = yield* Effect.forEach(newEvents, () => Deferred.make<void, LeaderAheadError>())
143
178
 
144
179
  const items = newEvents.map(
145
- (mutationEventEncoded, i) => [mutationEventEncoded, deferreds[i], generation] as LocalPushQueueItem,
180
+ (eventEncoded, i) => [eventEncoded, deferreds[i], generation] as LocalPushQueueItem,
146
181
  )
147
182
 
148
183
  yield* BucketQueue.offerAll(localPushesQueue, items)
149
184
 
150
185
  yield* Effect.all(deferreds)
151
186
  } else {
152
- const items = newEvents.map(
153
- (mutationEventEncoded) => [mutationEventEncoded, undefined, generation] as LocalPushQueueItem,
154
- )
187
+ const items = newEvents.map((eventEncoded) => [eventEncoded, undefined, generation] as LocalPushQueueItem)
155
188
  yield* BucketQueue.offerAll(localPushesQueue, items)
156
189
  }
157
190
  }).pipe(
158
- Effect.withSpan('@livestore/common:LeaderSyncProcessor:local-push', {
191
+ Effect.withSpan('@livestore/common:LeaderSyncProcessor:push', {
159
192
  attributes: {
160
193
  batchSize: newEvents.length,
161
194
  batch: TRACE_VERBOSE ? newEvents : undefined,
@@ -164,26 +197,22 @@ export const makeLeaderSyncProcessor = ({
164
197
  }),
165
198
  )
166
199
 
167
- const pushPartial: LeaderSyncProcessor['pushPartial'] = ({
168
- mutationEvent: { mutation, args },
169
- clientId,
170
- sessionId,
171
- }) =>
200
+ const pushPartial: LeaderSyncProcessor['pushPartial'] = ({ event: { name, args }, clientId, sessionId }) =>
172
201
  Effect.gen(function* () {
173
202
  const syncState = yield* syncStateSref
174
203
  if (syncState === undefined) return shouldNeverHappen('Not initialized')
175
204
 
176
- const mutationDef = getMutationDef(schema, mutation)
205
+ const eventDef = getEventDef(schema, name)
177
206
 
178
- const mutationEventEncoded = new MutationEvent.EncodedWithMeta({
179
- mutation,
207
+ const eventEncoded = new LiveStoreEvent.EncodedWithMeta({
208
+ name,
180
209
  args,
181
210
  clientId,
182
211
  sessionId,
183
- ...EventId.nextPair(syncState.localHead, mutationDef.options.clientOnly),
212
+ ...EventId.nextPair(syncState.localHead, eventDef.eventDef.options.clientOnly),
184
213
  })
185
214
 
186
- yield* push([mutationEventEncoded])
215
+ yield* push([eventEncoded])
187
216
  }).pipe(Effect.catchTag('LeaderAheadError', Effect.orDie))
188
217
 
189
218
  // Starts various background loops
@@ -200,10 +229,9 @@ export const makeLeaderSyncProcessor = ({
200
229
  runtime,
201
230
  }
202
231
 
203
- const initialBackendHead = dbMutationLogMissing
204
- ? EventId.ROOT.global
205
- : Mutationlog.getBackendHeadFromDb(dbMutationLog)
206
- const initialLocalHead = dbMutationLogMissing ? EventId.ROOT : Mutationlog.getClientHeadFromDb(dbMutationLog)
232
+ const initialLocalHead = dbEventlogMissing ? EventId.ROOT : Eventlog.getClientHeadFromDb(dbEventlog)
233
+
234
+ const initialBackendHead = dbEventlogMissing ? EventId.ROOT.global : Eventlog.getBackendHeadFromDb(dbEventlog)
207
235
 
208
236
  if (initialBackendHead > initialLocalHead.global) {
209
237
  return shouldNeverHappen(
@@ -211,12 +239,12 @@ export const makeLeaderSyncProcessor = ({
211
239
  )
212
240
  }
213
241
 
214
- const pendingMutationEvents = dbMutationLogMissing
242
+ const pendingEvents = dbEventlogMissing
215
243
  ? []
216
- : yield* Mutationlog.getMutationEventsSince({ global: initialBackendHead, client: EventId.clientDefault })
244
+ : yield* Eventlog.getEventsSince({ global: initialBackendHead, client: EventId.clientDefault })
217
245
 
218
246
  const initialSyncState = new SyncState.SyncState({
219
- pending: pendingMutationEvents,
247
+ pending: pendingEvents,
220
248
  upstreamHead: { global: initialBackendHead, client: EventId.clientDefault },
221
249
  localHead: initialLocalHead,
222
250
  })
@@ -225,16 +253,16 @@ export const makeLeaderSyncProcessor = ({
225
253
  yield* SubscriptionRef.set(syncStateSref, initialSyncState)
226
254
 
227
255
  // Rehydrate sync queue
228
- if (pendingMutationEvents.length > 0) {
229
- const globalPendingMutationEvents = pendingMutationEvents
230
- // Don't sync clientOnly mutations
231
- .filter((mutationEventEncoded) => {
232
- const mutationDef = getMutationDef(schema, mutationEventEncoded.mutation)
233
- return mutationDef.options.clientOnly === false
256
+ if (pendingEvents.length > 0) {
257
+ const globalPendingEvents = pendingEvents
258
+ // Don't sync clientOnly events
259
+ .filter((eventEncoded) => {
260
+ const eventDef = getEventDef(schema, eventEncoded.name)
261
+ return eventDef.eventDef.options.clientOnly === false
234
262
  })
235
263
 
236
- if (globalPendingMutationEvents.length > 0) {
237
- yield* BucketQueue.offerAll(syncBackendPushQueue, globalPendingMutationEvents)
264
+ if (globalPendingEvents.length > 0) {
265
+ yield* BucketQueue.offerAll(syncBackendPushQueue, globalPendingEvents)
238
266
  }
239
267
  }
240
268
 
@@ -259,18 +287,21 @@ export const makeLeaderSyncProcessor = ({
259
287
  connectedClientSessionPullQueues,
260
288
  mergeCounterRef,
261
289
  mergePayloads,
290
+ localPushBatchSize,
291
+ testing: {
292
+ delay: testing?.delays?.localPushProcessing,
293
+ },
262
294
  }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped)
263
295
 
264
296
  const backendPushingFiberHandle = yield* FiberHandle.make()
297
+ const backendPushingEffect = backgroundBackendPushing({
298
+ syncBackendPushQueue,
299
+ otelSpan,
300
+ devtoolsLatch: ctxRef.current?.devtoolsLatch,
301
+ backendPushBatchSize,
302
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError))
265
303
 
266
- yield* FiberHandle.run(
267
- backendPushingFiberHandle,
268
- backgroundBackendPushing({
269
- syncBackendPushQueue,
270
- otelSpan,
271
- devtoolsLatch: ctxRef.current?.devtoolsLatch,
272
- }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError)),
273
- )
304
+ yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect)
274
305
 
275
306
  yield* backgroundBackendPulling({
276
307
  initialBackendHead,
@@ -285,14 +316,7 @@ export const makeLeaderSyncProcessor = ({
285
316
  yield* BucketQueue.offerAll(syncBackendPushQueue, filteredRebasedPending)
286
317
 
287
318
  // Restart pushing fiber
288
- yield* FiberHandle.run(
289
- backendPushingFiberHandle,
290
- backgroundBackendPushing({
291
- syncBackendPushQueue,
292
- otelSpan,
293
- devtoolsLatch: ctxRef.current?.devtoolsLatch,
294
- }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError)),
295
- )
319
+ yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect)
296
320
  }),
297
321
  syncStateSref,
298
322
  localPushesLatch,
@@ -303,26 +327,41 @@ export const makeLeaderSyncProcessor = ({
303
327
  connectedClientSessionPullQueues,
304
328
  mergeCounterRef,
305
329
  mergePayloads,
330
+ advancePushHead,
306
331
  }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped)
307
332
 
308
333
  return { initialLeaderHead: initialLocalHead }
309
334
  }).pipe(Effect.withSpanScoped('@livestore/common:LeaderSyncProcessor:boot'))
310
335
 
311
- const pull: LeaderSyncProcessor['pull'] = ({ cursor }) => {
312
- return Effect.gen(function* () {
336
+ const pull: LeaderSyncProcessor['pull'] = ({ cursor }) =>
337
+ Effect.gen(function* () {
313
338
  const queue = yield* pullQueue({ cursor })
314
339
  return Stream.fromQueue(queue)
315
340
  }).pipe(Stream.unwrapScoped)
316
- }
317
341
 
318
342
  const pullQueue: LeaderSyncProcessor['pullQueue'] = ({ cursor }) => {
319
343
  const runtime = ctxRef.current?.runtime ?? shouldNeverHappen('Not initialized')
320
344
  return Effect.gen(function* () {
321
- const queue = yield* connectedClientSessionPullQueues.makeQueue(cursor)
345
+ const queue = yield* connectedClientSessionPullQueues.makeQueue
322
346
  const payloadsSinceCursor = Array.from(mergePayloads.entries())
323
347
  .map(([mergeCounter, payload]) => ({ payload, mergeCounter }))
324
- .filter(({ mergeCounter }) => mergeCounter > cursor)
348
+ .filter(({ mergeCounter }) => mergeCounter > cursor.mergeCounter)
325
349
  .toSorted((a, b) => a.mergeCounter - b.mergeCounter)
350
+ .map(({ payload, mergeCounter }) => {
351
+ if (payload._tag === 'upstream-advance') {
352
+ return {
353
+ payload: {
354
+ _tag: 'upstream-advance' as const,
355
+ newEvents: ReadonlyArray.dropWhile(payload.newEvents, (eventEncoded) =>
356
+ EventId.isGreaterThanOrEqual(cursor.eventId, eventEncoded.id),
357
+ ),
358
+ },
359
+ mergeCounter,
360
+ }
361
+ } else {
362
+ return { payload, mergeCounter }
363
+ }
364
+ })
326
365
 
327
366
  yield* queue.offerAll(payloadsSinceCursor)
328
367
 
@@ -363,24 +402,33 @@ const backgroundApplyLocalPushes = ({
363
402
  connectedClientSessionPullQueues,
364
403
  mergeCounterRef,
365
404
  mergePayloads,
405
+ localPushBatchSize,
406
+ testing,
366
407
  }: {
367
408
  pullLatch: Effect.Latch
368
409
  localPushesLatch: Effect.Latch
369
410
  localPushesQueue: BucketQueue.BucketQueue<LocalPushQueueItem>
370
411
  syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
371
- syncBackendPushQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
412
+ syncBackendPushQueue: BucketQueue.BucketQueue<LiveStoreEvent.EncodedWithMeta>
372
413
  schema: LiveStoreSchema
373
- isClientEvent: (mutationEventEncoded: MutationEvent.EncodedWithMeta) => boolean
414
+ isClientEvent: (eventEncoded: LiveStoreEvent.EncodedWithMeta) => boolean
374
415
  otelSpan: otel.Span | undefined
375
416
  currentLocalPushGenerationRef: { current: number }
376
417
  connectedClientSessionPullQueues: PullQueueSet
377
418
  mergeCounterRef: { current: number }
378
419
  mergePayloads: Map<number, typeof SyncState.PayloadUpstream.Type>
420
+ localPushBatchSize: number
421
+ testing: {
422
+ delay: Effect.Effect<void> | undefined
423
+ }
379
424
  }) =>
380
425
  Effect.gen(function* () {
381
426
  while (true) {
382
- // TODO make batch size configurable
383
- const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, LOCAL_PUSH_BATCH_SIZE)
427
+ if (testing.delay !== undefined) {
428
+ yield* testing.delay.pipe(Effect.withSpan('localPushProcessingDelay'))
429
+ }
430
+
431
+ const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, localPushBatchSize)
384
432
 
385
433
  // Wait for the backend pulling to finish
386
434
  yield* localPushesLatch.await
@@ -392,7 +440,7 @@ const backgroundApplyLocalPushes = ({
392
440
  // It's important that we filter after we got localPushesLatch, otherwise we might filter with the old generation
393
441
  const filteredBatchItems = batchItems
394
442
  .filter(([_1, _2, generation]) => generation === currentLocalPushGenerationRef.current)
395
- .map(([mutationEventEncoded, deferred]) => [mutationEventEncoded, deferred] as const)
443
+ .map(([eventEncoded, deferred]) => [eventEncoded, deferred] as const)
396
444
 
397
445
  if (filteredBatchItems.length === 0) {
398
446
  // console.log('dropping old-gen batch', currentLocalPushGenerationRef.current)
@@ -410,14 +458,14 @@ const backgroundApplyLocalPushes = ({
410
458
  syncState,
411
459
  payload: { _tag: 'local-push', newEvents },
412
460
  isClientEvent,
413
- isEqualEvent: MutationEvent.isEqualEncoded,
461
+ isEqualEvent: LiveStoreEvent.isEqualEncoded,
414
462
  })
415
463
 
416
464
  const mergeCounter = yield* incrementMergeCounter(mergeCounterRef)
417
465
 
418
466
  switch (mergeResult._tag) {
419
467
  case 'unexpected-error': {
420
- otelSpan?.addEvent(`merge[${mergeCounter}]:local-push:unexpected-error`, {
468
+ otelSpan?.addEvent(`[${mergeCounter}]:push:unexpected-error`, {
421
469
  batchSize: newEvents.length,
422
470
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
423
471
  })
@@ -427,7 +475,7 @@ const backgroundApplyLocalPushes = ({
427
475
  return shouldNeverHappen('The leader thread should never have to rebase due to a local push')
428
476
  }
429
477
  case 'reject': {
430
- otelSpan?.addEvent(`merge[${mergeCounter}]:local-push:reject`, {
478
+ otelSpan?.addEvent(`[${mergeCounter}]:push:reject`, {
431
479
  batchSize: newEvents.length,
432
480
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
433
481
  })
@@ -491,28 +539,28 @@ const backgroundApplyLocalPushes = ({
491
539
  })
492
540
  mergePayloads.set(mergeCounter, SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }))
493
541
 
494
- otelSpan?.addEvent(`merge[${mergeCounter}]:local-push:advance`, {
542
+ otelSpan?.addEvent(`[${mergeCounter}]:push:advance`, {
495
543
  batchSize: newEvents.length,
496
544
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
497
545
  })
498
546
 
499
- // Don't sync clientOnly mutations
500
- const filteredBatch = mergeResult.newEvents.filter((mutationEventEncoded) => {
501
- const mutationDef = getMutationDef(schema, mutationEventEncoded.mutation)
502
- return mutationDef.options.clientOnly === false
547
+ // Don't sync clientOnly events
548
+ const filteredBatch = mergeResult.newEvents.filter((eventEncoded) => {
549
+ const eventDef = getEventDef(schema, eventEncoded.name)
550
+ return eventDef.eventDef.options.clientOnly === false
503
551
  })
504
552
 
505
553
  yield* BucketQueue.offerAll(syncBackendPushQueue, filteredBatch)
506
554
 
507
- yield* applyMutationsBatch({ batchItems: newEvents, deferreds })
555
+ yield* applyEventsBatch({ batchItems: mergeResult.newEvents, deferreds })
508
556
 
509
557
  // Allow the backend pulling to start
510
558
  yield* pullLatch.open
511
559
  }
512
560
  })
513
561
 
514
- type ApplyMutationsBatch = (_: {
515
- batchItems: ReadonlyArray<MutationEvent.EncodedWithMeta>
562
+ type ApplyEventsBatch = (_: {
563
+ batchItems: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>
516
564
  /**
517
565
  * The deferreds are used by the caller to know when the mutation has been processed.
518
566
  * Indexes are aligned with `batchItems`
@@ -521,13 +569,13 @@ type ApplyMutationsBatch = (_: {
521
569
  }) => Effect.Effect<void, UnexpectedError, LeaderThreadCtx>
522
570
 
523
571
  // TODO how to handle errors gracefully
524
- const applyMutationsBatch: ApplyMutationsBatch = ({ batchItems, deferreds }) =>
572
+ const applyEventsBatch: ApplyEventsBatch = ({ batchItems, deferreds }) =>
525
573
  Effect.gen(function* () {
526
- const { dbReadModel: db, dbMutationLog, applyMutation } = yield* LeaderThreadCtx
574
+ const { dbReadModel: db, dbEventlog, applyEvent } = yield* LeaderThreadCtx
527
575
 
528
- // NOTE We always start a transaction to ensure consistency between db and mutation log (even for single-item batches)
576
+ // NOTE We always start a transaction to ensure consistency between db and eventlog (even for single-item batches)
529
577
  db.execute('BEGIN TRANSACTION', undefined) // Start the transaction
530
- dbMutationLog.execute('BEGIN TRANSACTION', undefined) // Start the transaction
578
+ dbEventlog.execute('BEGIN TRANSACTION', undefined) // Start the transaction
531
579
 
532
580
  yield* Effect.addFinalizer((exit) =>
533
581
  Effect.gen(function* () {
@@ -535,12 +583,12 @@ const applyMutationsBatch: ApplyMutationsBatch = ({ batchItems, deferreds }) =>
535
583
 
536
584
  // Rollback in case of an error
537
585
  db.execute('ROLLBACK', undefined)
538
- dbMutationLog.execute('ROLLBACK', undefined)
586
+ dbEventlog.execute('ROLLBACK', undefined)
539
587
  }),
540
588
  )
541
589
 
542
590
  for (let i = 0; i < batchItems.length; i++) {
543
- const { sessionChangeset } = yield* applyMutation(batchItems[i]!)
591
+ const { sessionChangeset } = yield* applyEvent(batchItems[i]!)
544
592
  batchItems[i]!.meta.sessionChangeset = sessionChangeset
545
593
 
546
594
  if (deferreds?.[i] !== undefined) {
@@ -549,11 +597,11 @@ const applyMutationsBatch: ApplyMutationsBatch = ({ batchItems, deferreds }) =>
549
597
  }
550
598
 
551
599
  db.execute('COMMIT', undefined) // Commit the transaction
552
- dbMutationLog.execute('COMMIT', undefined) // Commit the transaction
600
+ dbEventlog.execute('COMMIT', undefined) // Commit the transaction
553
601
  }).pipe(
554
602
  Effect.uninterruptible,
555
603
  Effect.scoped,
556
- Effect.withSpan('@livestore/common:LeaderSyncProcessor:applyMutationItems', {
604
+ Effect.withSpan('@livestore/common:LeaderSyncProcessor:applyEventItems', {
557
605
  attributes: { batchSize: batchItems.length },
558
606
  }),
559
607
  Effect.tapCauseLogPretty,
@@ -573,11 +621,12 @@ const backgroundBackendPulling = ({
573
621
  connectedClientSessionPullQueues,
574
622
  mergeCounterRef,
575
623
  mergePayloads,
624
+ advancePushHead,
576
625
  }: {
577
626
  initialBackendHead: EventId.GlobalEventId
578
- isClientEvent: (mutationEventEncoded: MutationEvent.EncodedWithMeta) => boolean
627
+ isClientEvent: (eventEncoded: LiveStoreEvent.EncodedWithMeta) => boolean
579
628
  restartBackendPushing: (
580
- filteredRebasedPending: ReadonlyArray<MutationEvent.EncodedWithMeta>,
629
+ filteredRebasedPending: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>,
581
630
  ) => Effect.Effect<void, UnexpectedError, LeaderThreadCtx | HttpClient.HttpClient>
582
631
  otelSpan: otel.Span | undefined
583
632
  syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
@@ -588,13 +637,14 @@ const backgroundBackendPulling = ({
588
637
  connectedClientSessionPullQueues: PullQueueSet
589
638
  mergeCounterRef: { current: number }
590
639
  mergePayloads: Map<number, typeof SyncState.PayloadUpstream.Type>
640
+ advancePushHead: (eventId: EventId.EventId) => void
591
641
  }) =>
592
642
  Effect.gen(function* () {
593
- const { syncBackend, dbReadModel: db, dbMutationLog, schema } = yield* LeaderThreadCtx
643
+ const { syncBackend, dbReadModel: db, dbEventlog, schema } = yield* LeaderThreadCtx
594
644
 
595
645
  if (syncBackend === undefined) return
596
646
 
597
- const onNewPullChunk = (newEvents: MutationEvent.EncodedWithMeta[], remaining: number) =>
647
+ const onNewPullChunk = (newEvents: LiveStoreEvent.EncodedWithMeta[], remaining: number) =>
598
648
  Effect.gen(function* () {
599
649
  if (newEvents.length === 0) return
600
650
 
@@ -615,7 +665,7 @@ const backgroundBackendPulling = ({
615
665
  syncState,
616
666
  payload: SyncState.PayloadUpstreamAdvance.make({ newEvents }),
617
667
  isClientEvent,
618
- isEqualEvent: MutationEvent.isEqualEncoded,
668
+ isEqualEvent: LiveStoreEvent.isEqualEncoded,
619
669
  ignoreClientEvents: true,
620
670
  })
621
671
 
@@ -624,7 +674,7 @@ const backgroundBackendPulling = ({
624
674
  if (mergeResult._tag === 'reject') {
625
675
  return shouldNeverHappen('The leader thread should never reject upstream advances')
626
676
  } else if (mergeResult._tag === 'unexpected-error') {
627
- otelSpan?.addEvent(`merge[${mergeCounter}]:backend-pull:unexpected-error`, {
677
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:unexpected-error`, {
628
678
  newEventsCount: newEvents.length,
629
679
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
630
680
  })
@@ -633,24 +683,24 @@ const backgroundBackendPulling = ({
633
683
 
634
684
  const newBackendHead = newEvents.at(-1)!.id
635
685
 
636
- Mutationlog.updateBackendHead(dbMutationLog, newBackendHead)
686
+ Eventlog.updateBackendHead(dbEventlog, newBackendHead)
637
687
 
638
688
  if (mergeResult._tag === 'rebase') {
639
- otelSpan?.addEvent(`merge[${mergeCounter}]:backend-pull:rebase`, {
689
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:rebase`, {
640
690
  newEventsCount: newEvents.length,
641
691
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
642
692
  rollbackCount: mergeResult.rollbackEvents.length,
643
693
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
644
694
  })
645
695
 
646
- const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((mutationEvent) => {
647
- const mutationDef = getMutationDef(schema, mutationEvent.mutation)
648
- return mutationDef.options.clientOnly === false
696
+ const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
697
+ const eventDef = getEventDef(schema, event.name)
698
+ return eventDef.eventDef.options.clientOnly === false
649
699
  })
650
700
  yield* restartBackendPushing(globalRebasedPendingEvents)
651
701
 
652
702
  if (mergeResult.rollbackEvents.length > 0) {
653
- yield* rollback({ db, dbMutationLog, eventIdsToRollback: mergeResult.rollbackEvents.map((_) => _.id) })
703
+ yield* rollback({ db, dbEventlog, eventIdsToRollback: mergeResult.rollbackEvents.map((_) => _.id) })
654
704
  }
655
705
 
656
706
  yield* connectedClientSessionPullQueues.offer({
@@ -668,7 +718,7 @@ const backgroundBackendPulling = ({
668
718
  }),
669
719
  )
670
720
  } else {
671
- otelSpan?.addEvent(`merge[${mergeCounter}]:backend-pull:advance`, {
721
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:advance`, {
672
722
  newEventsCount: newEvents.length,
673
723
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
674
724
  })
@@ -682,19 +732,19 @@ const backgroundBackendPulling = ({
682
732
  if (mergeResult.confirmedEvents.length > 0) {
683
733
  // `mergeResult.confirmedEvents` don't contain the correct sync metadata, so we need to use
684
734
  // `newEvents` instead which we filter via `mergeResult.confirmedEvents`
685
- const confirmedNewEvents = newEvents.filter((mutationEvent) =>
686
- mergeResult.confirmedEvents.some((confirmedEvent) =>
687
- EventId.isEqual(mutationEvent.id, confirmedEvent.id),
688
- ),
735
+ const confirmedNewEvents = newEvents.filter((event) =>
736
+ mergeResult.confirmedEvents.some((confirmedEvent) => EventId.isEqual(event.id, confirmedEvent.id)),
689
737
  )
690
- yield* Mutationlog.updateSyncMetadata(confirmedNewEvents)
738
+ yield* Eventlog.updateSyncMetadata(confirmedNewEvents)
691
739
  }
692
740
  }
693
741
 
694
742
  // Removes the changeset rows which are no longer needed as we'll never have to rollback beyond this point
695
743
  trimChangesetRows(db, newBackendHead)
696
744
 
697
- yield* applyMutationsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined })
745
+ advancePushHead(mergeResult.newSyncState.localHead)
746
+
747
+ yield* applyEventsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined })
698
748
 
699
749
  yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState)
700
750
 
@@ -704,7 +754,7 @@ const backgroundBackendPulling = ({
704
754
  }
705
755
  })
706
756
 
707
- const cursorInfo = yield* Mutationlog.getSyncBackendCursorInfo(initialBackendHead)
757
+ const cursorInfo = yield* Eventlog.getSyncBackendCursorInfo(initialBackendHead)
708
758
 
709
759
  yield* syncBackend.pull(cursorInfo).pipe(
710
760
  // TODO only take from queue while connected
@@ -717,13 +767,13 @@ const backgroundBackendPulling = ({
717
767
  // },
718
768
  // })
719
769
 
720
- // NOTE we only want to take process mutations when the sync backend is connected
770
+ // NOTE we only want to take process events when the sync backend is connected
721
771
  // (e.g. needed for simulating being offline)
722
772
  // TODO remove when there's a better way to handle this in stream above
723
773
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
724
774
 
725
775
  yield* onNewPullChunk(
726
- batch.map((_) => MutationEvent.EncodedWithMeta.fromGlobal(_.mutationEventEncoded, _.metadata)),
776
+ batch.map((_) => LiveStoreEvent.EncodedWithMeta.fromGlobal(_.eventEncoded, _.metadata)),
727
777
  remaining,
728
778
  )
729
779
 
@@ -739,10 +789,12 @@ const backgroundBackendPushing = ({
739
789
  syncBackendPushQueue,
740
790
  otelSpan,
741
791
  devtoolsLatch,
792
+ backendPushBatchSize,
742
793
  }: {
743
- syncBackendPushQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
794
+ syncBackendPushQueue: BucketQueue.BucketQueue<LiveStoreEvent.EncodedWithMeta>
744
795
  otelSpan: otel.Span | undefined
745
796
  devtoolsLatch: Effect.Latch | undefined
797
+ backendPushBatchSize: number
746
798
  }) =>
747
799
  Effect.gen(function* () {
748
800
  const { syncBackend } = yield* LeaderThreadCtx
@@ -751,8 +803,7 @@ const backgroundBackendPushing = ({
751
803
  while (true) {
752
804
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
753
805
 
754
- // TODO make batch size configurable
755
- const queueItems = yield* BucketQueue.takeBetween(syncBackendPushQueue, 1, BACKEND_PUSH_BATCH_SIZE)
806
+ const queueItems = yield* BucketQueue.takeBetween(syncBackendPushQueue, 1, backendPushBatchSize)
756
807
 
757
808
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
758
809
 
@@ -786,9 +837,7 @@ const trimChangesetRows = (db: SqliteDb, newHead: EventId.EventId) => {
786
837
  }
787
838
 
788
839
  interface PullQueueSet {
789
- makeQueue: (
790
- cursor: number,
791
- ) => Effect.Effect<
840
+ makeQueue: Effect.Effect<
792
841
  Queue.Queue<{ payload: typeof SyncState.PayloadUpstream.Type; mergeCounter: number }>,
793
842
  UnexpectedError,
794
843
  Scope.Scope | LeaderThreadCtx
@@ -812,19 +861,18 @@ const makePullQueueSet = Effect.gen(function* () {
812
861
  }),
813
862
  )
814
863
 
815
- const makeQueue: PullQueueSet['makeQueue'] = () =>
816
- Effect.gen(function* () {
817
- const queue = yield* Queue.unbounded<{
818
- payload: typeof SyncState.PayloadUpstream.Type
819
- mergeCounter: number
820
- }>().pipe(Effect.acquireRelease(Queue.shutdown))
864
+ const makeQueue: PullQueueSet['makeQueue'] = Effect.gen(function* () {
865
+ const queue = yield* Queue.unbounded<{
866
+ payload: typeof SyncState.PayloadUpstream.Type
867
+ mergeCounter: number
868
+ }>().pipe(Effect.acquireRelease(Queue.shutdown))
821
869
 
822
- yield* Effect.addFinalizer(() => Effect.sync(() => set.delete(queue)))
870
+ yield* Effect.addFinalizer(() => Effect.sync(() => set.delete(queue)))
823
871
 
824
- set.add(queue)
872
+ set.add(queue)
825
873
 
826
- return queue
827
- })
874
+ return queue
875
+ })
828
876
 
829
877
  const offer: PullQueueSet['offer'] = (item) =>
830
878
  Effect.gen(function* () {
@@ -861,3 +909,27 @@ const getMergeCounterFromDb = (dbReadModel: SqliteDb) =>
861
909
  )
862
910
  return result[0]?.mergeCounter ?? 0
863
911
  })
912
+
913
+ const validatePushBatch = (batch: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>, pushHead: EventId.EventId) =>
914
+ Effect.gen(function* () {
915
+ if (batch.length === 0) {
916
+ return
917
+ }
918
+
919
+ // Make sure batch is monotonically increasing
920
+ for (let i = 1; i < batch.length; i++) {
921
+ if (EventId.isGreaterThanOrEqual(batch[i - 1]!.id, batch[i]!.id)) {
922
+ shouldNeverHappen(
923
+ `Events must be ordered in monotonically ascending order by eventId. Received: [${batch.map((e) => EventId.toString(e.id)).join(', ')}]`,
924
+ )
925
+ }
926
+ }
927
+
928
+ // Make sure smallest event id is > pushHead
929
+ if (EventId.isGreaterThanOrEqual(pushHead, batch[0]!.id)) {
930
+ return yield* LeaderAheadError.make({
931
+ minimumExpectedId: pushHead,
932
+ providedId: batch[0]!.id,
933
+ })
934
+ }
935
+ })