@livestore/common 0.3.0-dev.27 → 0.3.0-dev.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (277) hide show
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/__tests__/fixture.d.ts +83 -221
  3. package/dist/__tests__/fixture.d.ts.map +1 -1
  4. package/dist/__tests__/fixture.js +33 -11
  5. package/dist/__tests__/fixture.js.map +1 -1
  6. package/dist/adapter-types.d.ts +22 -15
  7. package/dist/adapter-types.d.ts.map +1 -1
  8. package/dist/adapter-types.js +15 -2
  9. package/dist/adapter-types.js.map +1 -1
  10. package/dist/bounded-collections.d.ts +1 -1
  11. package/dist/bounded-collections.d.ts.map +1 -1
  12. package/dist/debug-info.d.ts.map +1 -1
  13. package/dist/debug-info.js +1 -0
  14. package/dist/debug-info.js.map +1 -1
  15. package/dist/devtools/devtools-messages-client-session.d.ts +21 -21
  16. package/dist/devtools/devtools-messages-common.d.ts +6 -6
  17. package/dist/devtools/devtools-messages-leader.d.ts +45 -45
  18. package/dist/devtools/devtools-messages-leader.d.ts.map +1 -1
  19. package/dist/devtools/devtools-messages-leader.js +11 -11
  20. package/dist/devtools/devtools-messages-leader.js.map +1 -1
  21. package/dist/index.d.ts +2 -5
  22. package/dist/index.d.ts.map +1 -1
  23. package/dist/index.js +2 -5
  24. package/dist/index.js.map +1 -1
  25. package/dist/leader-thread/LeaderSyncProcessor.d.ts +25 -12
  26. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  27. package/dist/leader-thread/LeaderSyncProcessor.js +125 -89
  28. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  29. package/dist/leader-thread/{apply-mutation.d.ts → apply-event.d.ts} +7 -7
  30. package/dist/leader-thread/apply-event.d.ts.map +1 -0
  31. package/dist/leader-thread/apply-event.js +103 -0
  32. package/dist/leader-thread/apply-event.js.map +1 -0
  33. package/dist/leader-thread/eventlog.d.ts +27 -0
  34. package/dist/leader-thread/eventlog.d.ts.map +1 -0
  35. package/dist/leader-thread/eventlog.js +123 -0
  36. package/dist/leader-thread/eventlog.js.map +1 -0
  37. package/dist/leader-thread/leader-worker-devtools.js +18 -18
  38. package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
  39. package/dist/leader-thread/make-leader-thread-layer.d.ts +16 -4
  40. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  41. package/dist/leader-thread/make-leader-thread-layer.js +23 -16
  42. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  43. package/dist/leader-thread/mod.d.ts +1 -1
  44. package/dist/leader-thread/mod.d.ts.map +1 -1
  45. package/dist/leader-thread/mod.js +1 -1
  46. package/dist/leader-thread/mod.js.map +1 -1
  47. package/dist/leader-thread/recreate-db.d.ts.map +1 -1
  48. package/dist/leader-thread/recreate-db.js +6 -8
  49. package/dist/leader-thread/recreate-db.js.map +1 -1
  50. package/dist/leader-thread/types.d.ts +11 -11
  51. package/dist/leader-thread/types.d.ts.map +1 -1
  52. package/dist/materializer-helper.d.ts +23 -0
  53. package/dist/materializer-helper.d.ts.map +1 -0
  54. package/dist/materializer-helper.js +70 -0
  55. package/dist/materializer-helper.js.map +1 -0
  56. package/dist/query-builder/api.d.ts +58 -53
  57. package/dist/query-builder/api.d.ts.map +1 -1
  58. package/dist/query-builder/api.js +3 -5
  59. package/dist/query-builder/api.js.map +1 -1
  60. package/dist/query-builder/astToSql.d.ts.map +1 -1
  61. package/dist/query-builder/astToSql.js +59 -37
  62. package/dist/query-builder/astToSql.js.map +1 -1
  63. package/dist/query-builder/impl.d.ts +2 -3
  64. package/dist/query-builder/impl.d.ts.map +1 -1
  65. package/dist/query-builder/impl.js +48 -46
  66. package/dist/query-builder/impl.js.map +1 -1
  67. package/dist/query-builder/impl.test.d.ts +86 -1
  68. package/dist/query-builder/impl.test.d.ts.map +1 -1
  69. package/dist/query-builder/impl.test.js +244 -36
  70. package/dist/query-builder/impl.test.js.map +1 -1
  71. package/dist/rehydrate-from-eventlog.d.ts +14 -0
  72. package/dist/rehydrate-from-eventlog.d.ts.map +1 -0
  73. package/dist/{rehydrate-from-mutationlog.js → rehydrate-from-eventlog.js} +25 -26
  74. package/dist/rehydrate-from-eventlog.js.map +1 -0
  75. package/dist/schema/EventDef.d.ts +136 -0
  76. package/dist/schema/EventDef.d.ts.map +1 -0
  77. package/dist/schema/EventDef.js +58 -0
  78. package/dist/schema/EventDef.js.map +1 -0
  79. package/dist/schema/EventId.d.ts +2 -2
  80. package/dist/schema/EventId.d.ts.map +1 -1
  81. package/dist/schema/EventId.js +8 -2
  82. package/dist/schema/EventId.js.map +1 -1
  83. package/dist/schema/{MutationEvent.d.ts → LiveStoreEvent.d.ts} +56 -56
  84. package/dist/schema/LiveStoreEvent.d.ts.map +1 -0
  85. package/dist/schema/{MutationEvent.js → LiveStoreEvent.js} +25 -25
  86. package/dist/schema/LiveStoreEvent.js.map +1 -0
  87. package/dist/schema/client-document-def.d.ts +223 -0
  88. package/dist/schema/client-document-def.d.ts.map +1 -0
  89. package/dist/schema/client-document-def.js +170 -0
  90. package/dist/schema/client-document-def.js.map +1 -0
  91. package/dist/schema/client-document-def.test.d.ts +2 -0
  92. package/dist/schema/client-document-def.test.d.ts.map +1 -0
  93. package/dist/schema/client-document-def.test.js +201 -0
  94. package/dist/schema/client-document-def.test.js.map +1 -0
  95. package/dist/schema/db-schema/dsl/mod.d.ts.map +1 -1
  96. package/dist/schema/events.d.ts +2 -0
  97. package/dist/schema/events.d.ts.map +1 -0
  98. package/dist/schema/events.js +2 -0
  99. package/dist/schema/events.js.map +1 -0
  100. package/dist/schema/mod.d.ts +4 -3
  101. package/dist/schema/mod.d.ts.map +1 -1
  102. package/dist/schema/mod.js +4 -3
  103. package/dist/schema/mod.js.map +1 -1
  104. package/dist/schema/schema.d.ts +27 -23
  105. package/dist/schema/schema.d.ts.map +1 -1
  106. package/dist/schema/schema.js +45 -43
  107. package/dist/schema/schema.js.map +1 -1
  108. package/dist/schema/sqlite-state.d.ts +12 -0
  109. package/dist/schema/sqlite-state.d.ts.map +1 -0
  110. package/dist/schema/sqlite-state.js +36 -0
  111. package/dist/schema/sqlite-state.js.map +1 -0
  112. package/dist/schema/system-tables.d.ts +67 -98
  113. package/dist/schema/system-tables.d.ts.map +1 -1
  114. package/dist/schema/system-tables.js +62 -48
  115. package/dist/schema/system-tables.js.map +1 -1
  116. package/dist/schema/table-def.d.ts +26 -96
  117. package/dist/schema/table-def.d.ts.map +1 -1
  118. package/dist/schema/table-def.js +16 -64
  119. package/dist/schema/table-def.js.map +1 -1
  120. package/dist/schema/view.d.ts +3 -0
  121. package/dist/schema/view.d.ts.map +1 -0
  122. package/dist/schema/view.js +3 -0
  123. package/dist/schema/view.js.map +1 -0
  124. package/dist/schema-management/common.d.ts +4 -4
  125. package/dist/schema-management/common.d.ts.map +1 -1
  126. package/dist/schema-management/migrations.d.ts.map +1 -1
  127. package/dist/schema-management/migrations.js +6 -6
  128. package/dist/schema-management/migrations.js.map +1 -1
  129. package/dist/schema-management/validate-mutation-defs.d.ts +3 -3
  130. package/dist/schema-management/validate-mutation-defs.d.ts.map +1 -1
  131. package/dist/schema-management/validate-mutation-defs.js +17 -17
  132. package/dist/schema-management/validate-mutation-defs.js.map +1 -1
  133. package/dist/sync/ClientSessionSyncProcessor.d.ts +7 -7
  134. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  135. package/dist/sync/ClientSessionSyncProcessor.js +31 -30
  136. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  137. package/dist/sync/next/facts.d.ts +19 -19
  138. package/dist/sync/next/facts.d.ts.map +1 -1
  139. package/dist/sync/next/facts.js +2 -2
  140. package/dist/sync/next/facts.js.map +1 -1
  141. package/dist/sync/next/history-dag-common.d.ts +3 -3
  142. package/dist/sync/next/history-dag-common.d.ts.map +1 -1
  143. package/dist/sync/next/history-dag-common.js +1 -1
  144. package/dist/sync/next/history-dag-common.js.map +1 -1
  145. package/dist/sync/next/history-dag.js +1 -1
  146. package/dist/sync/next/history-dag.js.map +1 -1
  147. package/dist/sync/next/rebase-events.d.ts +7 -7
  148. package/dist/sync/next/rebase-events.d.ts.map +1 -1
  149. package/dist/sync/next/rebase-events.js +5 -5
  150. package/dist/sync/next/rebase-events.js.map +1 -1
  151. package/dist/sync/next/test/compact-events.calculator.test.js +38 -33
  152. package/dist/sync/next/test/compact-events.calculator.test.js.map +1 -1
  153. package/dist/sync/next/test/compact-events.test.js +71 -71
  154. package/dist/sync/next/test/compact-events.test.js.map +1 -1
  155. package/dist/sync/next/test/{mutation-fixtures.d.ts → event-fixtures.d.ts} +29 -29
  156. package/dist/sync/next/test/event-fixtures.d.ts.map +1 -0
  157. package/dist/sync/next/test/{mutation-fixtures.js → event-fixtures.js} +60 -25
  158. package/dist/sync/next/test/event-fixtures.js.map +1 -0
  159. package/dist/sync/next/test/mod.d.ts +1 -1
  160. package/dist/sync/next/test/mod.d.ts.map +1 -1
  161. package/dist/sync/next/test/mod.js +1 -1
  162. package/dist/sync/next/test/mod.js.map +1 -1
  163. package/dist/sync/sync.d.ts +3 -3
  164. package/dist/sync/sync.d.ts.map +1 -1
  165. package/dist/sync/syncstate.d.ts +32 -32
  166. package/dist/sync/syncstate.d.ts.map +1 -1
  167. package/dist/sync/syncstate.js +31 -25
  168. package/dist/sync/syncstate.js.map +1 -1
  169. package/dist/sync/syncstate.test.js +165 -175
  170. package/dist/sync/syncstate.test.js.map +1 -1
  171. package/dist/sync/validate-push-payload.d.ts +2 -2
  172. package/dist/sync/validate-push-payload.d.ts.map +1 -1
  173. package/dist/sync/validate-push-payload.js.map +1 -1
  174. package/dist/version.d.ts +1 -1
  175. package/dist/version.js +1 -1
  176. package/package.json +3 -3
  177. package/src/__tests__/fixture.ts +36 -15
  178. package/src/adapter-types.ts +23 -16
  179. package/src/debug-info.ts +1 -0
  180. package/src/devtools/devtools-messages-leader.ts +13 -13
  181. package/src/index.ts +2 -5
  182. package/src/leader-thread/LeaderSyncProcessor.ts +183 -122
  183. package/src/leader-thread/{apply-mutation.ts → apply-event.ts} +50 -74
  184. package/src/leader-thread/eventlog.ts +199 -0
  185. package/src/leader-thread/leader-worker-devtools.ts +18 -18
  186. package/src/leader-thread/make-leader-thread-layer.ts +51 -29
  187. package/src/leader-thread/mod.ts +1 -1
  188. package/src/leader-thread/recreate-db.ts +6 -9
  189. package/src/leader-thread/types.ts +12 -12
  190. package/src/materializer-helper.ts +110 -0
  191. package/src/query-builder/api.ts +79 -105
  192. package/src/query-builder/astToSql.ts +68 -39
  193. package/src/query-builder/impl.test.ts +264 -42
  194. package/src/query-builder/impl.ts +72 -56
  195. package/src/{rehydrate-from-mutationlog.ts → rehydrate-from-eventlog.ts} +33 -40
  196. package/src/schema/EventDef.ts +216 -0
  197. package/src/schema/EventId.ts +11 -3
  198. package/src/schema/{MutationEvent.ts → LiveStoreEvent.ts} +68 -69
  199. package/src/schema/client-document-def.test.ts +239 -0
  200. package/src/schema/client-document-def.ts +444 -0
  201. package/src/schema/db-schema/dsl/mod.ts +0 -1
  202. package/src/schema/events.ts +1 -0
  203. package/src/schema/mod.ts +4 -3
  204. package/src/schema/schema.ts +79 -69
  205. package/src/schema/sqlite-state.ts +62 -0
  206. package/src/schema/system-tables.ts +42 -53
  207. package/src/schema/table-def.ts +53 -209
  208. package/src/schema/view.ts +2 -0
  209. package/src/schema-management/common.ts +4 -4
  210. package/src/schema-management/migrations.ts +8 -9
  211. package/src/schema-management/validate-mutation-defs.ts +22 -24
  212. package/src/sync/ClientSessionSyncProcessor.ts +37 -36
  213. package/src/sync/next/facts.ts +31 -32
  214. package/src/sync/next/history-dag-common.ts +4 -4
  215. package/src/sync/next/history-dag.ts +1 -1
  216. package/src/sync/next/rebase-events.ts +13 -13
  217. package/src/sync/next/test/compact-events.calculator.test.ts +45 -45
  218. package/src/sync/next/test/compact-events.test.ts +73 -73
  219. package/src/sync/next/test/event-fixtures.ts +219 -0
  220. package/src/sync/next/test/mod.ts +1 -1
  221. package/src/sync/sync.ts +3 -3
  222. package/src/sync/syncstate.test.ts +168 -179
  223. package/src/sync/syncstate.ts +48 -38
  224. package/src/sync/validate-push-payload.ts +2 -2
  225. package/src/version.ts +1 -1
  226. package/tmp/pack.tgz +0 -0
  227. package/tsconfig.json +1 -0
  228. package/dist/derived-mutations.d.ts +0 -109
  229. package/dist/derived-mutations.d.ts.map +0 -1
  230. package/dist/derived-mutations.js +0 -54
  231. package/dist/derived-mutations.js.map +0 -1
  232. package/dist/derived-mutations.test.d.ts +0 -2
  233. package/dist/derived-mutations.test.d.ts.map +0 -1
  234. package/dist/derived-mutations.test.js +0 -93
  235. package/dist/derived-mutations.test.js.map +0 -1
  236. package/dist/init-singleton-tables.d.ts +0 -4
  237. package/dist/init-singleton-tables.d.ts.map +0 -1
  238. package/dist/init-singleton-tables.js +0 -16
  239. package/dist/init-singleton-tables.js.map +0 -1
  240. package/dist/leader-thread/apply-mutation.d.ts.map +0 -1
  241. package/dist/leader-thread/apply-mutation.js +0 -122
  242. package/dist/leader-thread/apply-mutation.js.map +0 -1
  243. package/dist/leader-thread/mutationlog.d.ts +0 -27
  244. package/dist/leader-thread/mutationlog.d.ts.map +0 -1
  245. package/dist/leader-thread/mutationlog.js +0 -124
  246. package/dist/leader-thread/mutationlog.js.map +0 -1
  247. package/dist/leader-thread/pull-queue-set.d.ts +0 -7
  248. package/dist/leader-thread/pull-queue-set.d.ts.map +0 -1
  249. package/dist/leader-thread/pull-queue-set.js +0 -38
  250. package/dist/leader-thread/pull-queue-set.js.map +0 -1
  251. package/dist/mutation.d.ts +0 -20
  252. package/dist/mutation.d.ts.map +0 -1
  253. package/dist/mutation.js +0 -68
  254. package/dist/mutation.js.map +0 -1
  255. package/dist/query-info.d.ts +0 -41
  256. package/dist/query-info.d.ts.map +0 -1
  257. package/dist/query-info.js +0 -7
  258. package/dist/query-info.js.map +0 -1
  259. package/dist/rehydrate-from-mutationlog.d.ts +0 -15
  260. package/dist/rehydrate-from-mutationlog.d.ts.map +0 -1
  261. package/dist/rehydrate-from-mutationlog.js.map +0 -1
  262. package/dist/schema/MutationEvent.d.ts.map +0 -1
  263. package/dist/schema/MutationEvent.js.map +0 -1
  264. package/dist/schema/mutations.d.ts +0 -115
  265. package/dist/schema/mutations.d.ts.map +0 -1
  266. package/dist/schema/mutations.js +0 -42
  267. package/dist/schema/mutations.js.map +0 -1
  268. package/dist/sync/next/test/mutation-fixtures.d.ts.map +0 -1
  269. package/dist/sync/next/test/mutation-fixtures.js.map +0 -1
  270. package/src/derived-mutations.test.ts +0 -101
  271. package/src/derived-mutations.ts +0 -170
  272. package/src/init-singleton-tables.ts +0 -24
  273. package/src/leader-thread/mutationlog.ts +0 -202
  274. package/src/mutation.ts +0 -108
  275. package/src/query-info.ts +0 -83
  276. package/src/schema/mutations.ts +0 -193
  277. package/src/sync/next/test/mutation-fixtures.ts +0 -228
@@ -20,39 +20,36 @@ import { UnexpectedError } from '../adapter-types.js'
20
20
  import type { LiveStoreSchema } from '../schema/mod.js'
21
21
  import {
22
22
  EventId,
23
- getMutationDef,
23
+ getEventDef,
24
24
  LEADER_MERGE_COUNTER_TABLE,
25
- MutationEvent,
25
+ LiveStoreEvent,
26
26
  SESSION_CHANGESET_META_TABLE,
27
27
  } from '../schema/mod.js'
28
28
  import { LeaderAheadError } from '../sync/sync.js'
29
29
  import * as SyncState from '../sync/syncstate.js'
30
30
  import { sql } from '../util.js'
31
- import { rollback } from './apply-mutation.js'
32
- import * as Mutationlog from './mutationlog.js'
31
+ import { rollback } from './apply-event.js'
32
+ import * as Eventlog from './eventlog.js'
33
33
  import type { InitialBlockingSyncContext, LeaderSyncProcessor } from './types.js'
34
34
  import { LeaderThreadCtx } from './types.js'
35
35
 
36
- export const BACKEND_PUSH_BATCH_SIZE = 50
37
- export const LOCAL_PUSH_BATCH_SIZE = 10
38
-
39
36
  type LocalPushQueueItem = [
40
- mutationEvent: MutationEvent.EncodedWithMeta,
37
+ event: LiveStoreEvent.EncodedWithMeta,
41
38
  deferred: Deferred.Deferred<void, LeaderAheadError> | undefined,
42
39
  /** Used to determine whether the batch has become invalid due to a rejected local push batch */
43
40
  generation: number,
44
41
  ]
45
42
 
46
43
  /**
47
- * The LeaderSyncProcessor manages synchronization of mutations between
44
+ * The LeaderSyncProcessor manages synchronization of events between
48
45
  * the local state and the sync backend, ensuring efficient and orderly processing.
49
46
  *
50
47
  * In the LeaderSyncProcessor, pulling always has precedence over pushing.
51
48
  *
52
49
  * Responsibilities:
53
- * - Queueing incoming local mutations in a localPushesQueue.
54
- * - Broadcasting mutations to client sessions via pull queues.
55
- * - Pushing mutations to the sync backend.
50
+ * - Queueing incoming local events in a localPushesQueue.
51
+ * - Broadcasting events to client sessions via pull queues.
52
+ * - Pushing events to the sync backend.
56
53
  *
57
54
  * Notes:
58
55
  *
@@ -60,12 +57,12 @@ type LocalPushQueueItem = [
60
57
  * - localPushesQueue:
61
58
  * - Maintains events in ascending order.
62
59
  * - Uses `Deferred` objects to resolve/reject events based on application success.
63
- * - Processes events from the queue, applying mutations in batches.
60
+ * - Processes events from the queue, applying events in batches.
64
61
  * - Controlled by a `Latch` to manage execution flow.
65
62
  * - The latch closes on pull receipt and re-opens post-pull completion.
66
63
  * - Processes up to `maxBatchSize` events per cycle.
67
64
  *
68
- * Currently we're advancing the db read model and mutation log in lockstep, but we could also decouple this in the future
65
+ * Currently we're advancing the db read model and eventlog in lockstep, but we could also decouple this in the future
69
66
  *
70
67
  * Tricky concurrency scenarios:
71
68
  * - Queued local push batches becoming invalid due to a prior local push item being rejected.
@@ -74,31 +71,50 @@ type LocalPushQueueItem = [
74
71
  */
75
72
  export const makeLeaderSyncProcessor = ({
76
73
  schema,
77
- dbMutationLogMissing,
78
- dbMutationLog,
74
+ dbEventlogMissing,
75
+ dbEventlog,
79
76
  dbReadModel,
80
77
  dbReadModelMissing,
81
78
  initialBlockingSyncContext,
82
79
  onError,
80
+ params,
81
+ testing,
83
82
  }: {
84
83
  schema: LiveStoreSchema
85
- /** Only used to know whether we can safely query dbMutationLog during setup execution */
86
- dbMutationLogMissing: boolean
87
- dbMutationLog: SqliteDb
84
+ /** Only used to know whether we can safely query dbEventlog during setup execution */
85
+ dbEventlogMissing: boolean
86
+ dbEventlog: SqliteDb
88
87
  dbReadModel: SqliteDb
89
88
  /** Only used to know whether we can safely query dbReadModel during setup execution */
90
89
  dbReadModelMissing: boolean
91
90
  initialBlockingSyncContext: InitialBlockingSyncContext
92
91
  onError: 'shutdown' | 'ignore'
92
+ params: {
93
+ /**
94
+ * @default 10
95
+ */
96
+ localPushBatchSize?: number
97
+ /**
98
+ * @default 50
99
+ */
100
+ backendPushBatchSize?: number
101
+ }
102
+ testing: {
103
+ delays?: {
104
+ localPushProcessing?: Effect.Effect<void>
105
+ }
106
+ }
93
107
  }): Effect.Effect<LeaderSyncProcessor, UnexpectedError, Scope.Scope> =>
94
108
  Effect.gen(function* () {
95
- const syncBackendPushQueue = yield* BucketQueue.make<MutationEvent.EncodedWithMeta>()
109
+ const syncBackendPushQueue = yield* BucketQueue.make<LiveStoreEvent.EncodedWithMeta>()
110
+ const localPushBatchSize = params.localPushBatchSize ?? 10
111
+ const backendPushBatchSize = params.backendPushBatchSize ?? 50
96
112
 
97
113
  const syncStateSref = yield* SubscriptionRef.make<SyncState.SyncState | undefined>(undefined)
98
114
 
99
- const isClientEvent = (mutationEventEncoded: MutationEvent.EncodedWithMeta) => {
100
- const mutationDef = getMutationDef(schema, mutationEventEncoded.mutation)
101
- return mutationDef.options.clientOnly
115
+ const isClientEvent = (eventEncoded: LiveStoreEvent.EncodedWithMeta) => {
116
+ const eventDef = getEventDef(schema, eventEncoded.name)
117
+ return eventDef.eventDef.options.clientOnly
102
118
  }
103
119
 
104
120
  const connectedClientSessionPullQueues = yield* makePullQueueSet
@@ -108,10 +124,12 @@ export const makeLeaderSyncProcessor = ({
108
124
  * If a local-push batch is rejected, all subsequent push queue items with the same generation are also rejected,
109
125
  * even if they would be valid on their own.
110
126
  */
127
+ // TODO get rid of this in favour of the `mergeGeneration` event id field
111
128
  const currentLocalPushGenerationRef = { current: 0 }
112
129
 
130
+ type MergeCounter = number
113
131
  const mergeCounterRef = { current: dbReadModelMissing ? 0 : yield* getMergeCounterFromDb(dbReadModel) }
114
- const mergePayloads = new Map<number, typeof SyncState.PayloadUpstream.Type>()
132
+ const mergePayloads = new Map<MergeCounter, typeof SyncState.PayloadUpstream.Type>()
115
133
 
116
134
  // This context depends on data from `boot`, we should find a better implementation to avoid this ref indirection.
117
135
  const ctxRef = {
@@ -129,12 +147,29 @@ export const makeLeaderSyncProcessor = ({
129
147
  const localPushesLatch = yield* Effect.makeLatch(true)
130
148
  const pullLatch = yield* Effect.makeLatch(true)
131
149
 
150
+ /**
151
+ * Additionally to the `syncStateSref` we also need the `pushHeadRef` in order to prevent old/duplicate
152
+ * events from being pushed in a scenario like this:
153
+ * - client session A pushes e1
154
+ * - leader sync processor takes a bit and hasn't yet taken e1 from the localPushesQueue
155
+ * - client session B also pushes e1 (which should be rejected)
156
+ *
157
+ * Thus the purpose of the pushHeadRef is to guard the integrity of the local push queue
158
+ */
159
+ const pushHeadRef = { current: EventId.ROOT }
160
+ const advancePushHead = (eventId: EventId.EventId) => {
161
+ pushHeadRef.current = EventId.max(pushHeadRef.current, eventId)
162
+ }
163
+
132
164
  // NOTE: New events are only pushed to sync backend after successful local push processing
133
165
  const push: LeaderSyncProcessor['push'] = (newEvents, options) =>
134
166
  Effect.gen(function* () {
135
- // TODO validate batch
136
167
  if (newEvents.length === 0) return
137
168
 
169
+ yield* validatePushBatch(newEvents, pushHeadRef.current)
170
+
171
+ advancePushHead(newEvents.at(-1)!.id)
172
+
138
173
  const waitForProcessing = options?.waitForProcessing ?? false
139
174
  const generation = currentLocalPushGenerationRef.current
140
175
 
@@ -142,20 +177,18 @@ export const makeLeaderSyncProcessor = ({
142
177
  const deferreds = yield* Effect.forEach(newEvents, () => Deferred.make<void, LeaderAheadError>())
143
178
 
144
179
  const items = newEvents.map(
145
- (mutationEventEncoded, i) => [mutationEventEncoded, deferreds[i], generation] as LocalPushQueueItem,
180
+ (eventEncoded, i) => [eventEncoded, deferreds[i], generation] as LocalPushQueueItem,
146
181
  )
147
182
 
148
183
  yield* BucketQueue.offerAll(localPushesQueue, items)
149
184
 
150
185
  yield* Effect.all(deferreds)
151
186
  } else {
152
- const items = newEvents.map(
153
- (mutationEventEncoded) => [mutationEventEncoded, undefined, generation] as LocalPushQueueItem,
154
- )
187
+ const items = newEvents.map((eventEncoded) => [eventEncoded, undefined, generation] as LocalPushQueueItem)
155
188
  yield* BucketQueue.offerAll(localPushesQueue, items)
156
189
  }
157
190
  }).pipe(
158
- Effect.withSpan('@livestore/common:LeaderSyncProcessor:local-push', {
191
+ Effect.withSpan('@livestore/common:LeaderSyncProcessor:push', {
159
192
  attributes: {
160
193
  batchSize: newEvents.length,
161
194
  batch: TRACE_VERBOSE ? newEvents : undefined,
@@ -164,26 +197,22 @@ export const makeLeaderSyncProcessor = ({
164
197
  }),
165
198
  )
166
199
 
167
- const pushPartial: LeaderSyncProcessor['pushPartial'] = ({
168
- mutationEvent: { mutation, args },
169
- clientId,
170
- sessionId,
171
- }) =>
200
+ const pushPartial: LeaderSyncProcessor['pushPartial'] = ({ event: { name, args }, clientId, sessionId }) =>
172
201
  Effect.gen(function* () {
173
202
  const syncState = yield* syncStateSref
174
203
  if (syncState === undefined) return shouldNeverHappen('Not initialized')
175
204
 
176
- const mutationDef = getMutationDef(schema, mutation)
205
+ const eventDef = getEventDef(schema, name)
177
206
 
178
- const mutationEventEncoded = new MutationEvent.EncodedWithMeta({
179
- mutation,
207
+ const eventEncoded = new LiveStoreEvent.EncodedWithMeta({
208
+ name,
180
209
  args,
181
210
  clientId,
182
211
  sessionId,
183
- ...EventId.nextPair(syncState.localHead, mutationDef.options.clientOnly),
212
+ ...EventId.nextPair(syncState.localHead, eventDef.eventDef.options.clientOnly),
184
213
  })
185
214
 
186
- yield* push([mutationEventEncoded])
215
+ yield* push([eventEncoded])
187
216
  }).pipe(Effect.catchTag('LeaderAheadError', Effect.orDie))
188
217
 
189
218
  // Starts various background loops
@@ -200,10 +229,9 @@ export const makeLeaderSyncProcessor = ({
200
229
  runtime,
201
230
  }
202
231
 
203
- const initialBackendHead = dbMutationLogMissing
204
- ? EventId.ROOT.global
205
- : Mutationlog.getBackendHeadFromDb(dbMutationLog)
206
- const initialLocalHead = dbMutationLogMissing ? EventId.ROOT : Mutationlog.getClientHeadFromDb(dbMutationLog)
232
+ const initialLocalHead = dbEventlogMissing ? EventId.ROOT : Eventlog.getClientHeadFromDb(dbEventlog)
233
+
234
+ const initialBackendHead = dbEventlogMissing ? EventId.ROOT.global : Eventlog.getBackendHeadFromDb(dbEventlog)
207
235
 
208
236
  if (initialBackendHead > initialLocalHead.global) {
209
237
  return shouldNeverHappen(
@@ -211,12 +239,12 @@ export const makeLeaderSyncProcessor = ({
211
239
  )
212
240
  }
213
241
 
214
- const pendingMutationEvents = dbMutationLogMissing
242
+ const pendingEvents = dbEventlogMissing
215
243
  ? []
216
- : yield* Mutationlog.getMutationEventsSince({ global: initialBackendHead, client: EventId.clientDefault })
244
+ : yield* Eventlog.getEventsSince({ global: initialBackendHead, client: EventId.clientDefault })
217
245
 
218
246
  const initialSyncState = new SyncState.SyncState({
219
- pending: pendingMutationEvents,
247
+ pending: pendingEvents,
220
248
  upstreamHead: { global: initialBackendHead, client: EventId.clientDefault },
221
249
  localHead: initialLocalHead,
222
250
  })
@@ -225,16 +253,16 @@ export const makeLeaderSyncProcessor = ({
225
253
  yield* SubscriptionRef.set(syncStateSref, initialSyncState)
226
254
 
227
255
  // Rehydrate sync queue
228
- if (pendingMutationEvents.length > 0) {
229
- const globalPendingMutationEvents = pendingMutationEvents
230
- // Don't sync clientOnly mutations
231
- .filter((mutationEventEncoded) => {
232
- const mutationDef = getMutationDef(schema, mutationEventEncoded.mutation)
233
- return mutationDef.options.clientOnly === false
256
+ if (pendingEvents.length > 0) {
257
+ const globalPendingEvents = pendingEvents
258
+ // Don't sync clientOnly events
259
+ .filter((eventEncoded) => {
260
+ const eventDef = getEventDef(schema, eventEncoded.name)
261
+ return eventDef.eventDef.options.clientOnly === false
234
262
  })
235
263
 
236
- if (globalPendingMutationEvents.length > 0) {
237
- yield* BucketQueue.offerAll(syncBackendPushQueue, globalPendingMutationEvents)
264
+ if (globalPendingEvents.length > 0) {
265
+ yield* BucketQueue.offerAll(syncBackendPushQueue, globalPendingEvents)
238
266
  }
239
267
  }
240
268
 
@@ -259,18 +287,21 @@ export const makeLeaderSyncProcessor = ({
259
287
  connectedClientSessionPullQueues,
260
288
  mergeCounterRef,
261
289
  mergePayloads,
290
+ localPushBatchSize,
291
+ testing: {
292
+ delay: testing?.delays?.localPushProcessing,
293
+ },
262
294
  }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped)
263
295
 
264
296
  const backendPushingFiberHandle = yield* FiberHandle.make()
297
+ const backendPushingEffect = backgroundBackendPushing({
298
+ syncBackendPushQueue,
299
+ otelSpan,
300
+ devtoolsLatch: ctxRef.current?.devtoolsLatch,
301
+ backendPushBatchSize,
302
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError))
265
303
 
266
- yield* FiberHandle.run(
267
- backendPushingFiberHandle,
268
- backgroundBackendPushing({
269
- syncBackendPushQueue,
270
- otelSpan,
271
- devtoolsLatch: ctxRef.current?.devtoolsLatch,
272
- }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError)),
273
- )
304
+ yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect)
274
305
 
275
306
  yield* backgroundBackendPulling({
276
307
  initialBackendHead,
@@ -285,14 +316,7 @@ export const makeLeaderSyncProcessor = ({
285
316
  yield* BucketQueue.offerAll(syncBackendPushQueue, filteredRebasedPending)
286
317
 
287
318
  // Restart pushing fiber
288
- yield* FiberHandle.run(
289
- backendPushingFiberHandle,
290
- backgroundBackendPushing({
291
- syncBackendPushQueue,
292
- otelSpan,
293
- devtoolsLatch: ctxRef.current?.devtoolsLatch,
294
- }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError)),
295
- )
319
+ yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect)
296
320
  }),
297
321
  syncStateSref,
298
322
  localPushesLatch,
@@ -303,6 +327,7 @@ export const makeLeaderSyncProcessor = ({
303
327
  connectedClientSessionPullQueues,
304
328
  mergeCounterRef,
305
329
  mergePayloads,
330
+ advancePushHead,
306
331
  }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped)
307
332
 
308
333
  return { initialLeaderHead: initialLocalHead }
@@ -327,8 +352,8 @@ export const makeLeaderSyncProcessor = ({
327
352
  return {
328
353
  payload: {
329
354
  _tag: 'upstream-advance' as const,
330
- newEvents: ReadonlyArray.dropWhile(payload.newEvents, (mutationEventEncoded) =>
331
- EventId.isGreaterThanOrEqual(cursor.eventId, mutationEventEncoded.id),
355
+ newEvents: ReadonlyArray.dropWhile(payload.newEvents, (eventEncoded) =>
356
+ EventId.isGreaterThanOrEqual(cursor.eventId, eventEncoded.id),
332
357
  ),
333
358
  },
334
359
  mergeCounter,
@@ -377,24 +402,33 @@ const backgroundApplyLocalPushes = ({
377
402
  connectedClientSessionPullQueues,
378
403
  mergeCounterRef,
379
404
  mergePayloads,
405
+ localPushBatchSize,
406
+ testing,
380
407
  }: {
381
408
  pullLatch: Effect.Latch
382
409
  localPushesLatch: Effect.Latch
383
410
  localPushesQueue: BucketQueue.BucketQueue<LocalPushQueueItem>
384
411
  syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
385
- syncBackendPushQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
412
+ syncBackendPushQueue: BucketQueue.BucketQueue<LiveStoreEvent.EncodedWithMeta>
386
413
  schema: LiveStoreSchema
387
- isClientEvent: (mutationEventEncoded: MutationEvent.EncodedWithMeta) => boolean
414
+ isClientEvent: (eventEncoded: LiveStoreEvent.EncodedWithMeta) => boolean
388
415
  otelSpan: otel.Span | undefined
389
416
  currentLocalPushGenerationRef: { current: number }
390
417
  connectedClientSessionPullQueues: PullQueueSet
391
418
  mergeCounterRef: { current: number }
392
419
  mergePayloads: Map<number, typeof SyncState.PayloadUpstream.Type>
420
+ localPushBatchSize: number
421
+ testing: {
422
+ delay: Effect.Effect<void> | undefined
423
+ }
393
424
  }) =>
394
425
  Effect.gen(function* () {
395
426
  while (true) {
396
- // TODO make batch size configurable
397
- const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, LOCAL_PUSH_BATCH_SIZE)
427
+ if (testing.delay !== undefined) {
428
+ yield* testing.delay.pipe(Effect.withSpan('localPushProcessingDelay'))
429
+ }
430
+
431
+ const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, localPushBatchSize)
398
432
 
399
433
  // Wait for the backend pulling to finish
400
434
  yield* localPushesLatch.await
@@ -406,7 +440,7 @@ const backgroundApplyLocalPushes = ({
406
440
  // It's important that we filter after we got localPushesLatch, otherwise we might filter with the old generation
407
441
  const filteredBatchItems = batchItems
408
442
  .filter(([_1, _2, generation]) => generation === currentLocalPushGenerationRef.current)
409
- .map(([mutationEventEncoded, deferred]) => [mutationEventEncoded, deferred] as const)
443
+ .map(([eventEncoded, deferred]) => [eventEncoded, deferred] as const)
410
444
 
411
445
  if (filteredBatchItems.length === 0) {
412
446
  // console.log('dropping old-gen batch', currentLocalPushGenerationRef.current)
@@ -424,14 +458,14 @@ const backgroundApplyLocalPushes = ({
424
458
  syncState,
425
459
  payload: { _tag: 'local-push', newEvents },
426
460
  isClientEvent,
427
- isEqualEvent: MutationEvent.isEqualEncoded,
461
+ isEqualEvent: LiveStoreEvent.isEqualEncoded,
428
462
  })
429
463
 
430
464
  const mergeCounter = yield* incrementMergeCounter(mergeCounterRef)
431
465
 
432
466
  switch (mergeResult._tag) {
433
467
  case 'unexpected-error': {
434
- otelSpan?.addEvent(`merge[${mergeCounter}]:local-push:unexpected-error`, {
468
+ otelSpan?.addEvent(`[${mergeCounter}]:push:unexpected-error`, {
435
469
  batchSize: newEvents.length,
436
470
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
437
471
  })
@@ -441,7 +475,7 @@ const backgroundApplyLocalPushes = ({
441
475
  return shouldNeverHappen('The leader thread should never have to rebase due to a local push')
442
476
  }
443
477
  case 'reject': {
444
- otelSpan?.addEvent(`merge[${mergeCounter}]:local-push:reject`, {
478
+ otelSpan?.addEvent(`[${mergeCounter}]:push:reject`, {
445
479
  batchSize: newEvents.length,
446
480
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
447
481
  })
@@ -505,28 +539,28 @@ const backgroundApplyLocalPushes = ({
505
539
  })
506
540
  mergePayloads.set(mergeCounter, SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }))
507
541
 
508
- otelSpan?.addEvent(`merge[${mergeCounter}]:local-push:advance`, {
542
+ otelSpan?.addEvent(`[${mergeCounter}]:push:advance`, {
509
543
  batchSize: newEvents.length,
510
544
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
511
545
  })
512
546
 
513
- // Don't sync clientOnly mutations
514
- const filteredBatch = mergeResult.newEvents.filter((mutationEventEncoded) => {
515
- const mutationDef = getMutationDef(schema, mutationEventEncoded.mutation)
516
- return mutationDef.options.clientOnly === false
547
+ // Don't sync clientOnly events
548
+ const filteredBatch = mergeResult.newEvents.filter((eventEncoded) => {
549
+ const eventDef = getEventDef(schema, eventEncoded.name)
550
+ return eventDef.eventDef.options.clientOnly === false
517
551
  })
518
552
 
519
553
  yield* BucketQueue.offerAll(syncBackendPushQueue, filteredBatch)
520
554
 
521
- yield* applyMutationsBatch({ batchItems: newEvents, deferreds })
555
+ yield* applyEventsBatch({ batchItems: mergeResult.newEvents, deferreds })
522
556
 
523
557
  // Allow the backend pulling to start
524
558
  yield* pullLatch.open
525
559
  }
526
560
  })
527
561
 
528
- type ApplyMutationsBatch = (_: {
529
- batchItems: ReadonlyArray<MutationEvent.EncodedWithMeta>
562
+ type ApplyEventsBatch = (_: {
563
+ batchItems: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>
530
564
  /**
531
565
  * The deferreds are used by the caller to know when the mutation has been processed.
532
566
  * Indexes are aligned with `batchItems`
@@ -535,13 +569,13 @@ type ApplyMutationsBatch = (_: {
535
569
  }) => Effect.Effect<void, UnexpectedError, LeaderThreadCtx>
536
570
 
537
571
  // TODO how to handle errors gracefully
538
- const applyMutationsBatch: ApplyMutationsBatch = ({ batchItems, deferreds }) =>
572
+ const applyEventsBatch: ApplyEventsBatch = ({ batchItems, deferreds }) =>
539
573
  Effect.gen(function* () {
540
- const { dbReadModel: db, dbMutationLog, applyMutation } = yield* LeaderThreadCtx
574
+ const { dbReadModel: db, dbEventlog, applyEvent } = yield* LeaderThreadCtx
541
575
 
542
- // NOTE We always start a transaction to ensure consistency between db and mutation log (even for single-item batches)
576
+ // NOTE We always start a transaction to ensure consistency between db and eventlog (even for single-item batches)
543
577
  db.execute('BEGIN TRANSACTION', undefined) // Start the transaction
544
- dbMutationLog.execute('BEGIN TRANSACTION', undefined) // Start the transaction
578
+ dbEventlog.execute('BEGIN TRANSACTION', undefined) // Start the transaction
545
579
 
546
580
  yield* Effect.addFinalizer((exit) =>
547
581
  Effect.gen(function* () {
@@ -549,12 +583,12 @@ const applyMutationsBatch: ApplyMutationsBatch = ({ batchItems, deferreds }) =>
549
583
 
550
584
  // Rollback in case of an error
551
585
  db.execute('ROLLBACK', undefined)
552
- dbMutationLog.execute('ROLLBACK', undefined)
586
+ dbEventlog.execute('ROLLBACK', undefined)
553
587
  }),
554
588
  )
555
589
 
556
590
  for (let i = 0; i < batchItems.length; i++) {
557
- const { sessionChangeset } = yield* applyMutation(batchItems[i]!)
591
+ const { sessionChangeset } = yield* applyEvent(batchItems[i]!)
558
592
  batchItems[i]!.meta.sessionChangeset = sessionChangeset
559
593
 
560
594
  if (deferreds?.[i] !== undefined) {
@@ -563,11 +597,11 @@ const applyMutationsBatch: ApplyMutationsBatch = ({ batchItems, deferreds }) =>
563
597
  }
564
598
 
565
599
  db.execute('COMMIT', undefined) // Commit the transaction
566
- dbMutationLog.execute('COMMIT', undefined) // Commit the transaction
600
+ dbEventlog.execute('COMMIT', undefined) // Commit the transaction
567
601
  }).pipe(
568
602
  Effect.uninterruptible,
569
603
  Effect.scoped,
570
- Effect.withSpan('@livestore/common:LeaderSyncProcessor:applyMutationItems', {
604
+ Effect.withSpan('@livestore/common:LeaderSyncProcessor:applyEventItems', {
571
605
  attributes: { batchSize: batchItems.length },
572
606
  }),
573
607
  Effect.tapCauseLogPretty,
@@ -587,11 +621,12 @@ const backgroundBackendPulling = ({
587
621
  connectedClientSessionPullQueues,
588
622
  mergeCounterRef,
589
623
  mergePayloads,
624
+ advancePushHead,
590
625
  }: {
591
626
  initialBackendHead: EventId.GlobalEventId
592
- isClientEvent: (mutationEventEncoded: MutationEvent.EncodedWithMeta) => boolean
627
+ isClientEvent: (eventEncoded: LiveStoreEvent.EncodedWithMeta) => boolean
593
628
  restartBackendPushing: (
594
- filteredRebasedPending: ReadonlyArray<MutationEvent.EncodedWithMeta>,
629
+ filteredRebasedPending: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>,
595
630
  ) => Effect.Effect<void, UnexpectedError, LeaderThreadCtx | HttpClient.HttpClient>
596
631
  otelSpan: otel.Span | undefined
597
632
  syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
@@ -602,13 +637,14 @@ const backgroundBackendPulling = ({
602
637
  connectedClientSessionPullQueues: PullQueueSet
603
638
  mergeCounterRef: { current: number }
604
639
  mergePayloads: Map<number, typeof SyncState.PayloadUpstream.Type>
640
+ advancePushHead: (eventId: EventId.EventId) => void
605
641
  }) =>
606
642
  Effect.gen(function* () {
607
- const { syncBackend, dbReadModel: db, dbMutationLog, schema } = yield* LeaderThreadCtx
643
+ const { syncBackend, dbReadModel: db, dbEventlog, schema } = yield* LeaderThreadCtx
608
644
 
609
645
  if (syncBackend === undefined) return
610
646
 
611
- const onNewPullChunk = (newEvents: MutationEvent.EncodedWithMeta[], remaining: number) =>
647
+ const onNewPullChunk = (newEvents: LiveStoreEvent.EncodedWithMeta[], remaining: number) =>
612
648
  Effect.gen(function* () {
613
649
  if (newEvents.length === 0) return
614
650
 
@@ -629,7 +665,7 @@ const backgroundBackendPulling = ({
629
665
  syncState,
630
666
  payload: SyncState.PayloadUpstreamAdvance.make({ newEvents }),
631
667
  isClientEvent,
632
- isEqualEvent: MutationEvent.isEqualEncoded,
668
+ isEqualEvent: LiveStoreEvent.isEqualEncoded,
633
669
  ignoreClientEvents: true,
634
670
  })
635
671
 
@@ -638,7 +674,7 @@ const backgroundBackendPulling = ({
638
674
  if (mergeResult._tag === 'reject') {
639
675
  return shouldNeverHappen('The leader thread should never reject upstream advances')
640
676
  } else if (mergeResult._tag === 'unexpected-error') {
641
- otelSpan?.addEvent(`merge[${mergeCounter}]:backend-pull:unexpected-error`, {
677
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:unexpected-error`, {
642
678
  newEventsCount: newEvents.length,
643
679
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
644
680
  })
@@ -647,24 +683,24 @@ const backgroundBackendPulling = ({
647
683
 
648
684
  const newBackendHead = newEvents.at(-1)!.id
649
685
 
650
- Mutationlog.updateBackendHead(dbMutationLog, newBackendHead)
686
+ Eventlog.updateBackendHead(dbEventlog, newBackendHead)
651
687
 
652
688
  if (mergeResult._tag === 'rebase') {
653
- otelSpan?.addEvent(`merge[${mergeCounter}]:backend-pull:rebase`, {
689
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:rebase`, {
654
690
  newEventsCount: newEvents.length,
655
691
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
656
692
  rollbackCount: mergeResult.rollbackEvents.length,
657
693
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
658
694
  })
659
695
 
660
- const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((mutationEvent) => {
661
- const mutationDef = getMutationDef(schema, mutationEvent.mutation)
662
- return mutationDef.options.clientOnly === false
696
+ const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
697
+ const eventDef = getEventDef(schema, event.name)
698
+ return eventDef.eventDef.options.clientOnly === false
663
699
  })
664
700
  yield* restartBackendPushing(globalRebasedPendingEvents)
665
701
 
666
702
  if (mergeResult.rollbackEvents.length > 0) {
667
- yield* rollback({ db, dbMutationLog, eventIdsToRollback: mergeResult.rollbackEvents.map((_) => _.id) })
703
+ yield* rollback({ db, dbEventlog, eventIdsToRollback: mergeResult.rollbackEvents.map((_) => _.id) })
668
704
  }
669
705
 
670
706
  yield* connectedClientSessionPullQueues.offer({
@@ -682,7 +718,7 @@ const backgroundBackendPulling = ({
682
718
  }),
683
719
  )
684
720
  } else {
685
- otelSpan?.addEvent(`merge[${mergeCounter}]:backend-pull:advance`, {
721
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:advance`, {
686
722
  newEventsCount: newEvents.length,
687
723
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
688
724
  })
@@ -696,19 +732,19 @@ const backgroundBackendPulling = ({
696
732
  if (mergeResult.confirmedEvents.length > 0) {
697
733
  // `mergeResult.confirmedEvents` don't contain the correct sync metadata, so we need to use
698
734
  // `newEvents` instead which we filter via `mergeResult.confirmedEvents`
699
- const confirmedNewEvents = newEvents.filter((mutationEvent) =>
700
- mergeResult.confirmedEvents.some((confirmedEvent) =>
701
- EventId.isEqual(mutationEvent.id, confirmedEvent.id),
702
- ),
735
+ const confirmedNewEvents = newEvents.filter((event) =>
736
+ mergeResult.confirmedEvents.some((confirmedEvent) => EventId.isEqual(event.id, confirmedEvent.id)),
703
737
  )
704
- yield* Mutationlog.updateSyncMetadata(confirmedNewEvents)
738
+ yield* Eventlog.updateSyncMetadata(confirmedNewEvents)
705
739
  }
706
740
  }
707
741
 
708
742
  // Removes the changeset rows which are no longer needed as we'll never have to rollback beyond this point
709
743
  trimChangesetRows(db, newBackendHead)
710
744
 
711
- yield* applyMutationsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined })
745
+ advancePushHead(mergeResult.newSyncState.localHead)
746
+
747
+ yield* applyEventsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined })
712
748
 
713
749
  yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState)
714
750
 
@@ -718,7 +754,7 @@ const backgroundBackendPulling = ({
718
754
  }
719
755
  })
720
756
 
721
- const cursorInfo = yield* Mutationlog.getSyncBackendCursorInfo(initialBackendHead)
757
+ const cursorInfo = yield* Eventlog.getSyncBackendCursorInfo(initialBackendHead)
722
758
 
723
759
  yield* syncBackend.pull(cursorInfo).pipe(
724
760
  // TODO only take from queue while connected
@@ -731,13 +767,13 @@ const backgroundBackendPulling = ({
731
767
  // },
732
768
  // })
733
769
 
734
- // NOTE we only want to take process mutations when the sync backend is connected
770
+ // NOTE we only want to take process events when the sync backend is connected
735
771
  // (e.g. needed for simulating being offline)
736
772
  // TODO remove when there's a better way to handle this in stream above
737
773
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
738
774
 
739
775
  yield* onNewPullChunk(
740
- batch.map((_) => MutationEvent.EncodedWithMeta.fromGlobal(_.mutationEventEncoded, _.metadata)),
776
+ batch.map((_) => LiveStoreEvent.EncodedWithMeta.fromGlobal(_.eventEncoded, _.metadata)),
741
777
  remaining,
742
778
  )
743
779
 
@@ -753,10 +789,12 @@ const backgroundBackendPushing = ({
753
789
  syncBackendPushQueue,
754
790
  otelSpan,
755
791
  devtoolsLatch,
792
+ backendPushBatchSize,
756
793
  }: {
757
- syncBackendPushQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
794
+ syncBackendPushQueue: BucketQueue.BucketQueue<LiveStoreEvent.EncodedWithMeta>
758
795
  otelSpan: otel.Span | undefined
759
796
  devtoolsLatch: Effect.Latch | undefined
797
+ backendPushBatchSize: number
760
798
  }) =>
761
799
  Effect.gen(function* () {
762
800
  const { syncBackend } = yield* LeaderThreadCtx
@@ -765,8 +803,7 @@ const backgroundBackendPushing = ({
765
803
  while (true) {
766
804
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
767
805
 
768
- // TODO make batch size configurable
769
- const queueItems = yield* BucketQueue.takeBetween(syncBackendPushQueue, 1, BACKEND_PUSH_BATCH_SIZE)
806
+ const queueItems = yield* BucketQueue.takeBetween(syncBackendPushQueue, 1, backendPushBatchSize)
770
807
 
771
808
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
772
809
 
@@ -872,3 +909,27 @@ const getMergeCounterFromDb = (dbReadModel: SqliteDb) =>
872
909
  )
873
910
  return result[0]?.mergeCounter ?? 0
874
911
  })
912
+
913
+ const validatePushBatch = (batch: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>, pushHead: EventId.EventId) =>
914
+ Effect.gen(function* () {
915
+ if (batch.length === 0) {
916
+ return
917
+ }
918
+
919
+ // Make sure batch is monotonically increasing
920
+ for (let i = 1; i < batch.length; i++) {
921
+ if (EventId.isGreaterThanOrEqual(batch[i - 1]!.id, batch[i]!.id)) {
922
+ shouldNeverHappen(
923
+ `Events must be ordered in monotonically ascending order by eventId. Received: [${batch.map((e) => EventId.toString(e.id)).join(', ')}]`,
924
+ )
925
+ }
926
+ }
927
+
928
+ // Make sure smallest event id is > pushHead
929
+ if (EventId.isGreaterThanOrEqual(pushHead, batch[0]!.id)) {
930
+ return yield* LeaderAheadError.make({
931
+ minimumExpectedId: pushHead,
932
+ providedId: batch[0]!.id,
933
+ })
934
+ }
935
+ })