@livestore/common 0.3.0-dev.27 → 0.3.0-dev.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (277)
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/__tests__/fixture.d.ts +83 -221
  3. package/dist/__tests__/fixture.d.ts.map +1 -1
  4. package/dist/__tests__/fixture.js +33 -11
  5. package/dist/__tests__/fixture.js.map +1 -1
  6. package/dist/adapter-types.d.ts +22 -15
  7. package/dist/adapter-types.d.ts.map +1 -1
  8. package/dist/adapter-types.js +15 -2
  9. package/dist/adapter-types.js.map +1 -1
  10. package/dist/bounded-collections.d.ts +1 -1
  11. package/dist/bounded-collections.d.ts.map +1 -1
  12. package/dist/debug-info.d.ts.map +1 -1
  13. package/dist/debug-info.js +1 -0
  14. package/dist/debug-info.js.map +1 -1
  15. package/dist/devtools/devtools-messages-client-session.d.ts +21 -21
  16. package/dist/devtools/devtools-messages-common.d.ts +6 -6
  17. package/dist/devtools/devtools-messages-leader.d.ts +45 -45
  18. package/dist/devtools/devtools-messages-leader.d.ts.map +1 -1
  19. package/dist/devtools/devtools-messages-leader.js +11 -11
  20. package/dist/devtools/devtools-messages-leader.js.map +1 -1
  21. package/dist/index.d.ts +2 -5
  22. package/dist/index.d.ts.map +1 -1
  23. package/dist/index.js +2 -5
  24. package/dist/index.js.map +1 -1
  25. package/dist/leader-thread/LeaderSyncProcessor.d.ts +25 -12
  26. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  27. package/dist/leader-thread/LeaderSyncProcessor.js +125 -89
  28. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  29. package/dist/leader-thread/{apply-mutation.d.ts → apply-event.d.ts} +7 -7
  30. package/dist/leader-thread/apply-event.d.ts.map +1 -0
  31. package/dist/leader-thread/apply-event.js +103 -0
  32. package/dist/leader-thread/apply-event.js.map +1 -0
  33. package/dist/leader-thread/eventlog.d.ts +27 -0
  34. package/dist/leader-thread/eventlog.d.ts.map +1 -0
  35. package/dist/leader-thread/eventlog.js +123 -0
  36. package/dist/leader-thread/eventlog.js.map +1 -0
  37. package/dist/leader-thread/leader-worker-devtools.js +18 -18
  38. package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
  39. package/dist/leader-thread/make-leader-thread-layer.d.ts +16 -4
  40. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  41. package/dist/leader-thread/make-leader-thread-layer.js +23 -16
  42. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  43. package/dist/leader-thread/mod.d.ts +1 -1
  44. package/dist/leader-thread/mod.d.ts.map +1 -1
  45. package/dist/leader-thread/mod.js +1 -1
  46. package/dist/leader-thread/mod.js.map +1 -1
  47. package/dist/leader-thread/recreate-db.d.ts.map +1 -1
  48. package/dist/leader-thread/recreate-db.js +6 -8
  49. package/dist/leader-thread/recreate-db.js.map +1 -1
  50. package/dist/leader-thread/types.d.ts +11 -11
  51. package/dist/leader-thread/types.d.ts.map +1 -1
  52. package/dist/materializer-helper.d.ts +23 -0
  53. package/dist/materializer-helper.d.ts.map +1 -0
  54. package/dist/materializer-helper.js +70 -0
  55. package/dist/materializer-helper.js.map +1 -0
  56. package/dist/query-builder/api.d.ts +58 -53
  57. package/dist/query-builder/api.d.ts.map +1 -1
  58. package/dist/query-builder/api.js +3 -5
  59. package/dist/query-builder/api.js.map +1 -1
  60. package/dist/query-builder/astToSql.d.ts.map +1 -1
  61. package/dist/query-builder/astToSql.js +59 -37
  62. package/dist/query-builder/astToSql.js.map +1 -1
  63. package/dist/query-builder/impl.d.ts +2 -3
  64. package/dist/query-builder/impl.d.ts.map +1 -1
  65. package/dist/query-builder/impl.js +48 -46
  66. package/dist/query-builder/impl.js.map +1 -1
  67. package/dist/query-builder/impl.test.d.ts +86 -1
  68. package/dist/query-builder/impl.test.d.ts.map +1 -1
  69. package/dist/query-builder/impl.test.js +244 -36
  70. package/dist/query-builder/impl.test.js.map +1 -1
  71. package/dist/rehydrate-from-eventlog.d.ts +14 -0
  72. package/dist/rehydrate-from-eventlog.d.ts.map +1 -0
  73. package/dist/{rehydrate-from-mutationlog.js → rehydrate-from-eventlog.js} +25 -26
  74. package/dist/rehydrate-from-eventlog.js.map +1 -0
  75. package/dist/schema/EventDef.d.ts +136 -0
  76. package/dist/schema/EventDef.d.ts.map +1 -0
  77. package/dist/schema/EventDef.js +58 -0
  78. package/dist/schema/EventDef.js.map +1 -0
  79. package/dist/schema/EventId.d.ts +2 -2
  80. package/dist/schema/EventId.d.ts.map +1 -1
  81. package/dist/schema/EventId.js +8 -2
  82. package/dist/schema/EventId.js.map +1 -1
  83. package/dist/schema/{MutationEvent.d.ts → LiveStoreEvent.d.ts} +56 -56
  84. package/dist/schema/LiveStoreEvent.d.ts.map +1 -0
  85. package/dist/schema/{MutationEvent.js → LiveStoreEvent.js} +25 -25
  86. package/dist/schema/LiveStoreEvent.js.map +1 -0
  87. package/dist/schema/client-document-def.d.ts +223 -0
  88. package/dist/schema/client-document-def.d.ts.map +1 -0
  89. package/dist/schema/client-document-def.js +170 -0
  90. package/dist/schema/client-document-def.js.map +1 -0
  91. package/dist/schema/client-document-def.test.d.ts +2 -0
  92. package/dist/schema/client-document-def.test.d.ts.map +1 -0
  93. package/dist/schema/client-document-def.test.js +201 -0
  94. package/dist/schema/client-document-def.test.js.map +1 -0
  95. package/dist/schema/db-schema/dsl/mod.d.ts.map +1 -1
  96. package/dist/schema/events.d.ts +2 -0
  97. package/dist/schema/events.d.ts.map +1 -0
  98. package/dist/schema/events.js +2 -0
  99. package/dist/schema/events.js.map +1 -0
  100. package/dist/schema/mod.d.ts +4 -3
  101. package/dist/schema/mod.d.ts.map +1 -1
  102. package/dist/schema/mod.js +4 -3
  103. package/dist/schema/mod.js.map +1 -1
  104. package/dist/schema/schema.d.ts +27 -23
  105. package/dist/schema/schema.d.ts.map +1 -1
  106. package/dist/schema/schema.js +45 -43
  107. package/dist/schema/schema.js.map +1 -1
  108. package/dist/schema/sqlite-state.d.ts +12 -0
  109. package/dist/schema/sqlite-state.d.ts.map +1 -0
  110. package/dist/schema/sqlite-state.js +36 -0
  111. package/dist/schema/sqlite-state.js.map +1 -0
  112. package/dist/schema/system-tables.d.ts +67 -98
  113. package/dist/schema/system-tables.d.ts.map +1 -1
  114. package/dist/schema/system-tables.js +62 -48
  115. package/dist/schema/system-tables.js.map +1 -1
  116. package/dist/schema/table-def.d.ts +26 -96
  117. package/dist/schema/table-def.d.ts.map +1 -1
  118. package/dist/schema/table-def.js +16 -64
  119. package/dist/schema/table-def.js.map +1 -1
  120. package/dist/schema/view.d.ts +3 -0
  121. package/dist/schema/view.d.ts.map +1 -0
  122. package/dist/schema/view.js +3 -0
  123. package/dist/schema/view.js.map +1 -0
  124. package/dist/schema-management/common.d.ts +4 -4
  125. package/dist/schema-management/common.d.ts.map +1 -1
  126. package/dist/schema-management/migrations.d.ts.map +1 -1
  127. package/dist/schema-management/migrations.js +6 -6
  128. package/dist/schema-management/migrations.js.map +1 -1
  129. package/dist/schema-management/validate-mutation-defs.d.ts +3 -3
  130. package/dist/schema-management/validate-mutation-defs.d.ts.map +1 -1
  131. package/dist/schema-management/validate-mutation-defs.js +17 -17
  132. package/dist/schema-management/validate-mutation-defs.js.map +1 -1
  133. package/dist/sync/ClientSessionSyncProcessor.d.ts +7 -7
  134. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  135. package/dist/sync/ClientSessionSyncProcessor.js +31 -30
  136. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  137. package/dist/sync/next/facts.d.ts +19 -19
  138. package/dist/sync/next/facts.d.ts.map +1 -1
  139. package/dist/sync/next/facts.js +2 -2
  140. package/dist/sync/next/facts.js.map +1 -1
  141. package/dist/sync/next/history-dag-common.d.ts +3 -3
  142. package/dist/sync/next/history-dag-common.d.ts.map +1 -1
  143. package/dist/sync/next/history-dag-common.js +1 -1
  144. package/dist/sync/next/history-dag-common.js.map +1 -1
  145. package/dist/sync/next/history-dag.js +1 -1
  146. package/dist/sync/next/history-dag.js.map +1 -1
  147. package/dist/sync/next/rebase-events.d.ts +7 -7
  148. package/dist/sync/next/rebase-events.d.ts.map +1 -1
  149. package/dist/sync/next/rebase-events.js +5 -5
  150. package/dist/sync/next/rebase-events.js.map +1 -1
  151. package/dist/sync/next/test/compact-events.calculator.test.js +38 -33
  152. package/dist/sync/next/test/compact-events.calculator.test.js.map +1 -1
  153. package/dist/sync/next/test/compact-events.test.js +71 -71
  154. package/dist/sync/next/test/compact-events.test.js.map +1 -1
  155. package/dist/sync/next/test/{mutation-fixtures.d.ts → event-fixtures.d.ts} +29 -29
  156. package/dist/sync/next/test/event-fixtures.d.ts.map +1 -0
  157. package/dist/sync/next/test/{mutation-fixtures.js → event-fixtures.js} +60 -25
  158. package/dist/sync/next/test/event-fixtures.js.map +1 -0
  159. package/dist/sync/next/test/mod.d.ts +1 -1
  160. package/dist/sync/next/test/mod.d.ts.map +1 -1
  161. package/dist/sync/next/test/mod.js +1 -1
  162. package/dist/sync/next/test/mod.js.map +1 -1
  163. package/dist/sync/sync.d.ts +3 -3
  164. package/dist/sync/sync.d.ts.map +1 -1
  165. package/dist/sync/syncstate.d.ts +32 -32
  166. package/dist/sync/syncstate.d.ts.map +1 -1
  167. package/dist/sync/syncstate.js +31 -25
  168. package/dist/sync/syncstate.js.map +1 -1
  169. package/dist/sync/syncstate.test.js +165 -175
  170. package/dist/sync/syncstate.test.js.map +1 -1
  171. package/dist/sync/validate-push-payload.d.ts +2 -2
  172. package/dist/sync/validate-push-payload.d.ts.map +1 -1
  173. package/dist/sync/validate-push-payload.js.map +1 -1
  174. package/dist/version.d.ts +1 -1
  175. package/dist/version.js +1 -1
  176. package/package.json +3 -3
  177. package/src/__tests__/fixture.ts +36 -15
  178. package/src/adapter-types.ts +23 -16
  179. package/src/debug-info.ts +1 -0
  180. package/src/devtools/devtools-messages-leader.ts +13 -13
  181. package/src/index.ts +2 -5
  182. package/src/leader-thread/LeaderSyncProcessor.ts +183 -122
  183. package/src/leader-thread/{apply-mutation.ts → apply-event.ts} +50 -74
  184. package/src/leader-thread/eventlog.ts +199 -0
  185. package/src/leader-thread/leader-worker-devtools.ts +18 -18
  186. package/src/leader-thread/make-leader-thread-layer.ts +51 -29
  187. package/src/leader-thread/mod.ts +1 -1
  188. package/src/leader-thread/recreate-db.ts +6 -9
  189. package/src/leader-thread/types.ts +12 -12
  190. package/src/materializer-helper.ts +110 -0
  191. package/src/query-builder/api.ts +79 -105
  192. package/src/query-builder/astToSql.ts +68 -39
  193. package/src/query-builder/impl.test.ts +264 -42
  194. package/src/query-builder/impl.ts +72 -56
  195. package/src/{rehydrate-from-mutationlog.ts → rehydrate-from-eventlog.ts} +33 -40
  196. package/src/schema/EventDef.ts +216 -0
  197. package/src/schema/EventId.ts +11 -3
  198. package/src/schema/{MutationEvent.ts → LiveStoreEvent.ts} +68 -69
  199. package/src/schema/client-document-def.test.ts +239 -0
  200. package/src/schema/client-document-def.ts +444 -0
  201. package/src/schema/db-schema/dsl/mod.ts +0 -1
  202. package/src/schema/events.ts +1 -0
  203. package/src/schema/mod.ts +4 -3
  204. package/src/schema/schema.ts +79 -69
  205. package/src/schema/sqlite-state.ts +62 -0
  206. package/src/schema/system-tables.ts +42 -53
  207. package/src/schema/table-def.ts +53 -209
  208. package/src/schema/view.ts +2 -0
  209. package/src/schema-management/common.ts +4 -4
  210. package/src/schema-management/migrations.ts +8 -9
  211. package/src/schema-management/validate-mutation-defs.ts +22 -24
  212. package/src/sync/ClientSessionSyncProcessor.ts +37 -36
  213. package/src/sync/next/facts.ts +31 -32
  214. package/src/sync/next/history-dag-common.ts +4 -4
  215. package/src/sync/next/history-dag.ts +1 -1
  216. package/src/sync/next/rebase-events.ts +13 -13
  217. package/src/sync/next/test/compact-events.calculator.test.ts +45 -45
  218. package/src/sync/next/test/compact-events.test.ts +73 -73
  219. package/src/sync/next/test/event-fixtures.ts +219 -0
  220. package/src/sync/next/test/mod.ts +1 -1
  221. package/src/sync/sync.ts +3 -3
  222. package/src/sync/syncstate.test.ts +168 -179
  223. package/src/sync/syncstate.ts +48 -38
  224. package/src/sync/validate-push-payload.ts +2 -2
  225. package/src/version.ts +1 -1
  226. package/tmp/pack.tgz +0 -0
  227. package/tsconfig.json +1 -0
  228. package/dist/derived-mutations.d.ts +0 -109
  229. package/dist/derived-mutations.d.ts.map +0 -1
  230. package/dist/derived-mutations.js +0 -54
  231. package/dist/derived-mutations.js.map +0 -1
  232. package/dist/derived-mutations.test.d.ts +0 -2
  233. package/dist/derived-mutations.test.d.ts.map +0 -1
  234. package/dist/derived-mutations.test.js +0 -93
  235. package/dist/derived-mutations.test.js.map +0 -1
  236. package/dist/init-singleton-tables.d.ts +0 -4
  237. package/dist/init-singleton-tables.d.ts.map +0 -1
  238. package/dist/init-singleton-tables.js +0 -16
  239. package/dist/init-singleton-tables.js.map +0 -1
  240. package/dist/leader-thread/apply-mutation.d.ts.map +0 -1
  241. package/dist/leader-thread/apply-mutation.js +0 -122
  242. package/dist/leader-thread/apply-mutation.js.map +0 -1
  243. package/dist/leader-thread/mutationlog.d.ts +0 -27
  244. package/dist/leader-thread/mutationlog.d.ts.map +0 -1
  245. package/dist/leader-thread/mutationlog.js +0 -124
  246. package/dist/leader-thread/mutationlog.js.map +0 -1
  247. package/dist/leader-thread/pull-queue-set.d.ts +0 -7
  248. package/dist/leader-thread/pull-queue-set.d.ts.map +0 -1
  249. package/dist/leader-thread/pull-queue-set.js +0 -38
  250. package/dist/leader-thread/pull-queue-set.js.map +0 -1
  251. package/dist/mutation.d.ts +0 -20
  252. package/dist/mutation.d.ts.map +0 -1
  253. package/dist/mutation.js +0 -68
  254. package/dist/mutation.js.map +0 -1
  255. package/dist/query-info.d.ts +0 -41
  256. package/dist/query-info.d.ts.map +0 -1
  257. package/dist/query-info.js +0 -7
  258. package/dist/query-info.js.map +0 -1
  259. package/dist/rehydrate-from-mutationlog.d.ts +0 -15
  260. package/dist/rehydrate-from-mutationlog.d.ts.map +0 -1
  261. package/dist/rehydrate-from-mutationlog.js.map +0 -1
  262. package/dist/schema/MutationEvent.d.ts.map +0 -1
  263. package/dist/schema/MutationEvent.js.map +0 -1
  264. package/dist/schema/mutations.d.ts +0 -115
  265. package/dist/schema/mutations.d.ts.map +0 -1
  266. package/dist/schema/mutations.js +0 -42
  267. package/dist/schema/mutations.js.map +0 -1
  268. package/dist/sync/next/test/mutation-fixtures.d.ts.map +0 -1
  269. package/dist/sync/next/test/mutation-fixtures.js.map +0 -1
  270. package/src/derived-mutations.test.ts +0 -101
  271. package/src/derived-mutations.ts +0 -170
  272. package/src/init-singleton-tables.ts +0 -24
  273. package/src/leader-thread/mutationlog.ts +0 -202
  274. package/src/mutation.ts +0 -108
  275. package/src/query-info.ts +0 -83
  276. package/src/schema/mutations.ts +0 -193
  277. package/src/sync/next/test/mutation-fixtures.ts +0 -228
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,2BAA2B,CAAA;AACzC,cAAc,WAAW,CAAA;AACzB,cAAc,oBAAoB,CAAA;AAClC,cAAc,mCAAmC,CAAA;AACjD,cAAc,eAAe,CAAA;AAC7B,cAAc,4BAA4B,CAAA;AAC1C,cAAc,iCAAiC,CAAA;AAC/C,cAAc,iBAAiB,CAAA;AAC/B,cAAc,wBAAwB,CAAA;AACtC,cAAc,iBAAiB,CAAA;AAC/B,OAAO,KAAK,QAAQ,MAAM,mBAAmB,CAAA;AAC7C,cAAc,iBAAiB,CAAA;AAC/B,cAAc,0BAA0B,CAAA;AACxC,cAAc,cAAc,CAAA;AAC5B,cAAc,wBAAwB,CAAA;AACtC,OAAO,KAAK,SAAS,MAAM,qBAAqB,CAAA;AAChD,cAAc,WAAW,CAAA"}
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,2BAA2B,CAAA;AACzC,cAAc,WAAW,CAAA;AACzB,cAAc,oBAAoB,CAAA;AAClC,cAAc,mCAAmC,CAAA;AACjD,cAAc,0BAA0B,CAAA;AACxC,cAAc,8BAA8B,CAAA;AAC5C,cAAc,iBAAiB,CAAA;AAC/B,OAAO,KAAK,QAAQ,MAAM,mBAAmB,CAAA;AAC7C,cAAc,iBAAiB,CAAA;AAC/B,cAAc,0BAA0B,CAAA;AACxC,cAAc,cAAc,CAAA;AAC5B,cAAc,wBAAwB,CAAA;AACtC,OAAO,KAAK,SAAS,MAAM,qBAAqB,CAAA;AAChD,cAAc,WAAW,CAAA"}
package/dist/leader-thread/LeaderSyncProcessor.d.ts CHANGED
@@ -4,18 +4,16 @@ import type { SqliteDb } from '../adapter-types.js';
  import { UnexpectedError } from '../adapter-types.js';
  import type { LiveStoreSchema } from '../schema/mod.js';
  import type { InitialBlockingSyncContext, LeaderSyncProcessor } from './types.js';
- export declare const BACKEND_PUSH_BATCH_SIZE = 50;
- export declare const LOCAL_PUSH_BATCH_SIZE = 10;
  /**
- * The LeaderSyncProcessor manages synchronization of mutations between
+ * The LeaderSyncProcessor manages synchronization of events between
  * the local state and the sync backend, ensuring efficient and orderly processing.
  *
  * In the LeaderSyncProcessor, pulling always has precedence over pushing.
  *
  * Responsibilities:
- * - Queueing incoming local mutations in a localPushesQueue.
- * - Broadcasting mutations to client sessions via pull queues.
- * - Pushing mutations to the sync backend.
+ * - Queueing incoming local events in a localPushesQueue.
+ * - Broadcasting events to client sessions via pull queues.
+ * - Pushing events to the sync backend.
  *
  * Notes:
  *
@@ -23,27 +21,42 @@ export declare const LOCAL_PUSH_BATCH_SIZE = 10;
  * - localPushesQueue:
  * - Maintains events in ascending order.
  * - Uses `Deferred` objects to resolve/reject events based on application success.
- * - Processes events from the queue, applying mutations in batches.
+ * - Processes events from the queue, applying events in batches.
  * - Controlled by a `Latch` to manage execution flow.
  * - The latch closes on pull receipt and re-opens post-pull completion.
  * - Processes up to `maxBatchSize` events per cycle.
  *
- * Currently we're advancing the db read model and mutation log in lockstep, but we could also decouple this in the future
+ * Currently we're advancing the db read model and eventlog in lockstep, but we could also decouple this in the future
  *
  * Tricky concurrency scenarios:
  * - Queued local push batches becoming invalid due to a prior local push item being rejected.
  * Solution: Introduce a generation number for local push batches which is used to filter out old batches items in case of rejection.
  *
  */
- export declare const makeLeaderSyncProcessor: ({ schema, dbMutationLogMissing, dbMutationLog, dbReadModel, dbReadModelMissing, initialBlockingSyncContext, onError, }: {
+ export declare const makeLeaderSyncProcessor: ({ schema, dbEventlogMissing, dbEventlog, dbReadModel, dbReadModelMissing, initialBlockingSyncContext, onError, params, testing, }: {
  schema: LiveStoreSchema;
- /** Only used to know whether we can safely query dbMutationLog during setup execution */
- dbMutationLogMissing: boolean;
- dbMutationLog: SqliteDb;
+ /** Only used to know whether we can safely query dbEventlog during setup execution */
+ dbEventlogMissing: boolean;
+ dbEventlog: SqliteDb;
  dbReadModel: SqliteDb;
  /** Only used to know whether we can safely query dbReadModel during setup execution */
  dbReadModelMissing: boolean;
  initialBlockingSyncContext: InitialBlockingSyncContext;
  onError: "shutdown" | "ignore";
+ params: {
+ /**
+ * @default 10
+ */
+ localPushBatchSize?: number;
+ /**
+ * @default 50
+ */
+ backendPushBatchSize?: number;
+ };
+ testing: {
+ delays?: {
+ localPushProcessing?: Effect.Effect<void>;
+ };
+ };
  }) => Effect.Effect<LeaderSyncProcessor, UnexpectedError, Scope.Scope>;
  //# sourceMappingURL=LeaderSyncProcessor.d.ts.map
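The two previously hard-coded constants (LOCAL_PUSH_BATCH_SIZE = 10, BACKEND_PUSH_BATCH_SIZE = 50) are replaced by the optional `params.localPushBatchSize` and `params.backendPushBatchSize` fields above, and `testing.delays.localPushProcessing` lets tests inject a delay into local-push processing. A minimal sketch of the resulting defaulting behaviour, using local stand-in types rather than the package's actual exports:

    // Illustrative only: mirrors the `params` shape declared above.
    type LeaderSyncParams = {
      /** @default 10 */
      localPushBatchSize?: number
      /** @default 50 */
      backendPushBatchSize?: number
    }

    // The implementation falls back to the old constant values when a field is omitted
    // (see `params.localPushBatchSize ?? 10` / `params.backendPushBatchSize ?? 50` in the JS diff below).
    const resolveBatchSizes = (params: LeaderSyncParams) => ({
      localPushBatchSize: params.localPushBatchSize ?? 10,
      backendPushBatchSize: params.backendPushBatchSize ?? 50,
    })

    resolveBatchSizes({})                            // -> { localPushBatchSize: 10, backendPushBatchSize: 50 }
    resolveBatchSizes({ backendPushBatchSize: 100 }) // -> { localPushBatchSize: 10, backendPushBatchSize: 100 }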
package/dist/leader-thread/LeaderSyncProcessor.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"LeaderSyncProcessor.d.ts","sourceRoot":"","sources":["../../src/leader-thread/LeaderSyncProcessor.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAuB,KAAK,EAAU,MAAM,yBAAyB,CAAA;AACjF,OAAO,EAGL,MAAM,EASP,MAAM,yBAAyB,CAAA;AAGhC,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAA;AACnD,OAAO,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAA;AACrD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAA;AAavD,OAAO,KAAK,EAAE,0BAA0B,EAAE,mBAAmB,EAAE,MAAM,YAAY,CAAA;AAGjF,eAAO,MAAM,uBAAuB,KAAK,CAAA;AACzC,eAAO,MAAM,qBAAqB,KAAK,CAAA;AASvC;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,eAAO,MAAM,uBAAuB,GAAI,wHAQrC;IACD,MAAM,EAAE,eAAe,CAAA;IACvB,yFAAyF;IACzF,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,QAAQ,CAAA;IACvB,WAAW,EAAE,QAAQ,CAAA;IACrB,uFAAuF;IACvF,kBAAkB,EAAE,OAAO,CAAA;IAC3B,0BAA0B,EAAE,0BAA0B,CAAA;IACtD,OAAO,EAAE,UAAU,GAAG,QAAQ,CAAA;CAC/B,KAAG,MAAM,CAAC,MAAM,CAAC,mBAAmB,EAAE,eAAe,EAAE,KAAK,CAAC,KAAK,CAgR/D,CAAA"}
+ {"version":3,"file":"LeaderSyncProcessor.d.ts","sourceRoot":"","sources":["../../src/leader-thread/LeaderSyncProcessor.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAuB,KAAK,EAAU,MAAM,yBAAyB,CAAA;AACjF,OAAO,EAGL,MAAM,EASP,MAAM,yBAAyB,CAAA;AAGhC,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,qBAAqB,CAAA;AACnD,OAAO,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAA;AACrD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAA;AAavD,OAAO,KAAK,EAAE,0BAA0B,EAAE,mBAAmB,EAAE,MAAM,YAAY,CAAA;AAUjF;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,eAAO,MAAM,uBAAuB,GAAI,mIAUrC;IACD,MAAM,EAAE,eAAe,CAAA;IACvB,sFAAsF;IACtF,iBAAiB,EAAE,OAAO,CAAA;IAC1B,UAAU,EAAE,QAAQ,CAAA;IACpB,WAAW,EAAE,QAAQ,CAAA;IACrB,uFAAuF;IACvF,kBAAkB,EAAE,OAAO,CAAA;IAC3B,0BAA0B,EAAE,0BAA0B,CAAA;IACtD,OAAO,EAAE,UAAU,GAAG,QAAQ,CAAA;IAC9B,MAAM,EAAE;QACN;;WAEG;QACH,kBAAkB,CAAC,EAAE,MAAM,CAAA;QAC3B;;WAEG;QACH,oBAAoB,CAAC,EAAE,MAAM,CAAA;KAC9B,CAAA;IACD,OAAO,EAAE;QACP,MAAM,CAAC,EAAE;YACP,mBAAmB,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;SAC1C,CAAA;KACF,CAAA;CACF,KAAG,MAAM,CAAC,MAAM,CAAC,mBAAmB,EAAE,eAAe,EAAE,KAAK,CAAC,KAAK,CA2R/D,CAAA"}
package/dist/leader-thread/LeaderSyncProcessor.js CHANGED
@@ -1,25 +1,23 @@
  import { casesHandled, isNotUndefined, LS_DEV, shouldNeverHappen, TRACE_VERBOSE } from '@livestore/utils';
  import { BucketQueue, Deferred, Effect, Exit, FiberHandle, OtelTracer, Queue, ReadonlyArray, Stream, Subscribable, SubscriptionRef, } from '@livestore/utils/effect';
  import { UnexpectedError } from '../adapter-types.js';
- import { EventId, getMutationDef, LEADER_MERGE_COUNTER_TABLE, MutationEvent, SESSION_CHANGESET_META_TABLE, } from '../schema/mod.js';
+ import { EventId, getEventDef, LEADER_MERGE_COUNTER_TABLE, LiveStoreEvent, SESSION_CHANGESET_META_TABLE, } from '../schema/mod.js';
  import { LeaderAheadError } from '../sync/sync.js';
  import * as SyncState from '../sync/syncstate.js';
  import { sql } from '../util.js';
- import { rollback } from './apply-mutation.js';
- import * as Mutationlog from './mutationlog.js';
+ import { rollback } from './apply-event.js';
+ import * as Eventlog from './eventlog.js';
  import { LeaderThreadCtx } from './types.js';
- export const BACKEND_PUSH_BATCH_SIZE = 50;
- export const LOCAL_PUSH_BATCH_SIZE = 10;
  /**
- * The LeaderSyncProcessor manages synchronization of mutations between
+ * The LeaderSyncProcessor manages synchronization of events between
  * the local state and the sync backend, ensuring efficient and orderly processing.
  *
  * In the LeaderSyncProcessor, pulling always has precedence over pushing.
  *
  * Responsibilities:
- * - Queueing incoming local mutations in a localPushesQueue.
- * - Broadcasting mutations to client sessions via pull queues.
- * - Pushing mutations to the sync backend.
+ * - Queueing incoming local events in a localPushesQueue.
+ * - Broadcasting events to client sessions via pull queues.
+ * - Pushing events to the sync backend.
  *
  * Notes:
  *
@@ -27,24 +25,26 @@ export const LOCAL_PUSH_BATCH_SIZE = 10;
  * - localPushesQueue:
  * - Maintains events in ascending order.
  * - Uses `Deferred` objects to resolve/reject events based on application success.
- * - Processes events from the queue, applying mutations in batches.
+ * - Processes events from the queue, applying events in batches.
  * - Controlled by a `Latch` to manage execution flow.
  * - The latch closes on pull receipt and re-opens post-pull completion.
  * - Processes up to `maxBatchSize` events per cycle.
  *
- * Currently we're advancing the db read model and mutation log in lockstep, but we could also decouple this in the future
+ * Currently we're advancing the db read model and eventlog in lockstep, but we could also decouple this in the future
  *
  * Tricky concurrency scenarios:
  * - Queued local push batches becoming invalid due to a prior local push item being rejected.
  * Solution: Introduce a generation number for local push batches which is used to filter out old batches items in case of rejection.
  *
  */
- export const makeLeaderSyncProcessor = ({ schema, dbMutationLogMissing, dbMutationLog, dbReadModel, dbReadModelMissing, initialBlockingSyncContext, onError, }) => Effect.gen(function* () {
+ export const makeLeaderSyncProcessor = ({ schema, dbEventlogMissing, dbEventlog, dbReadModel, dbReadModelMissing, initialBlockingSyncContext, onError, params, testing, }) => Effect.gen(function* () {
  const syncBackendPushQueue = yield* BucketQueue.make();
+ const localPushBatchSize = params.localPushBatchSize ?? 10;
+ const backendPushBatchSize = params.backendPushBatchSize ?? 50;
  const syncStateSref = yield* SubscriptionRef.make(undefined);
- const isClientEvent = (mutationEventEncoded) => {
- const mutationDef = getMutationDef(schema, mutationEventEncoded.mutation);
- return mutationDef.options.clientOnly;
+ const isClientEvent = (eventEncoded) => {
+ const eventDef = getEventDef(schema, eventEncoded.name);
+ return eventDef.eventDef.options.clientOnly;
  };
  const connectedClientSessionPullQueues = yield* makePullQueueSet;
  /**
@@ -52,6 +52,7 @@ export const makeLeaderSyncProcessor = ({ schema, dbMutationLogMissing, dbMutati
  * If a local-push batch is rejected, all subsequent push queue items with the same generation are also rejected,
  * even if they would be valid on their own.
  */
+ // TODO get rid of this in favour of the `mergeGeneration` event id field
  const currentLocalPushGenerationRef = { current: 0 };
  const mergeCounterRef = { current: dbReadModelMissing ? 0 : yield* getMergeCounterFromDb(dbReadModel) };
  const mergePayloads = new Map();
@@ -62,43 +63,57 @@ export const makeLeaderSyncProcessor = ({ schema, dbMutationLogMissing, dbMutati
  const localPushesQueue = yield* BucketQueue.make();
  const localPushesLatch = yield* Effect.makeLatch(true);
  const pullLatch = yield* Effect.makeLatch(true);
+ /**
+ * Additionally to the `syncStateSref` we also need the `pushHeadRef` in order to prevent old/duplicate
+ * events from being pushed in a scenario like this:
+ * - client session A pushes e1
+ * - leader sync processor takes a bit and hasn't yet taken e1 from the localPushesQueue
+ * - client session B also pushes e1 (which should be rejected)
+ *
+ * Thus the purpoe of the pushHeadRef is the guard the integrity of the local push queue
+ */
+ const pushHeadRef = { current: EventId.ROOT };
+ const advancePushHead = (eventId) => {
+ pushHeadRef.current = EventId.max(pushHeadRef.current, eventId);
+ };
  // NOTE: New events are only pushed to sync backend after successful local push processing
  const push = (newEvents, options) => Effect.gen(function* () {
- // TODO validate batch
  if (newEvents.length === 0)
  return;
+ yield* validatePushBatch(newEvents, pushHeadRef.current);
+ advancePushHead(newEvents.at(-1).id);
  const waitForProcessing = options?.waitForProcessing ?? false;
  const generation = currentLocalPushGenerationRef.current;
  if (waitForProcessing) {
  const deferreds = yield* Effect.forEach(newEvents, () => Deferred.make());
- const items = newEvents.map((mutationEventEncoded, i) => [mutationEventEncoded, deferreds[i], generation]);
+ const items = newEvents.map((eventEncoded, i) => [eventEncoded, deferreds[i], generation]);
  yield* BucketQueue.offerAll(localPushesQueue, items);
  yield* Effect.all(deferreds);
  }
  else {
- const items = newEvents.map((mutationEventEncoded) => [mutationEventEncoded, undefined, generation]);
+ const items = newEvents.map((eventEncoded) => [eventEncoded, undefined, generation]);
  yield* BucketQueue.offerAll(localPushesQueue, items);
  }
- }).pipe(Effect.withSpan('@livestore/common:LeaderSyncProcessor:local-push', {
+ }).pipe(Effect.withSpan('@livestore/common:LeaderSyncProcessor:push', {
  attributes: {
  batchSize: newEvents.length,
  batch: TRACE_VERBOSE ? newEvents : undefined,
  },
  links: ctxRef.current?.span ? [{ _tag: 'SpanLink', span: ctxRef.current.span, attributes: {} }] : undefined,
  }));
- const pushPartial = ({ mutationEvent: { mutation, args }, clientId, sessionId, }) => Effect.gen(function* () {
+ const pushPartial = ({ event: { name, args }, clientId, sessionId }) => Effect.gen(function* () {
  const syncState = yield* syncStateSref;
  if (syncState === undefined)
  return shouldNeverHappen('Not initialized');
- const mutationDef = getMutationDef(schema, mutation);
- const mutationEventEncoded = new MutationEvent.EncodedWithMeta({
- mutation,
+ const eventDef = getEventDef(schema, name);
+ const eventEncoded = new LiveStoreEvent.EncodedWithMeta({
+ name,
  args,
  clientId,
  sessionId,
- ...EventId.nextPair(syncState.localHead, mutationDef.options.clientOnly),
+ ...EventId.nextPair(syncState.localHead, eventDef.eventDef.options.clientOnly),
  });
- yield* push([mutationEventEncoded]);
+ yield* push([eventEncoded]);
  }).pipe(Effect.catchTag('LeaderAheadError', Effect.orDie));
  // Starts various background loops
  const boot = Effect.gen(function* () {
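The `pushHeadRef` comment added in the hunk above describes a race where two client sessions push the same event before the leader drains its local push queue. A simplified, self-contained sketch of that guard (the real code uses `EventId`, `validatePushBatch`, and `LeaderAheadError`; the names below are stand-ins):

    // Event ids ordered first by global part, then by client part (stand-in for EventId).
    type Id = { global: number; client: number }
    const isGreaterThanOrEqual = (a: Id, b: Id) =>
      a.global > b.global || (a.global === b.global && a.client >= b.client)
    const max = (a: Id, b: Id) => (isGreaterThanOrEqual(a, b) ? a : b)

    let pushHead: Id = { global: 0, client: 0 } // EventId.ROOT stand-in

    // Mirrors push(): reject a batch whose first id is not strictly ahead of the push head,
    // then advance the head to the last id of the accepted batch.
    const tryPush = (batch: Id[]): 'accepted' | 'rejected' => {
      const first = batch[0]
      const last = batch[batch.length - 1]
      if (first === undefined || last === undefined) return 'accepted' // empty batch is a no-op
      if (isGreaterThanOrEqual(pushHead, first)) return 'rejected' // LeaderAheadError in the real code
      pushHead = max(pushHead, last)
      return 'accepted'
    }

    tryPush([{ global: 1, client: 0 }]) // session A pushes e1 -> 'accepted'
    tryPush([{ global: 1, client: 0 }]) // session B pushes e1 before the queue is drained -> 'rejected'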
@@ -112,33 +127,31 @@ export const makeLeaderSyncProcessor = ({ schema, dbMutationLogMissing, dbMutati
  devtoolsLatch: devtools.enabled ? devtools.syncBackendLatch : undefined,
  runtime,
  };
- const initialBackendHead = dbMutationLogMissing
- ? EventId.ROOT.global
- : Mutationlog.getBackendHeadFromDb(dbMutationLog);
- const initialLocalHead = dbMutationLogMissing ? EventId.ROOT : Mutationlog.getClientHeadFromDb(dbMutationLog);
+ const initialLocalHead = dbEventlogMissing ? EventId.ROOT : Eventlog.getClientHeadFromDb(dbEventlog);
+ const initialBackendHead = dbEventlogMissing ? EventId.ROOT.global : Eventlog.getBackendHeadFromDb(dbEventlog);
  if (initialBackendHead > initialLocalHead.global) {
  return shouldNeverHappen(`During boot the backend head (${initialBackendHead}) should never be greater than the local head (${initialLocalHead.global})`);
  }
- const pendingMutationEvents = dbMutationLogMissing
+ const pendingEvents = dbEventlogMissing
  ? []
- : yield* Mutationlog.getMutationEventsSince({ global: initialBackendHead, client: EventId.clientDefault });
+ : yield* Eventlog.getEventsSince({ global: initialBackendHead, client: EventId.clientDefault });
  const initialSyncState = new SyncState.SyncState({
- pending: pendingMutationEvents,
+ pending: pendingEvents,
  upstreamHead: { global: initialBackendHead, client: EventId.clientDefault },
  localHead: initialLocalHead,
  });
  /** State transitions need to happen atomically, so we use a Ref to track the state */
  yield* SubscriptionRef.set(syncStateSref, initialSyncState);
  // Rehydrate sync queue
- if (pendingMutationEvents.length > 0) {
- const globalPendingMutationEvents = pendingMutationEvents
- // Don't sync clientOnly mutations
- .filter((mutationEventEncoded) => {
- const mutationDef = getMutationDef(schema, mutationEventEncoded.mutation);
- return mutationDef.options.clientOnly === false;
+ if (pendingEvents.length > 0) {
+ const globalPendingEvents = pendingEvents
+ // Don't sync clientOnly events
+ .filter((eventEncoded) => {
+ const eventDef = getEventDef(schema, eventEncoded.name);
+ return eventDef.eventDef.options.clientOnly === false;
  });
- if (globalPendingMutationEvents.length > 0) {
- yield* BucketQueue.offerAll(syncBackendPushQueue, globalPendingMutationEvents);
+ if (globalPendingEvents.length > 0) {
+ yield* BucketQueue.offerAll(syncBackendPushQueue, globalPendingEvents);
  }
  }
  const shutdownOnError = (cause) => Effect.gen(function* () {
@@ -160,13 +173,19 @@ export const makeLeaderSyncProcessor = ({ schema, dbMutationLogMissing, dbMutati
  connectedClientSessionPullQueues,
  mergeCounterRef,
  mergePayloads,
+ localPushBatchSize,
+ testing: {
+ delay: testing?.delays?.localPushProcessing,
+ },
  }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped);
  const backendPushingFiberHandle = yield* FiberHandle.make();
- yield* FiberHandle.run(backendPushingFiberHandle, backgroundBackendPushing({
+ const backendPushingEffect = backgroundBackendPushing({
  syncBackendPushQueue,
  otelSpan,
  devtoolsLatch: ctxRef.current?.devtoolsLatch,
- }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError)));
+ backendPushBatchSize,
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError));
+ yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect);
  yield* backgroundBackendPulling({
  initialBackendHead,
  isClientEvent,
@@ -177,11 +196,7 @@ export const makeLeaderSyncProcessor = ({ schema, dbMutationLogMissing, dbMutati
  yield* BucketQueue.clear(syncBackendPushQueue);
  yield* BucketQueue.offerAll(syncBackendPushQueue, filteredRebasedPending);
  // Restart pushing fiber
- yield* FiberHandle.run(backendPushingFiberHandle, backgroundBackendPushing({
- syncBackendPushQueue,
- otelSpan,
- devtoolsLatch: ctxRef.current?.devtoolsLatch,
- }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError)));
+ yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect);
  }),
  syncStateSref,
  localPushesLatch,
@@ -192,6 +207,7 @@ export const makeLeaderSyncProcessor = ({ schema, dbMutationLogMissing, dbMutati
  connectedClientSessionPullQueues,
  mergeCounterRef,
  mergePayloads,
+ advancePushHead,
  }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped);
  return { initialLeaderHead: initialLocalHead };
  }).pipe(Effect.withSpanScoped('@livestore/common:LeaderSyncProcessor:boot'));
@@ -212,7 +228,7 @@ export const makeLeaderSyncProcessor = ({ schema, dbMutationLogMissing, dbMutati
  return {
  payload: {
  _tag: 'upstream-advance',
- newEvents: ReadonlyArray.dropWhile(payload.newEvents, (mutationEventEncoded) => EventId.isGreaterThanOrEqual(cursor.eventId, mutationEventEncoded.id)),
+ newEvents: ReadonlyArray.dropWhile(payload.newEvents, (eventEncoded) => EventId.isGreaterThanOrEqual(cursor.eventId, eventEncoded.id)),
  },
  mergeCounter,
  };
@@ -244,10 +260,12 @@ export const makeLeaderSyncProcessor = ({ schema, dbMutationLogMissing, dbMutati
  getMergeCounter: () => mergeCounterRef.current,
  };
  });
- const backgroundApplyLocalPushes = ({ localPushesLatch, localPushesQueue, pullLatch, syncStateSref, syncBackendPushQueue, schema, isClientEvent, otelSpan, currentLocalPushGenerationRef, connectedClientSessionPullQueues, mergeCounterRef, mergePayloads, }) => Effect.gen(function* () {
+ const backgroundApplyLocalPushes = ({ localPushesLatch, localPushesQueue, pullLatch, syncStateSref, syncBackendPushQueue, schema, isClientEvent, otelSpan, currentLocalPushGenerationRef, connectedClientSessionPullQueues, mergeCounterRef, mergePayloads, localPushBatchSize, testing, }) => Effect.gen(function* () {
  while (true) {
- // TODO make batch size configurable
- const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, LOCAL_PUSH_BATCH_SIZE);
+ if (testing.delay !== undefined) {
+ yield* testing.delay.pipe(Effect.withSpan('localPushProcessingDelay'));
+ }
+ const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, localPushBatchSize);
  // Wait for the backend pulling to finish
  yield* localPushesLatch.await;
  // Prevent backend pull processing until this local push is finished
@@ -256,7 +274,7 @@ const backgroundApplyLocalPushes = ({ localPushesLatch, localPushesQueue, pullLa
  // It's important that we filter after we got localPushesLatch, otherwise we might filter with the old generation
  const filteredBatchItems = batchItems
  .filter(([_1, _2, generation]) => generation === currentLocalPushGenerationRef.current)
- .map(([mutationEventEncoded, deferred]) => [mutationEventEncoded, deferred]);
+ .map(([eventEncoded, deferred]) => [eventEncoded, deferred]);
  if (filteredBatchItems.length === 0) {
  // console.log('dropping old-gen batch', currentLocalPushGenerationRef.current)
  // Allow the backend pulling to start
@@ -271,12 +289,12 @@ const backgroundApplyLocalPushes = ({ localPushesLatch, localPushesQueue, pullLa
  syncState,
  payload: { _tag: 'local-push', newEvents },
  isClientEvent,
- isEqualEvent: MutationEvent.isEqualEncoded,
+ isEqualEvent: LiveStoreEvent.isEqualEncoded,
  });
  const mergeCounter = yield* incrementMergeCounter(mergeCounterRef);
  switch (mergeResult._tag) {
  case 'unexpected-error': {
- otelSpan?.addEvent(`merge[${mergeCounter}]:local-push:unexpected-error`, {
+ otelSpan?.addEvent(`[${mergeCounter}]:push:unexpected-error`, {
  batchSize: newEvents.length,
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
  });
@@ -286,7 +304,7 @@ const backgroundApplyLocalPushes = ({ localPushesLatch, localPushesQueue, pullLa
  return shouldNeverHappen('The leader thread should never have to rebase due to a local push');
  }
  case 'reject': {
- otelSpan?.addEvent(`merge[${mergeCounter}]:local-push:reject`, {
+ otelSpan?.addEvent(`[${mergeCounter}]:push:reject`, {
  batchSize: newEvents.length,
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
  });
@@ -331,48 +349,48 @@ const backgroundApplyLocalPushes = ({ localPushesLatch, localPushesQueue, pullLa
  mergeCounter,
  });
  mergePayloads.set(mergeCounter, SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }));
- otelSpan?.addEvent(`merge[${mergeCounter}]:local-push:advance`, {
+ otelSpan?.addEvent(`[${mergeCounter}]:push:advance`, {
  batchSize: newEvents.length,
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
  });
- // Don't sync clientOnly mutations
- const filteredBatch = mergeResult.newEvents.filter((mutationEventEncoded) => {
- const mutationDef = getMutationDef(schema, mutationEventEncoded.mutation);
- return mutationDef.options.clientOnly === false;
+ // Don't sync clientOnly events
+ const filteredBatch = mergeResult.newEvents.filter((eventEncoded) => {
+ const eventDef = getEventDef(schema, eventEncoded.name);
+ return eventDef.eventDef.options.clientOnly === false;
  });
  yield* BucketQueue.offerAll(syncBackendPushQueue, filteredBatch);
- yield* applyMutationsBatch({ batchItems: newEvents, deferreds });
+ yield* applyEventsBatch({ batchItems: mergeResult.newEvents, deferreds });
  // Allow the backend pulling to start
  yield* pullLatch.open;
  }
  });
  // TODO how to handle errors gracefully
- const applyMutationsBatch = ({ batchItems, deferreds }) => Effect.gen(function* () {
- const { dbReadModel: db, dbMutationLog, applyMutation } = yield* LeaderThreadCtx;
- // NOTE We always start a transaction to ensure consistency between db and mutation log (even for single-item batches)
+ const applyEventsBatch = ({ batchItems, deferreds }) => Effect.gen(function* () {
+ const { dbReadModel: db, dbEventlog, applyEvent } = yield* LeaderThreadCtx;
+ // NOTE We always start a transaction to ensure consistency between db and eventlog (even for single-item batches)
  db.execute('BEGIN TRANSACTION', undefined); // Start the transaction
- dbMutationLog.execute('BEGIN TRANSACTION', undefined); // Start the transaction
+ dbEventlog.execute('BEGIN TRANSACTION', undefined); // Start the transaction
  yield* Effect.addFinalizer((exit) => Effect.gen(function* () {
  if (Exit.isSuccess(exit))
  return;
  // Rollback in case of an error
  db.execute('ROLLBACK', undefined);
- dbMutationLog.execute('ROLLBACK', undefined);
+ dbEventlog.execute('ROLLBACK', undefined);
  }));
  for (let i = 0; i < batchItems.length; i++) {
- const { sessionChangeset } = yield* applyMutation(batchItems[i]);
+ const { sessionChangeset } = yield* applyEvent(batchItems[i]);
  batchItems[i].meta.sessionChangeset = sessionChangeset;
  if (deferreds?.[i] !== undefined) {
  yield* Deferred.succeed(deferreds[i], void 0);
  }
  }
  db.execute('COMMIT', undefined); // Commit the transaction
- dbMutationLog.execute('COMMIT', undefined); // Commit the transaction
- }).pipe(Effect.uninterruptible, Effect.scoped, Effect.withSpan('@livestore/common:LeaderSyncProcessor:applyMutationItems', {
+ dbEventlog.execute('COMMIT', undefined); // Commit the transaction
+ }).pipe(Effect.uninterruptible, Effect.scoped, Effect.withSpan('@livestore/common:LeaderSyncProcessor:applyEventItems', {
  attributes: { batchSize: batchItems.length },
  }), Effect.tapCauseLogPretty, UnexpectedError.mapToUnexpectedError);
- const backgroundBackendPulling = ({ initialBackendHead, isClientEvent, restartBackendPushing, otelSpan, syncStateSref, localPushesLatch, pullLatch, devtoolsLatch, initialBlockingSyncContext, connectedClientSessionPullQueues, mergeCounterRef, mergePayloads, }) => Effect.gen(function* () {
- const { syncBackend, dbReadModel: db, dbMutationLog, schema } = yield* LeaderThreadCtx;
+ const backgroundBackendPulling = ({ initialBackendHead, isClientEvent, restartBackendPushing, otelSpan, syncStateSref, localPushesLatch, pullLatch, devtoolsLatch, initialBlockingSyncContext, connectedClientSessionPullQueues, mergeCounterRef, mergePayloads, advancePushHead, }) => Effect.gen(function* () {
+ const { syncBackend, dbReadModel: db, dbEventlog, schema } = yield* LeaderThreadCtx;
  if (syncBackend === undefined)
  return;
  const onNewPullChunk = (newEvents, remaining) => Effect.gen(function* () {
@@ -392,7 +410,7 @@ const backgroundBackendPulling = ({ initialBackendHead, isClientEvent, restartBa
  syncState,
  payload: SyncState.PayloadUpstreamAdvance.make({ newEvents }),
  isClientEvent,
- isEqualEvent: MutationEvent.isEqualEncoded,
+ isEqualEvent: LiveStoreEvent.isEqualEncoded,
  ignoreClientEvents: true,
  });
  const mergeCounter = yield* incrementMergeCounter(mergeCounterRef);
@@ -400,28 +418,28 @@ const backgroundBackendPulling = ({ initialBackendHead, isClientEvent, restartBa
  return shouldNeverHappen('The leader thread should never reject upstream advances');
  }
  else if (mergeResult._tag === 'unexpected-error') {
- otelSpan?.addEvent(`merge[${mergeCounter}]:backend-pull:unexpected-error`, {
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:unexpected-error`, {
  newEventsCount: newEvents.length,
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
  });
  return yield* Effect.fail(mergeResult.cause);
  }
  const newBackendHead = newEvents.at(-1).id;
- Mutationlog.updateBackendHead(dbMutationLog, newBackendHead);
+ Eventlog.updateBackendHead(dbEventlog, newBackendHead);
  if (mergeResult._tag === 'rebase') {
- otelSpan?.addEvent(`merge[${mergeCounter}]:backend-pull:rebase`, {
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:rebase`, {
  newEventsCount: newEvents.length,
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
  rollbackCount: mergeResult.rollbackEvents.length,
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
  });
- const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((mutationEvent) => {
- const mutationDef = getMutationDef(schema, mutationEvent.mutation);
- return mutationDef.options.clientOnly === false;
+ const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
+ const eventDef = getEventDef(schema, event.name);
+ return eventDef.eventDef.options.clientOnly === false;
  });
  yield* restartBackendPushing(globalRebasedPendingEvents);
  if (mergeResult.rollbackEvents.length > 0) {
- yield* rollback({ db, dbMutationLog, eventIdsToRollback: mergeResult.rollbackEvents.map((_) => _.id) });
+ yield* rollback({ db, dbEventlog, eventIdsToRollback: mergeResult.rollbackEvents.map((_) => _.id) });
  }
  yield* connectedClientSessionPullQueues.offer({
  payload: SyncState.PayloadUpstreamRebase.make({
@@ -436,7 +454,7 @@ const backgroundBackendPulling = ({ initialBackendHead, isClientEvent, restartBa
  }));
  }
  else {
- otelSpan?.addEvent(`merge[${mergeCounter}]:backend-pull:advance`, {
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:advance`, {
  newEventsCount: newEvents.length,
  mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
  });
@@ -448,20 +466,21 @@ const backgroundBackendPulling = ({ initialBackendHead, isClientEvent, restartBa
  if (mergeResult.confirmedEvents.length > 0) {
  // `mergeResult.confirmedEvents` don't contain the correct sync metadata, so we need to use
  // `newEvents` instead which we filter via `mergeResult.confirmedEvents`
- const confirmedNewEvents = newEvents.filter((mutationEvent) => mergeResult.confirmedEvents.some((confirmedEvent) => EventId.isEqual(mutationEvent.id, confirmedEvent.id)));
- yield* Mutationlog.updateSyncMetadata(confirmedNewEvents);
+ const confirmedNewEvents = newEvents.filter((event) => mergeResult.confirmedEvents.some((confirmedEvent) => EventId.isEqual(event.id, confirmedEvent.id)));
+ yield* Eventlog.updateSyncMetadata(confirmedNewEvents);
  }
  }
  // Removes the changeset rows which are no longer needed as we'll never have to rollback beyond this point
  trimChangesetRows(db, newBackendHead);
- yield* applyMutationsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined });
+ advancePushHead(mergeResult.newSyncState.localHead);
+ yield* applyEventsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined });
  yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState);
  // Allow local pushes to be processed again
  if (remaining === 0) {
  yield* localPushesLatch.open;
  }
  });
- const cursorInfo = yield* Mutationlog.getSyncBackendCursorInfo(initialBackendHead);
+ const cursorInfo = yield* Eventlog.getSyncBackendCursorInfo(initialBackendHead);
  yield* syncBackend.pull(cursorInfo).pipe(
  // TODO only take from queue while connected
  Stream.tap(({ batch, remaining }) => Effect.gen(function* () {
@@ -471,22 +490,21 @@ const backgroundBackendPulling = ({ initialBackendHead, isClientEvent, restartBa
  // batch: TRACE_VERBOSE ? batch : undefined,
  // },
  // })
- // NOTE we only want to take process mutations when the sync backend is connected
+ // NOTE we only want to take process events when the sync backend is connected
  // (e.g. needed for simulating being offline)
  // TODO remove when there's a better way to handle this in stream above
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true);
- yield* onNewPullChunk(batch.map((_) => MutationEvent.EncodedWithMeta.fromGlobal(_.mutationEventEncoded, _.metadata)), remaining);
+ yield* onNewPullChunk(batch.map((_) => LiveStoreEvent.EncodedWithMeta.fromGlobal(_.eventEncoded, _.metadata)), remaining);
  yield* initialBlockingSyncContext.update({ processed: batch.length, remaining });
  })), Stream.runDrain, Effect.interruptible);
  }).pipe(Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pulling'));
- const backgroundBackendPushing = ({ syncBackendPushQueue, otelSpan, devtoolsLatch, }) => Effect.gen(function* () {
+ const backgroundBackendPushing = ({ syncBackendPushQueue, otelSpan, devtoolsLatch, backendPushBatchSize, }) => Effect.gen(function* () {
  const { syncBackend } = yield* LeaderThreadCtx;
  if (syncBackend === undefined)
  return;
  while (true) {
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true);
- // TODO make batch size configurable
- const queueItems = yield* BucketQueue.takeBetween(syncBackendPushQueue, 1, BACKEND_PUSH_BATCH_SIZE);
+ const queueItems = yield* BucketQueue.takeBetween(syncBackendPushQueue, 1, backendPushBatchSize);
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true);
  if (devtoolsLatch !== undefined) {
  yield* devtoolsLatch.await;
@@ -550,4 +568,22 @@ const getMergeCounterFromDb = (dbReadModel) => Effect.gen(function* () {
  const result = dbReadModel.select(sql `SELECT mergeCounter FROM ${LEADER_MERGE_COUNTER_TABLE} WHERE id = 0`);
  return result[0]?.mergeCounter ?? 0;
  });
+ const validatePushBatch = (batch, pushHead) => Effect.gen(function* () {
+ if (batch.length === 0) {
+ return;
+ }
+ // Make sure batch is monotonically increasing
+ for (let i = 1; i < batch.length; i++) {
+ if (EventId.isGreaterThanOrEqual(batch[i - 1].id, batch[i].id)) {
+ shouldNeverHappen(`Events must be ordered in monotonically ascending order by eventId. Received: [${batch.map((e) => EventId.toString(e.id)).join(', ')}]`);
+ }
+ }
+ // Make sure smallest event id is > pushHead
+ if (EventId.isGreaterThanOrEqual(pushHead, batch[0].id)) {
+ return yield* LeaderAheadError.make({
+ minimumExpectedId: pushHead,
+ providedId: batch[0].id,
+ });
+ }
+ });
  //# sourceMappingURL=LeaderSyncProcessor.js.map
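The new `validatePushBatch` enforces two invariants that the removed `// TODO validate batch` comment left unchecked: ids within a batch must be strictly ascending, and the batch must start above the current push head. A worked example of those rules with simplified ids (illustrative stand-ins, not the package's `EventId` API):

    type Id = { global: number; client: number }
    const gte = (a: Id, b: Id) => a.global > b.global || (a.global === b.global && a.client >= b.client)

    // Mirrors the three outcomes of validatePushBatch: ok, ordering defect, or "leader ahead" rejection.
    const validate = (batch: Id[], pushHead: Id): string => {
      for (let i = 1; i < batch.length; i++) {
        if (gte(batch[i - 1]!, batch[i]!)) return 'defect: batch not in ascending eventId order'
      }
      const first = batch[0]
      if (first !== undefined && gte(pushHead, first)) return 'LeaderAheadError'
      return 'ok'
    }

    const head = { global: 3, client: 0 }
    validate([{ global: 4, client: 0 }, { global: 5, client: 0 }], head) // 'ok'
    validate([{ global: 5, client: 0 }, { global: 4, client: 0 }], head) // ordering defect
    validate([{ global: 3, client: 0 }], head)                           // 'LeaderAheadError' (not ahead of pushHead)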