@livestore/common 0.3.0-dev.8 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (480)
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/__tests__/fixture.d.ts +83 -221
  3. package/dist/__tests__/fixture.d.ts.map +1 -1
  4. package/dist/__tests__/fixture.js +33 -11
  5. package/dist/__tests__/fixture.js.map +1 -1
  6. package/dist/adapter-types.d.ts +120 -64
  7. package/dist/adapter-types.d.ts.map +1 -1
  8. package/dist/adapter-types.js +39 -8
  9. package/dist/adapter-types.js.map +1 -1
  10. package/dist/bounded-collections.d.ts.map +1 -1
  11. package/dist/debug-info.d.ts +1 -1
  12. package/dist/debug-info.d.ts.map +1 -1
  13. package/dist/debug-info.js +1 -0
  14. package/dist/debug-info.js.map +1 -1
  15. package/dist/devtools/devtools-messages-client-session.d.ts +390 -0
  16. package/dist/devtools/devtools-messages-client-session.d.ts.map +1 -0
  17. package/dist/devtools/devtools-messages-client-session.js +97 -0
  18. package/dist/devtools/devtools-messages-client-session.js.map +1 -0
  19. package/dist/devtools/devtools-messages-common.d.ts +68 -0
  20. package/dist/devtools/devtools-messages-common.d.ts.map +1 -0
  21. package/dist/devtools/devtools-messages-common.js +60 -0
  22. package/dist/devtools/devtools-messages-common.js.map +1 -0
  23. package/dist/devtools/devtools-messages-leader.d.ts +394 -0
  24. package/dist/devtools/devtools-messages-leader.d.ts.map +1 -0
  25. package/dist/devtools/devtools-messages-leader.js +147 -0
  26. package/dist/devtools/devtools-messages-leader.js.map +1 -0
  27. package/dist/devtools/devtools-messages.d.ts +3 -580
  28. package/dist/devtools/devtools-messages.d.ts.map +1 -1
  29. package/dist/devtools/devtools-messages.js +3 -174
  30. package/dist/devtools/devtools-messages.js.map +1 -1
  31. package/dist/devtools/devtools-sessioninfo.d.ts +32 -0
  32. package/dist/devtools/devtools-sessioninfo.d.ts.map +1 -0
  33. package/dist/devtools/devtools-sessioninfo.js +36 -0
  34. package/dist/devtools/devtools-sessioninfo.js.map +1 -0
  35. package/dist/devtools/mod.d.ts +55 -0
  36. package/dist/devtools/mod.d.ts.map +1 -0
  37. package/dist/devtools/mod.js +33 -0
  38. package/dist/devtools/mod.js.map +1 -0
  39. package/dist/index.d.ts +7 -9
  40. package/dist/index.d.ts.map +1 -1
  41. package/dist/index.js +7 -9
  42. package/dist/index.js.map +1 -1
  43. package/dist/leader-thread/LeaderSyncProcessor.d.ts +45 -30
  44. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  45. package/dist/leader-thread/LeaderSyncProcessor.js +484 -321
  46. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  47. package/dist/leader-thread/connection.d.ts +34 -6
  48. package/dist/leader-thread/connection.d.ts.map +1 -1
  49. package/dist/leader-thread/connection.js +22 -7
  50. package/dist/leader-thread/connection.js.map +1 -1
  51. package/dist/leader-thread/eventlog.d.ts +27 -0
  52. package/dist/leader-thread/eventlog.d.ts.map +1 -0
  53. package/dist/leader-thread/eventlog.js +119 -0
  54. package/dist/leader-thread/eventlog.js.map +1 -0
  55. package/dist/leader-thread/leader-worker-devtools.d.ts.map +1 -1
  56. package/dist/leader-thread/leader-worker-devtools.js +155 -80
  57. package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
  58. package/dist/leader-thread/make-leader-thread-layer.d.ts +22 -9
  59. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  60. package/dist/leader-thread/make-leader-thread-layer.js +67 -45
  61. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  62. package/dist/leader-thread/materialize-event.d.ts +16 -0
  63. package/dist/leader-thread/materialize-event.d.ts.map +1 -0
  64. package/dist/leader-thread/materialize-event.js +109 -0
  65. package/dist/leader-thread/materialize-event.js.map +1 -0
  66. package/dist/leader-thread/mod.d.ts +1 -1
  67. package/dist/leader-thread/mod.d.ts.map +1 -1
  68. package/dist/leader-thread/mod.js +1 -1
  69. package/dist/leader-thread/mod.js.map +1 -1
  70. package/dist/leader-thread/recreate-db.d.ts +4 -2
  71. package/dist/leader-thread/recreate-db.d.ts.map +1 -1
  72. package/dist/leader-thread/recreate-db.js +28 -32
  73. package/dist/leader-thread/recreate-db.js.map +1 -1
  74. package/dist/leader-thread/shutdown-channel.d.ts +2 -5
  75. package/dist/leader-thread/shutdown-channel.d.ts.map +1 -1
  76. package/dist/leader-thread/shutdown-channel.js +2 -4
  77. package/dist/leader-thread/shutdown-channel.js.map +1 -1
  78. package/dist/leader-thread/types.d.ts +85 -38
  79. package/dist/leader-thread/types.d.ts.map +1 -1
  80. package/dist/leader-thread/types.js +1 -3
  81. package/dist/leader-thread/types.js.map +1 -1
  82. package/dist/make-client-session.d.ts +23 -0
  83. package/dist/make-client-session.d.ts.map +1 -0
  84. package/dist/make-client-session.js +57 -0
  85. package/dist/make-client-session.js.map +1 -0
  86. package/dist/materializer-helper.d.ts +23 -0
  87. package/dist/materializer-helper.d.ts.map +1 -0
  88. package/dist/materializer-helper.js +86 -0
  89. package/dist/materializer-helper.js.map +1 -0
  90. package/dist/otel.d.ts +2 -0
  91. package/dist/otel.d.ts.map +1 -1
  92. package/dist/otel.js +5 -0
  93. package/dist/otel.js.map +1 -1
  94. package/dist/rematerialize-from-eventlog.d.ts +14 -0
  95. package/dist/rematerialize-from-eventlog.d.ts.map +1 -0
  96. package/dist/rematerialize-from-eventlog.js +64 -0
  97. package/dist/rematerialize-from-eventlog.js.map +1 -0
  98. package/dist/schema/EventDef.d.ts +146 -0
  99. package/dist/schema/EventDef.d.ts.map +1 -0
  100. package/dist/schema/EventDef.js +58 -0
  101. package/dist/schema/EventDef.js.map +1 -0
  102. package/dist/schema/EventSequenceNumber.d.ts +57 -0
  103. package/dist/schema/EventSequenceNumber.d.ts.map +1 -0
  104. package/dist/schema/EventSequenceNumber.js +82 -0
  105. package/dist/schema/EventSequenceNumber.js.map +1 -0
  106. package/dist/schema/EventSequenceNumber.test.d.ts +2 -0
  107. package/dist/schema/EventSequenceNumber.test.d.ts.map +1 -0
  108. package/dist/schema/EventSequenceNumber.test.js +11 -0
  109. package/dist/schema/EventSequenceNumber.test.js.map +1 -0
  110. package/dist/schema/LiveStoreEvent.d.ts +257 -0
  111. package/dist/schema/LiveStoreEvent.d.ts.map +1 -0
  112. package/dist/schema/LiveStoreEvent.js +117 -0
  113. package/dist/schema/LiveStoreEvent.js.map +1 -0
  114. package/dist/schema/events.d.ts +2 -0
  115. package/dist/schema/events.d.ts.map +1 -0
  116. package/dist/schema/events.js +2 -0
  117. package/dist/schema/events.js.map +1 -0
  118. package/dist/schema/mod.d.ts +8 -6
  119. package/dist/schema/mod.d.ts.map +1 -1
  120. package/dist/schema/mod.js +8 -6
  121. package/dist/schema/mod.js.map +1 -1
  122. package/dist/schema/schema.d.ts +50 -32
  123. package/dist/schema/schema.d.ts.map +1 -1
  124. package/dist/schema/schema.js +36 -43
  125. package/dist/schema/schema.js.map +1 -1
  126. package/dist/schema/state/mod.d.ts +3 -0
  127. package/dist/schema/state/mod.d.ts.map +1 -0
  128. package/dist/schema/state/mod.js +3 -0
  129. package/dist/schema/state/mod.js.map +1 -0
  130. package/dist/schema/state/sqlite/client-document-def.d.ts +223 -0
  131. package/dist/schema/state/sqlite/client-document-def.d.ts.map +1 -0
  132. package/dist/schema/state/sqlite/client-document-def.js +170 -0
  133. package/dist/schema/state/sqlite/client-document-def.js.map +1 -0
  134. package/dist/schema/state/sqlite/client-document-def.test.d.ts +2 -0
  135. package/dist/schema/state/sqlite/client-document-def.test.d.ts.map +1 -0
  136. package/dist/schema/state/sqlite/client-document-def.test.js +201 -0
  137. package/dist/schema/state/sqlite/client-document-def.test.js.map +1 -0
  138. package/dist/schema/state/sqlite/db-schema/ast/sqlite.d.ts +69 -0
  139. package/dist/schema/state/sqlite/db-schema/ast/sqlite.d.ts.map +1 -0
  140. package/dist/schema/state/sqlite/db-schema/ast/sqlite.js +71 -0
  141. package/dist/schema/state/sqlite/db-schema/ast/sqlite.js.map +1 -0
  142. package/dist/schema/state/sqlite/db-schema/ast/validate.d.ts +3 -0
  143. package/dist/schema/state/sqlite/db-schema/ast/validate.d.ts.map +1 -0
  144. package/dist/schema/state/sqlite/db-schema/ast/validate.js +12 -0
  145. package/dist/schema/state/sqlite/db-schema/ast/validate.js.map +1 -0
  146. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.d.ts +90 -0
  147. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.d.ts.map +1 -0
  148. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.js +87 -0
  149. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.js.map +1 -0
  150. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.d.ts +2 -0
  151. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.d.ts.map +1 -0
  152. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.js +29 -0
  153. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.js.map +1 -0
  154. package/dist/schema/state/sqlite/db-schema/dsl/mod.d.ts +90 -0
  155. package/dist/schema/state/sqlite/db-schema/dsl/mod.d.ts.map +1 -0
  156. package/dist/schema/state/sqlite/db-schema/dsl/mod.js +41 -0
  157. package/dist/schema/state/sqlite/db-schema/dsl/mod.js.map +1 -0
  158. package/dist/schema/state/sqlite/db-schema/hash.d.ts +2 -0
  159. package/dist/schema/state/sqlite/db-schema/hash.d.ts.map +1 -0
  160. package/dist/schema/state/sqlite/db-schema/hash.js +14 -0
  161. package/dist/schema/state/sqlite/db-schema/hash.js.map +1 -0
  162. package/dist/schema/state/sqlite/db-schema/mod.d.ts +3 -0
  163. package/dist/schema/state/sqlite/db-schema/mod.d.ts.map +1 -0
  164. package/dist/schema/state/sqlite/db-schema/mod.js +3 -0
  165. package/dist/schema/state/sqlite/db-schema/mod.js.map +1 -0
  166. package/dist/schema/state/sqlite/mod.d.ts +17 -0
  167. package/dist/schema/state/sqlite/mod.d.ts.map +1 -0
  168. package/dist/schema/state/sqlite/mod.js +41 -0
  169. package/dist/schema/state/sqlite/mod.js.map +1 -0
  170. package/dist/schema/state/sqlite/query-builder/api.d.ts +294 -0
  171. package/dist/schema/state/sqlite/query-builder/api.d.ts.map +1 -0
  172. package/dist/schema/state/sqlite/query-builder/api.js +6 -0
  173. package/dist/schema/state/sqlite/query-builder/api.js.map +1 -0
  174. package/dist/schema/state/sqlite/query-builder/astToSql.d.ts +7 -0
  175. package/dist/schema/state/sqlite/query-builder/astToSql.d.ts.map +1 -0
  176. package/dist/schema/state/sqlite/query-builder/astToSql.js +190 -0
  177. package/dist/schema/state/sqlite/query-builder/astToSql.js.map +1 -0
  178. package/dist/schema/state/sqlite/query-builder/impl.d.ts +7 -0
  179. package/dist/schema/state/sqlite/query-builder/impl.d.ts.map +1 -0
  180. package/dist/schema/state/sqlite/query-builder/impl.js +286 -0
  181. package/dist/schema/state/sqlite/query-builder/impl.js.map +1 -0
  182. package/dist/schema/state/sqlite/query-builder/impl.test.d.ts +87 -0
  183. package/dist/schema/state/sqlite/query-builder/impl.test.d.ts.map +1 -0
  184. package/dist/schema/state/sqlite/query-builder/impl.test.js +563 -0
  185. package/dist/schema/state/sqlite/query-builder/impl.test.js.map +1 -0
  186. package/dist/{query-builder → schema/state/sqlite/query-builder}/mod.d.ts +7 -0
  187. package/dist/schema/state/sqlite/query-builder/mod.d.ts.map +1 -0
  188. package/dist/{query-builder → schema/state/sqlite/query-builder}/mod.js +7 -0
  189. package/dist/schema/state/sqlite/query-builder/mod.js.map +1 -0
  190. package/dist/schema/state/sqlite/schema-helpers.d.ts.map +1 -0
  191. package/dist/schema/{schema-helpers.js → state/sqlite/schema-helpers.js} +1 -1
  192. package/dist/schema/state/sqlite/schema-helpers.js.map +1 -0
  193. package/dist/schema/state/sqlite/system-tables.d.ts +574 -0
  194. package/dist/schema/state/sqlite/system-tables.d.ts.map +1 -0
  195. package/dist/schema/state/sqlite/system-tables.js +88 -0
  196. package/dist/schema/state/sqlite/system-tables.js.map +1 -0
  197. package/dist/schema/state/sqlite/table-def.d.ts +84 -0
  198. package/dist/schema/state/sqlite/table-def.d.ts.map +1 -0
  199. package/dist/schema/state/sqlite/table-def.js +36 -0
  200. package/dist/schema/state/sqlite/table-def.js.map +1 -0
  201. package/dist/schema-management/common.d.ts +7 -7
  202. package/dist/schema-management/common.d.ts.map +1 -1
  203. package/dist/schema-management/common.js.map +1 -1
  204. package/dist/schema-management/migrations.d.ts +6 -6
  205. package/dist/schema-management/migrations.d.ts.map +1 -1
  206. package/dist/schema-management/migrations.js +27 -18
  207. package/dist/schema-management/migrations.js.map +1 -1
  208. package/dist/schema-management/validate-schema.d.ts +8 -0
  209. package/dist/schema-management/validate-schema.d.ts.map +1 -0
  210. package/dist/schema-management/validate-schema.js +39 -0
  211. package/dist/schema-management/validate-schema.js.map +1 -0
  212. package/dist/sql-queries/misc.d.ts.map +1 -1
  213. package/dist/sql-queries/sql-queries.d.ts +1 -1
  214. package/dist/sql-queries/sql-queries.d.ts.map +1 -1
  215. package/dist/sql-queries/sql-queries.js.map +1 -1
  216. package/dist/sql-queries/sql-query-builder.d.ts +1 -1
  217. package/dist/sql-queries/sql-query-builder.d.ts.map +1 -1
  218. package/dist/sql-queries/sql-query-builder.js.map +1 -1
  219. package/dist/sql-queries/types.d.ts +2 -1
  220. package/dist/sql-queries/types.d.ts.map +1 -1
  221. package/dist/sql-queries/types.js.map +1 -1
  222. package/dist/sync/ClientSessionSyncProcessor.d.ts +40 -19
  223. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  224. package/dist/sync/ClientSessionSyncProcessor.js +150 -72
  225. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  226. package/dist/sync/next/compact-events.d.ts.map +1 -1
  227. package/dist/sync/next/compact-events.js +38 -35
  228. package/dist/sync/next/compact-events.js.map +1 -1
  229. package/dist/sync/next/facts.d.ts +21 -21
  230. package/dist/sync/next/facts.d.ts.map +1 -1
  231. package/dist/sync/next/facts.js +11 -11
  232. package/dist/sync/next/facts.js.map +1 -1
  233. package/dist/sync/next/history-dag-common.d.ts +9 -7
  234. package/dist/sync/next/history-dag-common.d.ts.map +1 -1
  235. package/dist/sync/next/history-dag-common.js +10 -5
  236. package/dist/sync/next/history-dag-common.js.map +1 -1
  237. package/dist/sync/next/history-dag.d.ts +0 -2
  238. package/dist/sync/next/history-dag.d.ts.map +1 -1
  239. package/dist/sync/next/history-dag.js +16 -14
  240. package/dist/sync/next/history-dag.js.map +1 -1
  241. package/dist/sync/next/rebase-events.d.ts +10 -8
  242. package/dist/sync/next/rebase-events.d.ts.map +1 -1
  243. package/dist/sync/next/rebase-events.js +18 -10
  244. package/dist/sync/next/rebase-events.js.map +1 -1
  245. package/dist/sync/next/test/compact-events.calculator.test.js +39 -34
  246. package/dist/sync/next/test/compact-events.calculator.test.js.map +1 -1
  247. package/dist/sync/next/test/compact-events.test.js +77 -77
  248. package/dist/sync/next/test/compact-events.test.js.map +1 -1
  249. package/dist/sync/next/test/{mutation-fixtures.d.ts → event-fixtures.d.ts} +35 -25
  250. package/dist/sync/next/test/event-fixtures.d.ts.map +1 -0
  251. package/dist/sync/next/test/{mutation-fixtures.js → event-fixtures.js} +81 -38
  252. package/dist/sync/next/test/event-fixtures.js.map +1 -0
  253. package/dist/sync/next/test/mod.d.ts +1 -1
  254. package/dist/sync/next/test/mod.d.ts.map +1 -1
  255. package/dist/sync/next/test/mod.js +1 -1
  256. package/dist/sync/next/test/mod.js.map +1 -1
  257. package/dist/sync/sync.d.ts +46 -21
  258. package/dist/sync/sync.d.ts.map +1 -1
  259. package/dist/sync/sync.js +10 -6
  260. package/dist/sync/sync.js.map +1 -1
  261. package/dist/sync/syncstate.d.ts +213 -82
  262. package/dist/sync/syncstate.d.ts.map +1 -1
  263. package/dist/sync/syncstate.js +337 -139
  264. package/dist/sync/syncstate.js.map +1 -1
  265. package/dist/sync/syncstate.test.js +309 -286
  266. package/dist/sync/syncstate.test.js.map +1 -1
  267. package/dist/sync/validate-push-payload.d.ts +2 -2
  268. package/dist/sync/validate-push-payload.d.ts.map +1 -1
  269. package/dist/sync/validate-push-payload.js +4 -4
  270. package/dist/sync/validate-push-payload.js.map +1 -1
  271. package/dist/util.d.ts +2 -2
  272. package/dist/util.d.ts.map +1 -1
  273. package/dist/version.d.ts +2 -2
  274. package/dist/version.d.ts.map +1 -1
  275. package/dist/version.js +2 -2
  276. package/dist/version.js.map +1 -1
  277. package/package.json +10 -4
  278. package/src/__tests__/fixture.ts +36 -15
  279. package/src/adapter-types.ts +107 -68
  280. package/src/debug-info.ts +1 -0
  281. package/src/devtools/devtools-messages-client-session.ts +142 -0
  282. package/src/devtools/devtools-messages-common.ts +115 -0
  283. package/src/devtools/devtools-messages-leader.ts +191 -0
  284. package/src/devtools/devtools-messages.ts +3 -246
  285. package/src/devtools/devtools-sessioninfo.ts +101 -0
  286. package/src/devtools/mod.ts +59 -0
  287. package/src/index.ts +7 -9
  288. package/src/leader-thread/LeaderSyncProcessor.ts +738 -477
  289. package/src/leader-thread/connection.ts +54 -9
  290. package/src/leader-thread/eventlog.ts +199 -0
  291. package/src/leader-thread/leader-worker-devtools.ts +227 -104
  292. package/src/leader-thread/make-leader-thread-layer.ts +121 -72
  293. package/src/leader-thread/materialize-event.ts +173 -0
  294. package/src/leader-thread/mod.ts +1 -1
  295. package/src/leader-thread/recreate-db.ts +33 -38
  296. package/src/leader-thread/shutdown-channel.ts +2 -4
  297. package/src/leader-thread/types.ts +94 -48
  298. package/src/make-client-session.ts +136 -0
  299. package/src/materializer-helper.ts +138 -0
  300. package/src/otel.ts +8 -0
  301. package/src/rematerialize-from-eventlog.ts +117 -0
  302. package/src/schema/EventDef.ts +227 -0
  303. package/src/schema/EventSequenceNumber.test.ts +12 -0
  304. package/src/schema/EventSequenceNumber.ts +121 -0
  305. package/src/schema/LiveStoreEvent.ts +240 -0
  306. package/src/schema/events.ts +1 -0
  307. package/src/schema/mod.ts +8 -6
  308. package/src/schema/schema.ts +88 -84
  309. package/src/schema/state/mod.ts +2 -0
  310. package/src/schema/state/sqlite/client-document-def.test.ts +238 -0
  311. package/src/schema/state/sqlite/client-document-def.ts +444 -0
  312. package/src/schema/state/sqlite/db-schema/ast/sqlite.ts +142 -0
  313. package/src/schema/state/sqlite/db-schema/ast/validate.ts +13 -0
  314. package/src/schema/state/sqlite/db-schema/dsl/__snapshots__/field-defs.test.ts.snap +206 -0
  315. package/src/schema/state/sqlite/db-schema/dsl/field-defs.test.ts +35 -0
  316. package/src/schema/state/sqlite/db-schema/dsl/field-defs.ts +242 -0
  317. package/src/schema/state/sqlite/db-schema/dsl/mod.ts +222 -0
  318. package/src/schema/state/sqlite/db-schema/hash.ts +14 -0
  319. package/src/schema/state/sqlite/db-schema/mod.ts +2 -0
  320. package/src/schema/state/sqlite/mod.ts +73 -0
  321. package/src/schema/state/sqlite/query-builder/api.ts +440 -0
  322. package/src/schema/state/sqlite/query-builder/astToSql.ts +232 -0
  323. package/src/schema/state/sqlite/query-builder/impl.test.ts +617 -0
  324. package/src/schema/state/sqlite/query-builder/impl.ts +351 -0
  325. package/src/{query-builder → schema/state/sqlite/query-builder}/mod.ts +7 -0
  326. package/src/schema/{schema-helpers.ts → state/sqlite/schema-helpers.ts} +1 -1
  327. package/src/schema/state/sqlite/system-tables.ts +117 -0
  328. package/src/schema/state/sqlite/table-def.ts +197 -0
  329. package/src/schema-management/common.ts +7 -7
  330. package/src/schema-management/migrations.ts +37 -31
  331. package/src/schema-management/validate-schema.ts +61 -0
  332. package/src/sql-queries/sql-queries.ts +1 -1
  333. package/src/sql-queries/sql-query-builder.ts +1 -2
  334. package/src/sql-queries/types.ts +3 -1
  335. package/src/sync/ClientSessionSyncProcessor.ts +220 -94
  336. package/src/sync/next/compact-events.ts +38 -35
  337. package/src/sync/next/facts.ts +43 -41
  338. package/src/sync/next/history-dag-common.ts +17 -10
  339. package/src/sync/next/history-dag.ts +16 -17
  340. package/src/sync/next/rebase-events.ts +29 -17
  341. package/src/sync/next/test/compact-events.calculator.test.ts +46 -46
  342. package/src/sync/next/test/compact-events.test.ts +79 -79
  343. package/src/sync/next/test/event-fixtures.ts +226 -0
  344. package/src/sync/next/test/mod.ts +1 -1
  345. package/src/sync/sync.ts +46 -21
  346. package/src/sync/syncstate.test.ts +346 -320
  347. package/src/sync/syncstate.ts +422 -230
  348. package/src/sync/validate-push-payload.ts +6 -6
  349. package/src/version.ts +2 -2
  350. package/dist/derived-mutations.d.ts +0 -109
  351. package/dist/derived-mutations.d.ts.map +0 -1
  352. package/dist/derived-mutations.js +0 -54
  353. package/dist/derived-mutations.js.map +0 -1
  354. package/dist/derived-mutations.test.d.ts +0 -2
  355. package/dist/derived-mutations.test.d.ts.map +0 -1
  356. package/dist/derived-mutations.test.js +0 -93
  357. package/dist/derived-mutations.test.js.map +0 -1
  358. package/dist/devtools/devtools-bridge.d.ts +0 -13
  359. package/dist/devtools/devtools-bridge.d.ts.map +0 -1
  360. package/dist/devtools/devtools-bridge.js +0 -2
  361. package/dist/devtools/devtools-bridge.js.map +0 -1
  362. package/dist/devtools/devtools-window-message.d.ts +0 -29
  363. package/dist/devtools/devtools-window-message.d.ts.map +0 -1
  364. package/dist/devtools/devtools-window-message.js +0 -33
  365. package/dist/devtools/devtools-window-message.js.map +0 -1
  366. package/dist/devtools/index.d.ts +0 -42
  367. package/dist/devtools/index.d.ts.map +0 -1
  368. package/dist/devtools/index.js +0 -48
  369. package/dist/devtools/index.js.map +0 -1
  370. package/dist/init-singleton-tables.d.ts +0 -4
  371. package/dist/init-singleton-tables.d.ts.map +0 -1
  372. package/dist/init-singleton-tables.js +0 -16
  373. package/dist/init-singleton-tables.js.map +0 -1
  374. package/dist/leader-thread/apply-mutation.d.ts +0 -11
  375. package/dist/leader-thread/apply-mutation.d.ts.map +0 -1
  376. package/dist/leader-thread/apply-mutation.js +0 -107
  377. package/dist/leader-thread/apply-mutation.js.map +0 -1
  378. package/dist/leader-thread/leader-sync-processor.d.ts +0 -47
  379. package/dist/leader-thread/leader-sync-processor.d.ts.map +0 -1
  380. package/dist/leader-thread/leader-sync-processor.js +0 -430
  381. package/dist/leader-thread/leader-sync-processor.js.map +0 -1
  382. package/dist/leader-thread/mutationlog.d.ts +0 -10
  383. package/dist/leader-thread/mutationlog.d.ts.map +0 -1
  384. package/dist/leader-thread/mutationlog.js +0 -28
  385. package/dist/leader-thread/mutationlog.js.map +0 -1
  386. package/dist/leader-thread/pull-queue-set.d.ts +0 -7
  387. package/dist/leader-thread/pull-queue-set.d.ts.map +0 -1
  388. package/dist/leader-thread/pull-queue-set.js +0 -39
  389. package/dist/leader-thread/pull-queue-set.js.map +0 -1
  390. package/dist/mutation.d.ts +0 -20
  391. package/dist/mutation.d.ts.map +0 -1
  392. package/dist/mutation.js +0 -57
  393. package/dist/mutation.js.map +0 -1
  394. package/dist/query-builder/api.d.ts +0 -190
  395. package/dist/query-builder/api.d.ts.map +0 -1
  396. package/dist/query-builder/api.js +0 -8
  397. package/dist/query-builder/api.js.map +0 -1
  398. package/dist/query-builder/impl.d.ts +0 -12
  399. package/dist/query-builder/impl.d.ts.map +0 -1
  400. package/dist/query-builder/impl.js +0 -244
  401. package/dist/query-builder/impl.js.map +0 -1
  402. package/dist/query-builder/impl.test.d.ts +0 -2
  403. package/dist/query-builder/impl.test.d.ts.map +0 -1
  404. package/dist/query-builder/impl.test.js +0 -212
  405. package/dist/query-builder/impl.test.js.map +0 -1
  406. package/dist/query-builder/mod.d.ts.map +0 -1
  407. package/dist/query-builder/mod.js.map +0 -1
  408. package/dist/query-info.d.ts +0 -38
  409. package/dist/query-info.d.ts.map +0 -1
  410. package/dist/query-info.js +0 -7
  411. package/dist/query-info.js.map +0 -1
  412. package/dist/rehydrate-from-mutationlog.d.ts +0 -14
  413. package/dist/rehydrate-from-mutationlog.d.ts.map +0 -1
  414. package/dist/rehydrate-from-mutationlog.js +0 -66
  415. package/dist/rehydrate-from-mutationlog.js.map +0 -1
  416. package/dist/schema/EventId.d.ts +0 -39
  417. package/dist/schema/EventId.d.ts.map +0 -1
  418. package/dist/schema/EventId.js +0 -38
  419. package/dist/schema/EventId.js.map +0 -1
  420. package/dist/schema/EventId.test.d.ts +0 -2
  421. package/dist/schema/EventId.test.d.ts.map +0 -1
  422. package/dist/schema/EventId.test.js +0 -11
  423. package/dist/schema/EventId.test.js.map +0 -1
  424. package/dist/schema/MutationEvent.d.ts +0 -166
  425. package/dist/schema/MutationEvent.d.ts.map +0 -1
  426. package/dist/schema/MutationEvent.js +0 -72
  427. package/dist/schema/MutationEvent.js.map +0 -1
  428. package/dist/schema/MutationEvent.test.d.ts +0 -2
  429. package/dist/schema/MutationEvent.test.d.ts.map +0 -1
  430. package/dist/schema/MutationEvent.test.js +0 -2
  431. package/dist/schema/MutationEvent.test.js.map +0 -1
  432. package/dist/schema/mutations.d.ts +0 -107
  433. package/dist/schema/mutations.d.ts.map +0 -1
  434. package/dist/schema/mutations.js +0 -42
  435. package/dist/schema/mutations.js.map +0 -1
  436. package/dist/schema/schema-helpers.d.ts.map +0 -1
  437. package/dist/schema/schema-helpers.js.map +0 -1
  438. package/dist/schema/system-tables.d.ts +0 -399
  439. package/dist/schema/system-tables.d.ts.map +0 -1
  440. package/dist/schema/system-tables.js +0 -59
  441. package/dist/schema/system-tables.js.map +0 -1
  442. package/dist/schema/table-def.d.ts +0 -156
  443. package/dist/schema/table-def.d.ts.map +0 -1
  444. package/dist/schema/table-def.js +0 -79
  445. package/dist/schema/table-def.js.map +0 -1
  446. package/dist/schema-management/validate-mutation-defs.d.ts +0 -8
  447. package/dist/schema-management/validate-mutation-defs.d.ts.map +0 -1
  448. package/dist/schema-management/validate-mutation-defs.js +0 -39
  449. package/dist/schema-management/validate-mutation-defs.js.map +0 -1
  450. package/dist/sync/client-session-sync-processor.d.ts +0 -45
  451. package/dist/sync/client-session-sync-processor.d.ts.map +0 -1
  452. package/dist/sync/client-session-sync-processor.js +0 -131
  453. package/dist/sync/client-session-sync-processor.js.map +0 -1
  454. package/dist/sync/next/test/mutation-fixtures.d.ts.map +0 -1
  455. package/dist/sync/next/test/mutation-fixtures.js.map +0 -1
  456. package/src/derived-mutations.test.ts +0 -101
  457. package/src/derived-mutations.ts +0 -170
  458. package/src/devtools/devtools-bridge.ts +0 -14
  459. package/src/devtools/devtools-window-message.ts +0 -27
  460. package/src/devtools/index.ts +0 -48
  461. package/src/init-singleton-tables.ts +0 -24
  462. package/src/leader-thread/apply-mutation.ts +0 -161
  463. package/src/leader-thread/mutationlog.ts +0 -46
  464. package/src/leader-thread/pull-queue-set.ts +0 -58
  465. package/src/mutation.ts +0 -91
  466. package/src/query-builder/api.ts +0 -289
  467. package/src/query-builder/impl.test.ts +0 -239
  468. package/src/query-builder/impl.ts +0 -285
  469. package/src/query-info.ts +0 -78
  470. package/src/rehydrate-from-mutationlog.ts +0 -119
  471. package/src/schema/EventId.test.ts +0 -12
  472. package/src/schema/EventId.ts +0 -60
  473. package/src/schema/MutationEvent.ts +0 -181
  474. package/src/schema/mutations.ts +0 -192
  475. package/src/schema/system-tables.ts +0 -105
  476. package/src/schema/table-def.ts +0 -343
  477. package/src/schema-management/validate-mutation-defs.ts +0 -63
  478. package/src/sync/next/test/mutation-fixtures.ts +0 -224
  479. package/tsconfig.json +0 -11
  480. /package/dist/schema/{schema-helpers.d.ts → state/sqlite/schema-helpers.d.ts} +0 -0
@@ -1,528 +1,763 @@
- import { shouldNeverHappen, TRACE_VERBOSE } from '@livestore/utils'
- import type { HttpClient, Scope } from '@livestore/utils/effect'
+ import { casesHandled, isNotUndefined, LS_DEV, shouldNeverHappen, TRACE_VERBOSE } from '@livestore/utils'
+ import type { HttpClient, Runtime, Scope, Tracer } from '@livestore/utils/effect'
  import {
  BucketQueue,
  Deferred,
  Effect,
  Exit,
- Fiber,
  FiberHandle,
- Option,
  OtelTracer,
+ Queue,
  ReadonlyArray,
- Ref,
- Schema,
  Stream,
+ Subscribable,
  SubscriptionRef,
  } from '@livestore/utils/effect'
  import type * as otel from '@opentelemetry/api'
 
- import type { SynchronousDatabase } from '../adapter-types.js'
+ import type { SqliteDb } from '../adapter-types.js'
  import { UnexpectedError } from '../adapter-types.js'
- import type { LiveStoreSchema, SessionChangesetMetaRow } from '../schema/mod.js'
- import {
- EventId,
- MUTATION_LOG_META_TABLE,
- MutationEvent,
- mutationLogMetaTable,
- SESSION_CHANGESET_META_TABLE,
- } from '../schema/mod.js'
- import { updateRows } from '../sql-queries/index.js'
- import { InvalidPushError } from '../sync/sync.js'
+ import type { LiveStoreSchema } from '../schema/mod.js'
+ import { EventSequenceNumber, getEventDef, LiveStoreEvent, SystemTables } from '../schema/mod.js'
+ import { LeaderAheadError } from '../sync/sync.js'
  import * as SyncState from '../sync/syncstate.js'
  import { sql } from '../util.js'
- import { makeApplyMutation } from './apply-mutation.js'
- import { execSql } from './connection.js'
- import { getBackendHeadFromDb, getLocalHeadFromDb, getMutationEventsSince, updateBackendHead } from './mutationlog.js'
- import type { InitialBlockingSyncContext, InitialSyncInfo, LeaderSyncProcessor } from './types.js'
+ import * as Eventlog from './eventlog.js'
+ import { rollback } from './materialize-event.js'
+ import type { InitialBlockingSyncContext, LeaderSyncProcessor } from './types.js'
  import { LeaderThreadCtx } from './types.js'
 
- type ProcessorStateInit = {
- _tag: 'init'
- }
-
- type ProcessorStateInSync = {
- _tag: 'in-sync'
- syncState: SyncState.SyncState
- }
-
- type ProcessorStateApplyingSyncStateAdvance = {
- _tag: 'applying-syncstate-advance'
- origin: 'pull' | 'push'
- syncState: SyncState.SyncState
- // TODO re-introduce this
- // proccesHead: EventId
- fiber: Fiber.RuntimeFiber<void, UnexpectedError>
- }
-
- type ProcessorState = ProcessorStateInit | ProcessorStateInSync | ProcessorStateApplyingSyncStateAdvance
+ type LocalPushQueueItem = [
+ event: LiveStoreEvent.EncodedWithMeta,
+ deferred: Deferred.Deferred<void, LeaderAheadError> | undefined,
+ /** Used to determine whether the batch has become invalid due to a rejected local push batch */
+ generation: number,
+ ]
 
  /**
- * The general idea of the sync processor is to "follow the sync state"
- * and apply/rollback mutations as needed to the read model and mutation log.
- * The leader sync processor is also responsible for
- * - broadcasting mutations to client sessions via the pull queues.
- * - pushing mutations to the sync backend
+ * The LeaderSyncProcessor manages synchronization of events between
+ * the local state and the sync backend, ensuring efficient and orderly processing.
  *
- * In the leader sync processor, pulling always has precedence over pushing.
+ * In the LeaderSyncProcessor, pulling always has precedence over pushing.
  *
- * External events:
- * - Mutation pushed from client session
- * - Mutation pushed from devtools (via pushPartial)
- * - Mutation pulled from sync backend
+ * Responsibilities:
+ * - Queueing incoming local events in a localPushesQueue.
+ * - Broadcasting events to client sessions via pull queues.
+ * - Pushing events to the sync backend.
  *
- * The machine can be in the following states:
- * - in-sync: fully synced with remote, now idling
- * - applying-syncstate-advance (with pointer to current progress in case of rebase interrupt)
+ * Notes:
  *
- * Transitions:
- * - in-sync -> applying-syncstate-advance
- * - applying-syncstate-advance -> in-sync
- * - applying-syncstate-advance -> applying-syncstate-advance (need to interrupt previous operation)
+ * local push processing:
+ * - localPushesQueue:
+ * - Maintains events in ascending order.
+ * - Uses `Deferred` objects to resolve/reject events based on application success.
+ * - Processes events from the queue, applying events in batches.
+ * - Controlled by a `Latch` to manage execution flow.
+ * - The latch closes on pull receipt and re-opens post-pull completion.
+ * - Processes up to `maxBatchSize` events per cycle.
  *
- * Queuing vs interrupting behaviour:
- * - Operations caused by pull can never be interrupted
- * - Incoming pull can interrupt current push
- * - Incoming pull needs to wait to previous pull to finish
- * - Incoming push needs to wait to previous push to finish
+ * Currently we're advancing the db read model and eventlog in lockstep, but we could also decouple this in the future
+ *
+ * Tricky concurrency scenarios:
+ * - Queued local push batches becoming invalid due to a prior local push item being rejected.
+ * Solution: Introduce a generation number for local push batches which is used to filter out old batches items in case of rejection.
  *
- * Backend pushing:
- * - continously push to backend
- * - only interrupted and restarted on rebase
  */
  export const makeLeaderSyncProcessor = ({
  schema,
- dbMissing,
- dbLog,
+ dbEventlogMissing,
+ dbEventlog,
+ dbState,
+ dbStateMissing,
  initialBlockingSyncContext,
+ onError,
+ params,
+ testing,
  }: {
  schema: LiveStoreSchema
- /** Only used to know whether we can safely query dbLog during setup execution */
- dbMissing: boolean
- dbLog: SynchronousDatabase
+ /** Only used to know whether we can safely query dbEventlog during setup execution */
+ dbEventlogMissing: boolean
+ dbEventlog: SqliteDb
+ dbState: SqliteDb
+ /** Only used to know whether we can safely query dbState during setup execution */
+ dbStateMissing: boolean
  initialBlockingSyncContext: InitialBlockingSyncContext
+ onError: 'shutdown' | 'ignore'
+ params: {
+ /**
+ * @default 10
+ */
+ localPushBatchSize?: number
+ /**
+ * @default 50
+ */
+ backendPushBatchSize?: number
+ }
+ testing: {
+ delays?: {
+ localPushProcessing?: Effect.Effect<void>
+ }
+ }
  }): Effect.Effect<LeaderSyncProcessor, UnexpectedError, Scope.Scope> =>
  Effect.gen(function* () {
- const syncBackendQueue = yield* BucketQueue.make<MutationEvent.EncodedWithMeta>()
+ const syncBackendPushQueue = yield* BucketQueue.make<LiveStoreEvent.EncodedWithMeta>()
+ const localPushBatchSize = params.localPushBatchSize ?? 10
+ const backendPushBatchSize = params.backendPushBatchSize ?? 50
 
- const stateRef = yield* Ref.make<ProcessorState>({ _tag: 'init' })
+ const syncStateSref = yield* SubscriptionRef.make<SyncState.SyncState | undefined>(undefined)
 
- const semaphore = yield* Effect.makeSemaphore(1)
-
- const isLocalEvent = (mutationEventEncoded: MutationEvent.EncodedWithMeta) => {
- const mutationDef = schema.mutations.get(mutationEventEncoded.mutation)!
- return mutationDef.options.localOnly
+ const isClientEvent = (eventEncoded: LiveStoreEvent.EncodedWithMeta) => {
+ const { eventDef } = getEventDef(schema, eventEncoded.name)
+ return eventDef.options.clientOnly
  }
 
- const spanRef = { current: undefined as otel.Span | undefined }
- const applyMutationItemsRef = { current: undefined as ApplyMutationItems | undefined }
-
- // TODO get rid of counters once Effect semaphore ordering is fixed
- let counterRef = 0
- let expectedCounter = 0
-
- /*
- TODO: refactor
- - Pushes go directly into a Mailbox
- - Have a worker fiber that takes from the mailbox (wouldn't need a semaphore)
- */
+ const connectedClientSessionPullQueues = yield* makePullQueueSet
+
+ /**
+ * Tracks generations of queued local push events.
+ * If a local-push batch is rejected, all subsequent push queue items with the same generation are also rejected,
+ * even if they would be valid on their own.
+ */
+ // TODO get rid of this in favour of the `mergeGeneration` event sequence number field
+ const currentLocalPushGenerationRef = { current: 0 }
+
+ type MergeCounter = number
+ const mergeCounterRef = { current: dbStateMissing ? 0 : yield* getMergeCounterFromDb(dbState) }
+ const mergePayloads = new Map<MergeCounter, typeof SyncState.PayloadUpstream.Type>()
+
+ // This context depends on data from `boot`, we should find a better implementation to avoid this ref indirection.
+ const ctxRef = {
+ current: undefined as
+ | undefined
+ | {
+ otelSpan: otel.Span | undefined
+ span: Tracer.Span
+ devtoolsLatch: Effect.Latch | undefined
+ runtime: Runtime.Runtime<LeaderThreadCtx>
+ },
+ }
 
- const waitForSyncState = (counter: number): Effect.Effect<ProcessorStateInSync> =>
- Effect.gen(function* () {
- // console.log('waitForSyncState: waiting for semaphore', counter)
- yield* semaphore.take(1)
- // NOTE this is a workaround to ensure the semaphore take-order is respected
- // TODO this needs to be fixed upstream in Effect
- if (counter !== expectedCounter) {
- console.log(
- `waitForSyncState: counter mismatch (expected: ${expectedCounter}, got: ${counter}), releasing semaphore`,
- )
- yield* semaphore.release(1)
- yield* Effect.yieldNow()
- // Retrying...
- return yield* waitForSyncState(counter)
- }
- // console.log('waitForSyncState: took semaphore', counter)
- const state = yield* Ref.get(stateRef)
- if (state._tag !== 'in-sync') {
- return shouldNeverHappen('Expected to be in-sync but got ' + state._tag)
- }
- expectedCounter = counter + 1
- return state
- }).pipe(Effect.withSpan(`@livestore/common:leader-thread:syncing:waitForSyncState(${counter})`))
+ const localPushesQueue = yield* BucketQueue.make<LocalPushQueueItem>()
+ const localPushesLatch = yield* Effect.makeLatch(true)
+ const pullLatch = yield* Effect.makeLatch(true)
+
+ /**
+ * Additionally to the `syncStateSref` we also need the `pushHeadRef` in order to prevent old/duplicate
+ * events from being pushed in a scenario like this:
+ * - client session A pushes e1
+ * - leader sync processor takes a bit and hasn't yet taken e1 from the localPushesQueue
+ * - client session B also pushes e1 (which should be rejected)
+ *
+ * Thus the purpoe of the pushHeadRef is the guard the integrity of the local push queue
+ */
+ const pushHeadRef = { current: EventSequenceNumber.ROOT }
+ const advancePushHead = (eventNum: EventSequenceNumber.EventSequenceNumber) => {
+ pushHeadRef.current = EventSequenceNumber.max(pushHeadRef.current, eventNum)
+ }
 
- const push = (newEvents: ReadonlyArray<MutationEvent.EncodedWithMeta>) =>
+ // NOTE: New events are only pushed to sync backend after successful local push processing
+ const push: LeaderSyncProcessor['push'] = (newEvents, options) =>
  Effect.gen(function* () {
- const counter = counterRef
- counterRef++
- // TODO validate batch
  if (newEvents.length === 0) return
 
- const { connectedClientSessionPullQueues } = yield* LeaderThreadCtx
-
- // TODO if there are multiple pending pushes, we should batch them together
- const state = yield* waitForSyncState(counter)
-
- const updateResult = SyncState.updateSyncState({
- syncState: state.syncState,
- payload: { _tag: 'local-push', newEvents },
- isLocalEvent,
- isEqualEvent: MutationEvent.isEqualEncoded,
- })
+ yield* validatePushBatch(newEvents, pushHeadRef.current)
 
- if (updateResult._tag === 'rebase') {
- return shouldNeverHappen('The leader thread should never have to rebase due to a local push')
- } else if (updateResult._tag === 'reject') {
- return yield* Effect.fail(
- InvalidPushError.make({
- reason: {
- _tag: 'LeaderAhead',
- minimumExpectedId: updateResult.expectedMinimumId,
- providedId: newEvents.at(0)!.id,
- },
- }),
- )
- }
-
- const fiber = yield* applyMutationItemsRef.current!({ batchItems: updateResult.newEvents }).pipe(Effect.fork)
+ advancePushHead(newEvents.at(-1)!.seqNum)
 
- yield* Ref.set(stateRef, {
- _tag: 'applying-syncstate-advance',
- origin: 'push',
- syncState: updateResult.newSyncState,
- fiber,
- })
+ const waitForProcessing = options?.waitForProcessing ?? false
+ const generation = currentLocalPushGenerationRef.current
 
- // console.log('setRef:applying-syncstate-advance after push', counter)
+ if (waitForProcessing) {
+ const deferreds = yield* Effect.forEach(newEvents, () => Deferred.make<void, LeaderAheadError>())
 
- yield* connectedClientSessionPullQueues.offer({
- payload: { _tag: 'upstream-advance', newEvents: updateResult.newEvents },
- remaining: 0,
- })
-
- spanRef.current?.addEvent('local-push', {
- batchSize: newEvents.length,
- updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
- })
-
- // Don't sync localOnly mutations
- const filteredBatch = updateResult.newEvents.filter((mutationEventEncoded) => {
- const mutationDef = schema.mutations.get(mutationEventEncoded.mutation)!
- return mutationDef.options.localOnly === false
- })
+ const items = newEvents.map(
+ (eventEncoded, i) => [eventEncoded, deferreds[i], generation] as LocalPushQueueItem,
+ )
 
- yield* BucketQueue.offerAll(syncBackendQueue, filteredBatch)
+ yield* BucketQueue.offerAll(localPushesQueue, items)
 
- yield* fiber // Waiting for the mutation to be applied
+ yield* Effect.all(deferreds)
+ } else {
+ const items = newEvents.map((eventEncoded) => [eventEncoded, undefined, generation] as LocalPushQueueItem)
+ yield* BucketQueue.offerAll(localPushesQueue, items)
+ }
  }).pipe(
- Effect.withSpan('@livestore/common:leader-thread:syncing:local-push', {
+ Effect.withSpan('@livestore/common:LeaderSyncProcessor:push', {
  attributes: {
  batchSize: newEvents.length,
  batch: TRACE_VERBOSE ? newEvents : undefined,
  },
- links: spanRef.current
- ? [{ _tag: 'SpanLink', span: OtelTracer.makeExternalSpan(spanRef.current.spanContext()), attributes: {} }]
- : undefined,
+ links: ctxRef.current?.span ? [{ _tag: 'SpanLink', span: ctxRef.current.span, attributes: {} }] : undefined,
  }),
  )
 
- const pushPartial: LeaderSyncProcessor['pushPartial'] = (mutationEventEncoded_) =>
+ const pushPartial: LeaderSyncProcessor['pushPartial'] = ({ event: { name, args }, clientId, sessionId }) =>
  Effect.gen(function* () {
- const state = yield* Ref.get(stateRef)
- if (state._tag === 'init') return shouldNeverHappen('Not initialized')
+ const syncState = yield* syncStateSref
+ if (syncState === undefined) return shouldNeverHappen('Not initialized')
 
- const mutationDef =
- schema.mutations.get(mutationEventEncoded_.mutation) ??
- shouldNeverHappen(`Unknown mutation: ${mutationEventEncoded_.mutation}`)
+ const { eventDef } = getEventDef(schema, name)
 
- const mutationEventEncoded = new MutationEvent.EncodedWithMeta({
- ...mutationEventEncoded_,
- ...EventId.nextPair(state.syncState.localHead, mutationDef.options.localOnly),
+ const eventEncoded = new LiveStoreEvent.EncodedWithMeta({
+ name,
+ args,
+ clientId,
+ sessionId,
+ ...EventSequenceNumber.nextPair(syncState.localHead, eventDef.options.clientOnly),
  })
 
- yield* push([mutationEventEncoded])
- }).pipe(Effect.catchTag('InvalidPushError', Effect.orDie))
+ yield* push([eventEncoded])
+ }).pipe(Effect.catchTag('LeaderAheadError', Effect.orDie))
 
  // Starts various background loops
- const boot: LeaderSyncProcessor['boot'] = ({ dbReady }) =>
- Effect.gen(function* () {
- const span = yield* OtelTracer.currentOtelSpan.pipe(Effect.catchAll(() => Effect.succeed(undefined)))
- spanRef.current = span
+ const boot: LeaderSyncProcessor['boot'] = Effect.gen(function* () {
+ const span = yield* Effect.currentSpan.pipe(Effect.orDie)
+ const otelSpan = yield* OtelTracer.currentOtelSpan.pipe(Effect.catchAll(() => Effect.succeed(undefined)))
+ const { devtools, shutdownChannel } = yield* LeaderThreadCtx
+ const runtime = yield* Effect.runtime<LeaderThreadCtx>()
+
+ ctxRef.current = {
+ otelSpan,
+ span,
+ devtoolsLatch: devtools.enabled ? devtools.syncBackendLatch : undefined,
+ runtime,
+ }
 
- const initialBackendHead = dbMissing ? EventId.ROOT.global : getBackendHeadFromDb(dbLog)
- const initialLocalHead = dbMissing ? EventId.ROOT : getLocalHeadFromDb(dbLog)
+ const initialLocalHead = dbEventlogMissing ? EventSequenceNumber.ROOT : Eventlog.getClientHeadFromDb(dbEventlog)
 
- if (initialBackendHead > initialLocalHead.global) {
- return shouldNeverHappen(
- `During boot the backend head (${initialBackendHead}) should never be greater than the local head (${initialLocalHead.global})`,
- )
- }
+ const initialBackendHead = dbEventlogMissing
+ ? EventSequenceNumber.ROOT.global
+ : Eventlog.getBackendHeadFromDb(dbEventlog)
 
- const pendingMutationEvents = yield* getMutationEventsSince({
- global: initialBackendHead,
- local: EventId.localDefault,
- }).pipe(Effect.map(ReadonlyArray.map((_) => new MutationEvent.EncodedWithMeta(_))))
-
- const initialSyncState = {
- pending: pendingMutationEvents,
- // On the leader we don't need a rollback tail beyond `pending` items
- rollbackTail: [],
- upstreamHead: { global: initialBackendHead, local: EventId.localDefault },
- localHead: initialLocalHead,
- } as SyncState.SyncState
-
- /** State transitions need to happen atomically, so we use a Ref to track the state */
- yield* Ref.set(stateRef, { _tag: 'in-sync', syncState: initialSyncState })
-
- applyMutationItemsRef.current = yield* makeApplyMutationItems({ stateRef, semaphore })
-
- // Rehydrate sync queue
- if (pendingMutationEvents.length > 0) {
- const filteredBatch = pendingMutationEvents
- // Don't sync localOnly mutations
- .filter((mutationEventEncoded) => {
- const mutationDef = schema.mutations.get(mutationEventEncoded.mutation)!
- return mutationDef.options.localOnly === false
- })
+ if (initialBackendHead > initialLocalHead.global) {
+ return shouldNeverHappen(
+ `During boot the backend head (${initialBackendHead}) should never be greater than the local head (${initialLocalHead.global})`,
+ )
+ }
+
+ const pendingEvents = dbEventlogMissing
+ ? []
+ : yield* Eventlog.getEventsSince({ global: initialBackendHead, client: EventSequenceNumber.clientDefault })
+
+ const initialSyncState = new SyncState.SyncState({
+ pending: pendingEvents,
+ upstreamHead: { global: initialBackendHead, client: EventSequenceNumber.clientDefault },
+ localHead: initialLocalHead,
+ })
+
+ /** State transitions need to happen atomically, so we use a Ref to track the state */
+ yield* SubscriptionRef.set(syncStateSref, initialSyncState)
+
+ // Rehydrate sync queue
+ if (pendingEvents.length > 0) {
+ const globalPendingEvents = pendingEvents
+ // Don't sync clientOnly events
+ .filter((eventEncoded) => {
+ const { eventDef } = getEventDef(schema, eventEncoded.name)
+ return eventDef.options.clientOnly === false
+ })
 
- yield* BucketQueue.offerAll(syncBackendQueue, filteredBatch)
+ if (globalPendingEvents.length > 0) {
+ yield* BucketQueue.offerAll(syncBackendPushQueue, globalPendingEvents)
  }
+ }
 
- const backendPushingFiberHandle = yield* FiberHandle.make()
+ const shutdownOnError = (cause: unknown) =>
+ Effect.gen(function* () {
+ if (onError === 'shutdown') {
+ yield* shutdownChannel.send(UnexpectedError.make({ cause }))
+ yield* Effect.die(cause)
+ }
+ })
 
- yield* FiberHandle.run(
- backendPushingFiberHandle,
- backgroundBackendPushing({ dbReady, syncBackendQueue, span }).pipe(Effect.tapCauseLogPretty),
- )
+ yield* backgroundApplyLocalPushes({
+ localPushesLatch,
+ localPushesQueue,
+ pullLatch,
+ syncStateSref,
+ syncBackendPushQueue,
+ schema,
+ isClientEvent,
+ otelSpan,
+ currentLocalPushGenerationRef,
+ connectedClientSessionPullQueues,
+ mergeCounterRef,
+ mergePayloads,
+ localPushBatchSize,
+ testing: {
+ delay: testing?.delays?.localPushProcessing,
+ },
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped)
+
+ const backendPushingFiberHandle = yield* FiberHandle.make()
+ const backendPushingEffect = backgroundBackendPushing({
+ syncBackendPushQueue,
+ otelSpan,
+ devtoolsLatch: ctxRef.current?.devtoolsLatch,
+ backendPushBatchSize,
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError))
+
+ yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect)
+
+ yield* backgroundBackendPulling({
+ initialBackendHead,
+ isClientEvent,
+ restartBackendPushing: (filteredRebasedPending) =>
+ Effect.gen(function* () {
+ // Stop current pushing fiber
+ yield* FiberHandle.clear(backendPushingFiberHandle)
 
- yield* backgroundBackendPulling({
- dbReady,
- initialBackendHead,
- isLocalEvent,
- restartBackendPushing: (filteredRebasedPending) =>
- Effect.gen(function* () {
- // Stop current pushing fiber
- yield* FiberHandle.clear(backendPushingFiberHandle)
-
- // Reset the sync queue
- yield* BucketQueue.clear(syncBackendQueue)
- yield* BucketQueue.offerAll(syncBackendQueue, filteredRebasedPending)
-
- // Restart pushing fiber
- yield* FiberHandle.run(
- backendPushingFiberHandle,
- backgroundBackendPushing({ dbReady, syncBackendQueue, span }).pipe(Effect.tapCauseLogPretty),
- )
- }),
- applyMutationItemsRef,
- stateRef,
- semaphore,
- span,
- initialBlockingSyncContext,
- }).pipe(Effect.tapCauseLogPretty, Effect.forkScoped)
- }).pipe(Effect.withSpanScoped('@livestore/common:leader-thread:syncing'))
+ // Reset the sync backend push queue
+ yield* BucketQueue.clear(syncBackendPushQueue)
+ yield* BucketQueue.offerAll(syncBackendPushQueue, filteredRebasedPending)
+
+ // Restart pushing fiber
+ yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect)
+ }),
+ syncStateSref,
+ localPushesLatch,
+ pullLatch,
+ otelSpan,
+ initialBlockingSyncContext,
+ devtoolsLatch: ctxRef.current?.devtoolsLatch,
+ connectedClientSessionPullQueues,
+ mergeCounterRef,
+ mergePayloads,
+ advancePushHead,
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped)
+
+ return { initialLeaderHead: initialLocalHead }
+ }).pipe(Effect.withSpanScoped('@livestore/common:LeaderSyncProcessor:boot'))
+
+ const pull: LeaderSyncProcessor['pull'] = ({ cursor }) =>
333
+ Effect.gen(function* () {
334
+ const queue = yield* pullQueue({ cursor })
335
+ return Stream.fromQueue(queue)
336
+ }).pipe(Stream.unwrapScoped)
337
+
338
+ const pullQueue: LeaderSyncProcessor['pullQueue'] = ({ cursor }) => {
339
+ const runtime = ctxRef.current?.runtime ?? shouldNeverHappen('Not initialized')
340
+ return Effect.gen(function* () {
341
+ const queue = yield* connectedClientSessionPullQueues.makeQueue
342
+ const payloadsSinceCursor = Array.from(mergePayloads.entries())
343
+ .map(([mergeCounter, payload]) => ({ payload, mergeCounter }))
344
+ .filter(({ mergeCounter }) => mergeCounter > cursor.mergeCounter)
345
+ .toSorted((a, b) => a.mergeCounter - b.mergeCounter)
346
+ .map(({ payload, mergeCounter }) => {
347
+ if (payload._tag === 'upstream-advance') {
348
+ return {
349
+ payload: {
350
+ _tag: 'upstream-advance' as const,
351
+ newEvents: ReadonlyArray.dropWhile(payload.newEvents, (eventEncoded) =>
352
+ EventSequenceNumber.isGreaterThanOrEqual(cursor.eventNum, eventEncoded.seqNum),
353
+ ),
354
+ },
355
+ mergeCounter,
356
+ }
357
+ } else {
358
+ return { payload, mergeCounter }
359
+ }
360
+ })
361
+
362
+ yield* queue.offerAll(payloadsSinceCursor)
363
+
364
+ return queue
365
+ }).pipe(Effect.provide(runtime))
366
+ }
367
+
368
+ const syncState = Subscribable.make({
369
+ get: Effect.gen(function* () {
370
+ const syncState = yield* syncStateSref
371
+ if (syncState === undefined) return shouldNeverHappen('Not initialized')
372
+ return syncState
373
+ }),
374
+ changes: syncStateSref.changes.pipe(Stream.filter(isNotUndefined)),
375
+ })
324
376
 
325
377
  return {
378
+ pull,
379
+ pullQueue,
326
380
  push,
327
381
  pushPartial,
328
382
  boot,
329
- syncState: Effect.gen(function* () {
330
- const state = yield* Ref.get(stateRef)
331
- if (state._tag === 'init') return shouldNeverHappen('Not initialized')
332
- return state.syncState
333
- }),
383
+ syncState,
384
+ getMergeCounter: () => mergeCounterRef.current,
334
385
  } satisfies LeaderSyncProcessor
335
386
  })
336
387
 
337
- type ApplyMutationItems = (_: {
338
- batchItems: ReadonlyArray<MutationEvent.EncodedWithMeta>
339
- }) => Effect.Effect<void, UnexpectedError>
340
-
341
- // TODO how to handle errors gracefully
342
- const makeApplyMutationItems = ({
343
- stateRef,
344
- semaphore,
388
+ const backgroundApplyLocalPushes = ({
389
+ localPushesLatch,
390
+ localPushesQueue,
391
+ pullLatch,
392
+ syncStateSref,
393
+ syncBackendPushQueue,
394
+ schema,
395
+ isClientEvent,
396
+ otelSpan,
397
+ currentLocalPushGenerationRef,
398
+ connectedClientSessionPullQueues,
399
+ mergeCounterRef,
400
+ mergePayloads,
401
+ localPushBatchSize,
402
+ testing,
345
403
  }: {
346
- stateRef: Ref.Ref<ProcessorState>
347
- semaphore: Effect.Semaphore
348
- }): Effect.Effect<ApplyMutationItems, UnexpectedError, LeaderThreadCtx | Scope.Scope> =>
404
+ pullLatch: Effect.Latch
405
+ localPushesLatch: Effect.Latch
406
+ localPushesQueue: BucketQueue.BucketQueue<LocalPushQueueItem>
407
+ syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
408
+ syncBackendPushQueue: BucketQueue.BucketQueue<LiveStoreEvent.EncodedWithMeta>
409
+ schema: LiveStoreSchema
410
+ isClientEvent: (eventEncoded: LiveStoreEvent.EncodedWithMeta) => boolean
411
+ otelSpan: otel.Span | undefined
412
+ currentLocalPushGenerationRef: { current: number }
413
+ connectedClientSessionPullQueues: PullQueueSet
414
+ mergeCounterRef: { current: number }
415
+ mergePayloads: Map<number, typeof SyncState.PayloadUpstream.Type>
416
+ localPushBatchSize: number
417
+ testing: {
418
+ delay: Effect.Effect<void> | undefined
419
+ }
420
+ }) =>
349
421
  Effect.gen(function* () {
350
- const leaderThreadCtx = yield* LeaderThreadCtx
351
- const { db, dbLog } = leaderThreadCtx
422
+ while (true) {
423
+ if (testing.delay !== undefined) {
424
+ yield* testing.delay.pipe(Effect.withSpan('localPushProcessingDelay'))
425
+ }
352
426
 
353
- const applyMutation = yield* makeApplyMutation
427
+ const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, localPushBatchSize)
354
428
 
355
- return ({ batchItems }) =>
356
- Effect.gen(function* () {
357
- const state = yield* Ref.get(stateRef)
358
- if (state._tag !== 'applying-syncstate-advance') {
359
- // console.log('applyMutationItems: counter', counter)
360
- return shouldNeverHappen(`Expected to be applying-syncstate-advance but got ${state._tag}`)
361
- }
429
+ // Wait for the backend pulling to finish
430
+ yield* localPushesLatch.await
362
431
 
363
- db.execute('BEGIN TRANSACTION', undefined) // Start the transaction
364
- dbLog.execute('BEGIN TRANSACTION', undefined) // Start the transaction
432
+ // Prevent backend pull processing until this local push is finished
433
+ yield* pullLatch.close
365
434
 
366
- yield* Effect.addFinalizer((exit) =>
367
- Effect.gen(function* () {
368
- if (Exit.isSuccess(exit)) return
435
+ // Since the generation might have changed since enqueuing, we need to filter out items with an older generation
436
+ // It's important that we filter after we've acquired the localPushesLatch; otherwise we might filter with the old generation
437
+ const filteredBatchItems = batchItems
438
+ .filter(([_1, _2, generation]) => generation === currentLocalPushGenerationRef.current)
439
+ .map(([eventEncoded, deferred]) => [eventEncoded, deferred] as const)
369
440
 
370
- // Rollback in case of an error
371
- db.execute('ROLLBACK', undefined)
372
- dbLog.execute('ROLLBACK', undefined)
373
- }),
374
- )
441
+ if (filteredBatchItems.length === 0) {
442
+ // console.log('dropping old-gen batch', currentLocalPushGenerationRef.current)
443
+ // Allow the backend pulling to start
444
+ yield* pullLatch.open
445
+ continue
446
+ }
447
+
448
+ const [newEvents, deferreds] = ReadonlyArray.unzip(filteredBatchItems)
449
+
450
+ const syncState = yield* syncStateSref
451
+ if (syncState === undefined) return shouldNeverHappen('Not initialized')
452
+
453
+ const mergeResult = SyncState.merge({
454
+ syncState,
455
+ payload: { _tag: 'local-push', newEvents },
456
+ isClientEvent,
457
+ isEqualEvent: LiveStoreEvent.isEqualEncoded,
458
+ })
459
+
460
+ const mergeCounter = yield* incrementMergeCounter(mergeCounterRef)
461
+
462
+ switch (mergeResult._tag) {
463
+ case 'unexpected-error': {
464
+ otelSpan?.addEvent(`[${mergeCounter}]:push:unexpected-error`, {
465
+ batchSize: newEvents.length,
466
+ newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
467
+ })
468
+ return yield* Effect.fail(mergeResult.cause)
469
+ }
470
+ case 'rebase': {
471
+ return shouldNeverHappen('The leader thread should never have to rebase due to a local push')
472
+ }
473
+ case 'reject': {
474
+ otelSpan?.addEvent(`[${mergeCounter}]:push:reject`, {
475
+ batchSize: newEvents.length,
476
+ mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
477
+ })
375
478
 
376
- for (let i = 0; i < batchItems.length; i++) {
377
- const { meta, ...mutationEventEncoded } = batchItems[i]!
479
+ // TODO: how to test this?
480
+ currentLocalPushGenerationRef.current++
378
481
 
379
- yield* applyMutation(mutationEventEncoded)
482
+ const nextGeneration = currentLocalPushGenerationRef.current
380
483
 
381
- if (meta?.deferred) {
382
- yield* Deferred.succeed(meta.deferred, void 0)
484
+ const providedNum = newEvents.at(0)!.seqNum
485
+ // All subsequent pushes with the same generation should be rejected as well
486
+ // We're also handling the case where the localPushesQueue already contains events
487
+ // from the next generation, which we preserve in the queue
488
+ const remainingEventsMatchingGeneration = yield* BucketQueue.takeSplitWhere(
489
+ localPushesQueue,
490
+ (item) => item[2] >= nextGeneration,
491
+ )
492
+
493
+ // TODO we still need to better understand and handle this scenario
494
+ if (LS_DEV && (yield* BucketQueue.size(localPushesQueue)) > 0) {
495
+ console.log('localPushesQueue is not empty', yield* BucketQueue.size(localPushesQueue))
496
+ debugger
383
497
  }
384
498
 
385
- // TODO re-introduce this
386
- // if (i < batchItems.length - 1) {
387
- // yield* Ref.set(stateRef, { ...state, proccesHead: batchItems[i + 1]!.id })
388
- // }
499
+ const allDeferredsToReject = [
500
+ ...deferreds,
501
+ ...remainingEventsMatchingGeneration.map(([_, deferred]) => deferred),
502
+ ].filter(isNotUndefined)
503
+
504
+ yield* Effect.forEach(allDeferredsToReject, (deferred) =>
505
+ Deferred.fail(
506
+ deferred,
507
+ LeaderAheadError.make({
508
+ minimumExpectedNum: mergeResult.expectedMinimumId,
509
+ providedNum,
510
+ // nextGeneration,
511
+ }),
512
+ ),
513
+ )
514
+
515
+ // Allow the backend pulling to start
516
+ yield* pullLatch.open
517
+
518
+ // In this case we're skipping the state update and downstream/upstream processing
519
+ // We've cleared the local push queue and are now waiting for new local pushes / backend pulls
520
+ continue
521
+ }
522
+ case 'advance': {
523
+ break
524
+ }
525
+ default: {
526
+ casesHandled(mergeResult)
389
527
  }
528
+ }
390
529
 
391
- db.execute('COMMIT', undefined) // Commit the transaction
392
- dbLog.execute('COMMIT', undefined) // Commit the transaction
530
+ yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState)
393
531
 
394
- yield* Ref.set(stateRef, { _tag: 'in-sync', syncState: state.syncState })
395
- // console.log('setRef:sync after applyMutationItems', counter)
396
- yield* semaphore.release(1)
397
- }).pipe(
398
- Effect.scoped,
399
- Effect.withSpan('@livestore/common:leader-thread:syncing:applyMutationItems', {
400
- attributes: { count: batchItems.length },
401
- }),
402
- Effect.tapCauseLogPretty,
403
- UnexpectedError.mapToUnexpectedError,
404
- )
532
+ yield* connectedClientSessionPullQueues.offer({
533
+ payload: SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }),
534
+ mergeCounter,
535
+ })
536
+ mergePayloads.set(mergeCounter, SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }))
537
+
538
+ otelSpan?.addEvent(`[${mergeCounter}]:push:advance`, {
539
+ batchSize: newEvents.length,
540
+ mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
541
+ })
542
+
543
+ // Don't sync clientOnly events
544
+ const filteredBatch = mergeResult.newEvents.filter((eventEncoded) => {
545
+ const { eventDef } = getEventDef(schema, eventEncoded.name)
546
+ return eventDef.options.clientOnly === false
547
+ })
548
+
549
+ yield* BucketQueue.offerAll(syncBackendPushQueue, filteredBatch)
550
+
551
+ yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds })
552
+
553
+ // Allow the backend pulling to start
554
+ yield* pullLatch.open
555
+ }
405
556
  })
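backgroundApplyLocalPushes coordinates with backend pulling via two latches: a pull closes `localPushesLatch` while it merges a chunk, and a local-push batch closes `pullLatch` while it runs, so the two phases never interleave. Rejected pushes bump a generation counter so stale queue items are dropped rather than re-applied. An editorial, plain-TypeScript model of those two ideas (not the Effect `Latch` API):

class LatchSketch {
  private isOpen = true
  private waiters: Array<() => void> = []
  close() {
    this.isOpen = false
  }
  open() {
    this.isOpen = true
    for (const resume of this.waiters.splice(0)) resume()
  }
  // Resolves immediately while open, otherwise once `open()` is called
  whenOpen(): Promise<void> {
    return this.isOpen ? Promise.resolve() : new Promise<void>((resolve) => this.waiters.push(() => resolve()))
  }
}

// Items enqueued before a rejection carry an older generation and are filtered out
const isStaleLocalPush = (itemGeneration: number, currentGeneration: { current: number }) =>
  itemGeneration < currentGeneration.current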
406
557
 
558
+ type MaterializeEventsBatch = (_: {
559
+ batchItems: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>
560
+ /**
561
+ * The deferreds are used by the caller to know when the event has been processed.
562
+ * Indexes are aligned with `batchItems`
563
+ */
564
+ deferreds: ReadonlyArray<Deferred.Deferred<void, LeaderAheadError> | undefined> | undefined
565
+ }) => Effect.Effect<void, UnexpectedError, LeaderThreadCtx>
566
+
567
+ // TODO how to handle errors gracefully
568
+ const materializeEventsBatch: MaterializeEventsBatch = ({ batchItems, deferreds }) =>
569
+ Effect.gen(function* () {
570
+ const { dbState: db, dbEventlog, materializeEvent } = yield* LeaderThreadCtx
571
+
572
+ // NOTE We always start a transaction to ensure consistency between db and eventlog (even for single-item batches)
573
+ db.execute('BEGIN TRANSACTION', undefined) // Start the transaction
574
+ dbEventlog.execute('BEGIN TRANSACTION', undefined) // Start the transaction
575
+
576
+ yield* Effect.addFinalizer((exit) =>
577
+ Effect.gen(function* () {
578
+ if (Exit.isSuccess(exit)) return
579
+
580
+ // Rollback in case of an error
581
+ db.execute('ROLLBACK', undefined)
582
+ dbEventlog.execute('ROLLBACK', undefined)
583
+ }),
584
+ )
585
+
586
+ for (let i = 0; i < batchItems.length; i++) {
587
+ const { sessionChangeset } = yield* materializeEvent(batchItems[i]!)
588
+ batchItems[i]!.meta.sessionChangeset = sessionChangeset
589
+
590
+ if (deferreds?.[i] !== undefined) {
591
+ yield* Deferred.succeed(deferreds[i]!, void 0)
592
+ }
593
+ }
594
+
595
+ db.execute('COMMIT', undefined) // Commit the transaction
596
+ dbEventlog.execute('COMMIT', undefined) // Commit the transaction
597
+ }).pipe(
598
+ Effect.uninterruptible,
599
+ Effect.scoped,
600
+ Effect.withSpan('@livestore/common:LeaderSyncProcessor:materializeEventItems', {
601
+ attributes: { batchSize: batchItems.length },
602
+ }),
603
+ Effect.tapCauseLogPretty,
604
+ UnexpectedError.mapToUnexpectedError,
605
+ )
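materializeEventsBatch wraps each batch in a pair of SQLite transactions so the state database and the eventlog either both apply the batch or both roll back. The editorial sketch below shows the same shape with a plain try/catch instead of an Effect finalizer (the `Db` interface is an assumption, not the real `SqliteDb` type):

interface Db {
  execute: (statement: string) => void
}

const materializeBatchSketch = <E>(dbState: Db, dbEventlog: Db, batch: ReadonlyArray<E>, materialize: (event: E) => void) => {
  dbState.execute('BEGIN TRANSACTION')
  dbEventlog.execute('BEGIN TRANSACTION')
  try {
    for (const event of batch) materialize(event)
    dbState.execute('COMMIT')
    dbEventlog.execute('COMMIT')
  } catch (error) {
    // Keep both databases consistent if any single event fails to materialize
    dbState.execute('ROLLBACK')
    dbEventlog.execute('ROLLBACK')
    throw error
  }
}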
606
+
407
607
  const backgroundBackendPulling = ({
408
- dbReady,
409
608
  initialBackendHead,
410
- isLocalEvent,
609
+ isClientEvent,
411
610
  restartBackendPushing,
412
- span,
413
- stateRef,
414
- applyMutationItemsRef,
415
- semaphore,
611
+ otelSpan,
612
+ syncStateSref,
613
+ localPushesLatch,
614
+ pullLatch,
615
+ devtoolsLatch,
416
616
  initialBlockingSyncContext,
617
+ connectedClientSessionPullQueues,
618
+ mergeCounterRef,
619
+ mergePayloads,
620
+ advancePushHead,
417
621
  }: {
418
- dbReady: Deferred.Deferred<void>
419
- initialBackendHead: EventId.GlobalEventId
420
- isLocalEvent: (mutationEventEncoded: MutationEvent.EncodedWithMeta) => boolean
622
+ initialBackendHead: EventSequenceNumber.GlobalEventSequenceNumber
623
+ isClientEvent: (eventEncoded: LiveStoreEvent.EncodedWithMeta) => boolean
421
624
  restartBackendPushing: (
422
- filteredRebasedPending: ReadonlyArray<MutationEvent.EncodedWithMeta>,
625
+ filteredRebasedPending: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>,
423
626
  ) => Effect.Effect<void, UnexpectedError, LeaderThreadCtx | HttpClient.HttpClient>
424
- span: otel.Span | undefined
425
- stateRef: Ref.Ref<ProcessorState>
426
- applyMutationItemsRef: { current: ApplyMutationItems | undefined }
427
- semaphore: Effect.Semaphore
627
+ otelSpan: otel.Span | undefined
628
+ syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
629
+ localPushesLatch: Effect.Latch
630
+ pullLatch: Effect.Latch
631
+ devtoolsLatch: Effect.Latch | undefined
428
632
  initialBlockingSyncContext: InitialBlockingSyncContext
633
+ connectedClientSessionPullQueues: PullQueueSet
634
+ mergeCounterRef: { current: number }
635
+ mergePayloads: Map<number, typeof SyncState.PayloadUpstream.Type>
636
+ advancePushHead: (eventNum: EventSequenceNumber.EventSequenceNumber) => void
429
637
  }) =>
430
638
  Effect.gen(function* () {
431
- const { syncBackend, db, dbLog, connectedClientSessionPullQueues, schema } = yield* LeaderThreadCtx
639
+ const { syncBackend, dbState: db, dbEventlog, schema } = yield* LeaderThreadCtx
432
640
 
433
641
  if (syncBackend === undefined) return
434
642
 
435
- const cursorInfo = yield* getCursorInfo(initialBackendHead)
436
-
437
- const onNewPullChunk = (newEvents: MutationEvent.EncodedWithMeta[], remaining: number) =>
643
+ const onNewPullChunk = (newEvents: LiveStoreEvent.EncodedWithMeta[], remaining: number) =>
438
644
  Effect.gen(function* () {
439
645
  if (newEvents.length === 0) return
440
646
 
441
- const state = yield* Ref.get(stateRef)
442
- if (state._tag === 'init') return shouldNeverHappen('Not initialized')
647
+ if (devtoolsLatch !== undefined) {
648
+ yield* devtoolsLatch.await
649
+ }
443
650
 
444
- // const counter = state.counter + 1
651
+ // Prevent more local pushes from being processed until this pull is finished
652
+ yield* localPushesLatch.close
445
653
 
446
- if (state._tag === 'applying-syncstate-advance') {
447
- if (state.origin === 'push') {
448
- yield* Fiber.interrupt(state.fiber)
449
- // In theory we should force-take the semaphore here, but as it's still taken,
450
- // it's already in the right state we want it to be in
451
- } else {
452
- // Wait for previous advance to finish
453
- yield* semaphore.take(1)
454
- }
455
- }
654
+ // Wait for pending local pushes to finish
655
+ yield* pullLatch.await
456
656
 
457
- const trimRollbackUntil = newEvents.at(-1)!.id
657
+ const syncState = yield* syncStateSref
658
+ if (syncState === undefined) return shouldNeverHappen('Not initialized')
458
659
 
459
- const updateResult = SyncState.updateSyncState({
460
- syncState: state.syncState,
461
- payload: { _tag: 'upstream-advance', newEvents, trimRollbackUntil },
462
- isLocalEvent,
463
- isEqualEvent: MutationEvent.isEqualEncoded,
464
- ignoreLocalEvents: true,
660
+ const mergeResult = SyncState.merge({
661
+ syncState,
662
+ payload: SyncState.PayloadUpstreamAdvance.make({ newEvents }),
663
+ isClientEvent,
664
+ isEqualEvent: LiveStoreEvent.isEqualEncoded,
665
+ ignoreClientEvents: true,
465
666
  })
466
667
 
467
- if (updateResult._tag === 'reject') {
668
+ const mergeCounter = yield* incrementMergeCounter(mergeCounterRef)
669
+
670
+ if (mergeResult._tag === 'reject') {
468
671
  return shouldNeverHappen('The leader thread should never reject upstream advances')
672
+ } else if (mergeResult._tag === 'unexpected-error') {
673
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:unexpected-error`, {
674
+ newEventsCount: newEvents.length,
675
+ newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
676
+ })
677
+ return yield* Effect.fail(mergeResult.cause)
469
678
  }
470
679
 
471
- const newBackendHead = newEvents.at(-1)!.id
680
+ const newBackendHead = newEvents.at(-1)!.seqNum
472
681
 
473
- updateBackendHead(dbLog, newBackendHead)
682
+ Eventlog.updateBackendHead(dbEventlog, newBackendHead)
474
683
 
475
- if (updateResult._tag === 'rebase') {
476
- span?.addEvent('backend-pull:rebase', {
684
+ if (mergeResult._tag === 'rebase') {
685
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:rebase`, {
477
686
  newEventsCount: newEvents.length,
478
687
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
479
- rollbackCount: updateResult.eventsToRollback.length,
480
- updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
688
+ rollbackCount: mergeResult.rollbackEvents.length,
689
+ mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
481
690
  })
482
691
 
483
- const filteredRebasedPending = updateResult.newSyncState.pending.filter((mutationEvent) => {
484
- const mutationDef = schema.mutations.get(mutationEvent.mutation)!
485
- return mutationDef.options.localOnly === false
692
+ const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
693
+ const { eventDef } = getEventDef(schema, event.name)
694
+ return eventDef.options.clientOnly === false
486
695
  })
487
- yield* restartBackendPushing(filteredRebasedPending)
696
+ yield* restartBackendPushing(globalRebasedPendingEvents)
488
697
 
489
- if (updateResult.eventsToRollback.length > 0) {
490
- yield* rollback({ db, dbLog, eventIdsToRollback: updateResult.eventsToRollback.map((_) => _.id) })
698
+ if (mergeResult.rollbackEvents.length > 0) {
699
+ yield* rollback({
700
+ dbState: db,
701
+ dbEventlog,
702
+ eventNumsToRollback: mergeResult.rollbackEvents.map((_) => _.seqNum),
703
+ })
491
704
  }
492
705
 
493
706
  yield* connectedClientSessionPullQueues.offer({
494
- payload: {
495
- _tag: 'upstream-rebase',
496
- newEvents: updateResult.newEvents,
497
- rollbackUntil: updateResult.eventsToRollback.at(0)!.id,
498
- trimRollbackUntil,
499
- },
500
- remaining,
707
+ payload: SyncState.PayloadUpstreamRebase.make({
708
+ newEvents: mergeResult.newEvents,
709
+ rollbackEvents: mergeResult.rollbackEvents,
710
+ }),
711
+ mergeCounter,
501
712
  })
713
+ mergePayloads.set(
714
+ mergeCounter,
715
+ SyncState.PayloadUpstreamRebase.make({
716
+ newEvents: mergeResult.newEvents,
717
+ rollbackEvents: mergeResult.rollbackEvents,
718
+ }),
719
+ )
502
720
  } else {
503
- span?.addEvent('backend-pull:advance', {
721
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:advance`, {
504
722
  newEventsCount: newEvents.length,
505
- updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
723
+ mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
506
724
  })
507
725
 
508
726
  yield* connectedClientSessionPullQueues.offer({
509
- payload: { _tag: 'upstream-advance', newEvents: updateResult.newEvents, trimRollbackUntil },
510
- remaining,
727
+ payload: SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }),
728
+ mergeCounter,
511
729
  })
730
+ mergePayloads.set(mergeCounter, SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }))
731
+
732
+ if (mergeResult.confirmedEvents.length > 0) {
733
+ // `mergeResult.confirmedEvents` don't contain the correct sync metadata, so we need to use
734
+ // `newEvents` instead which we filter via `mergeResult.confirmedEvents`
735
+ const confirmedNewEvents = newEvents.filter((event) =>
736
+ mergeResult.confirmedEvents.some((confirmedEvent) =>
737
+ EventSequenceNumber.isEqual(event.seqNum, confirmedEvent.seqNum),
738
+ ),
739
+ )
740
+ yield* Eventlog.updateSyncMetadata(confirmedNewEvents)
741
+ }
512
742
  }
513
743
 
744
+ // Remove the changeset rows that are no longer needed, as we'll never have to roll back beyond this point
514
745
  trimChangesetRows(db, newBackendHead)
515
746
 
516
- const fiber = yield* applyMutationItemsRef.current!({ batchItems: updateResult.newEvents }).pipe(Effect.fork)
747
+ advancePushHead(mergeResult.newSyncState.localHead)
517
748
 
518
- yield* Ref.set(stateRef, {
519
- _tag: 'applying-syncstate-advance',
520
- origin: 'pull',
521
- syncState: updateResult.newSyncState,
522
- fiber,
523
- })
749
+ yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined })
750
+
751
+ yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState)
752
+
753
+ // Allow local pushes to be processed again
754
+ if (remaining === 0) {
755
+ yield* localPushesLatch.open
756
+ }
524
757
  })
525
758
 
759
+ const cursorInfo = yield* Eventlog.getSyncBackendCursorInfo(initialBackendHead)
760
+
526
761
  yield* syncBackend.pull(cursorInfo).pipe(
527
762
  // TODO only take from queue while connected
528
763
  Stream.tap(({ batch, remaining }) =>
@@ -534,16 +769,13 @@ const backgroundBackendPulling = ({
534
769
  // },
535
770
  // })
536
771
 
537
- // Wait for the db to be initially created
538
- yield* dbReady
539
-
540
- // NOTE we only want to take process mutations when the sync backend is connected
772
+ // NOTE we only want to take and process events when the sync backend is connected
541
773
  // (e.g. needed for simulating being offline)
542
774
  // TODO remove when there's a better way to handle this in stream above
543
775
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
544
776
 
545
777
  yield* onNewPullChunk(
546
- batch.map((_) => MutationEvent.EncodedWithMeta.fromGlobal(_.mutationEventEncoded)),
778
+ batch.map((_) => LiveStoreEvent.EncodedWithMeta.fromGlobal(_.eventEncoded, _.metadata)),
547
779
  remaining,
548
780
  )
549
781
 
@@ -553,94 +785,35 @@ const backgroundBackendPulling = ({
553
785
  Stream.runDrain,
554
786
  Effect.interruptible,
555
787
  )
556
- }).pipe(Effect.withSpan('@livestore/common:leader-thread:syncing:backend-pulling'))
557
-
558
- const rollback = ({
559
- db,
560
- dbLog,
561
- eventIdsToRollback,
562
- }: {
563
- db: SynchronousDatabase
564
- dbLog: SynchronousDatabase
565
- eventIdsToRollback: EventId.EventId[]
566
- }) =>
567
- Effect.gen(function* () {
568
- const rollbackEvents = db
569
- .select<SessionChangesetMetaRow>(
570
- sql`SELECT * FROM ${SESSION_CHANGESET_META_TABLE} WHERE (idGlobal, idLocal) IN (${eventIdsToRollback.map((id) => `(${id.global}, ${id.local})`).join(', ')})`,
571
- )
572
- .map((_) => ({ id: { global: _.idGlobal, local: _.idLocal }, changeset: _.changeset, debug: _.debug }))
573
- .toSorted((a, b) => EventId.compare(a.id, b.id))
574
-
575
- // Apply changesets in reverse order
576
- for (let i = rollbackEvents.length - 1; i >= 0; i--) {
577
- const { changeset } = rollbackEvents[i]!
578
- if (changeset !== null) {
579
- db.makeChangeset(changeset).invert().apply()
580
- }
581
- }
582
-
583
- // Delete the changeset rows
584
- db.execute(
585
- sql`DELETE FROM ${SESSION_CHANGESET_META_TABLE} WHERE (idGlobal, idLocal) IN (${eventIdsToRollback.map((id) => `(${id.global}, ${id.local})`).join(', ')})`,
586
- )
587
-
588
- // Delete the mutation log rows
589
- dbLog.execute(
590
- sql`DELETE FROM ${MUTATION_LOG_META_TABLE} WHERE (idGlobal, idLocal) IN (${eventIdsToRollback.map((id) => `(${id.global}, ${id.local})`).join(', ')})`,
591
- )
592
- }).pipe(
593
- Effect.withSpan('@livestore/common:leader-thread:syncing:rollback', {
594
- attributes: { count: eventIdsToRollback.length },
595
- }),
596
- )
597
-
598
- const getCursorInfo = (remoteHead: EventId.GlobalEventId) =>
599
- Effect.gen(function* () {
600
- const { dbLog } = yield* LeaderThreadCtx
601
-
602
- if (remoteHead === EventId.ROOT.global) return Option.none()
603
-
604
- const MutationlogQuerySchema = Schema.Struct({
605
- syncMetadataJson: Schema.parseJson(Schema.Option(Schema.JsonValue)),
606
- }).pipe(Schema.pluck('syncMetadataJson'), Schema.Array, Schema.head)
607
-
608
- const syncMetadataOption = yield* Effect.sync(() =>
609
- dbLog.select<{ syncMetadataJson: string }>(
610
- sql`SELECT syncMetadataJson FROM ${MUTATION_LOG_META_TABLE} WHERE idGlobal = ${remoteHead} ORDER BY idLocal ASC LIMIT 1`,
611
- ),
612
- ).pipe(Effect.andThen(Schema.decode(MutationlogQuerySchema)), Effect.map(Option.flatten), Effect.orDie)
613
-
614
- return Option.some({
615
- cursor: { global: remoteHead, local: EventId.localDefault },
616
- metadata: syncMetadataOption,
617
- }) satisfies InitialSyncInfo
618
- }).pipe(Effect.withSpan('@livestore/common:leader-thread:syncing:getCursorInfo', { attributes: { remoteHead } }))
788
+ }).pipe(Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pulling'))
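A backend pull chunk is merged into the current sync state and then handled as either an advance or a rebase: an advance only needs the sync metadata of confirmed events updated, while a rebase restarts backend pushing with the rebased pending events and rolls back the affected changesets before new events are materialized and client sessions are notified. A condensed, editorial outline in plain TypeScript (field and callback names are simplifications, not the real `SyncState.merge` result):

type PullOutcomeSketch<E> =
  | { _tag: 'advance'; newEvents: Array<E>; confirmedEvents: Array<E> }
  | { _tag: 'rebase'; newEvents: Array<E>; rollbackEvents: Array<E>; rebasedPending: Array<E> }

const handlePullChunkSketch = <E>(
  outcome: PullOutcomeSketch<E>,
  io: {
    restartBackendPushing: (pending: Array<E>) => void // re-seed the push queue after a rebase
    rollbackChangesets: (events: Array<E>) => void // apply inverted changesets, newest first
    updateSyncMetadata: (confirmed: Array<E>) => void
    notifyClientSessions: (outcome: PullOutcomeSketch<E>) => void
    materialize: (events: Array<E>) => void
  },
) => {
  if (outcome._tag === 'rebase') {
    io.restartBackendPushing(outcome.rebasedPending)
    io.rollbackChangesets(outcome.rollbackEvents)
  } else {
    io.updateSyncMetadata(outcome.confirmedEvents)
  }
  io.notifyClientSessions(outcome)
  io.materialize(outcome.newEvents)
}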
619
789
 
620
790
  const backgroundBackendPushing = ({
621
- dbReady,
622
- syncBackendQueue,
623
- span,
791
+ syncBackendPushQueue,
792
+ otelSpan,
793
+ devtoolsLatch,
794
+ backendPushBatchSize,
624
795
  }: {
625
- dbReady: Deferred.Deferred<void>
626
- syncBackendQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
627
- span: otel.Span | undefined
796
+ syncBackendPushQueue: BucketQueue.BucketQueue<LiveStoreEvent.EncodedWithMeta>
797
+ otelSpan: otel.Span | undefined
798
+ devtoolsLatch: Effect.Latch | undefined
799
+ backendPushBatchSize: number
628
800
  }) =>
629
801
  Effect.gen(function* () {
630
- const { syncBackend, dbLog } = yield* LeaderThreadCtx
802
+ const { syncBackend } = yield* LeaderThreadCtx
631
803
  if (syncBackend === undefined) return
632
804
 
633
- yield* dbReady
634
-
635
805
  while (true) {
636
806
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
637
807
 
638
- // TODO make batch size configurable
639
- const queueItems = yield* BucketQueue.takeBetween(syncBackendQueue, 1, 50)
808
+ const queueItems = yield* BucketQueue.takeBetween(syncBackendPushQueue, 1, backendPushBatchSize)
640
809
 
641
810
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
642
811
 
643
- span?.addEvent('backend-push', {
812
+ if (devtoolsLatch !== undefined) {
813
+ yield* devtoolsLatch.await
814
+ }
815
+
816
+ otelSpan?.addEvent('backend-push', {
644
817
  batchSize: queueItems.length,
645
818
  batch: TRACE_VERBOSE ? JSON.stringify(queueItems) : undefined,
646
819
  })
@@ -649,31 +822,119 @@ const backgroundBackendPushing = ({
649
822
  const pushResult = yield* syncBackend.push(queueItems.map((_) => _.toGlobal())).pipe(Effect.either)
650
823
 
651
824
  if (pushResult._tag === 'Left') {
652
- span?.addEvent('backend-push-error', { error: pushResult.left.toString() })
825
+ if (LS_DEV) {
826
+ yield* Effect.logDebug('handled backend-push-error', { error: pushResult.left.toString() })
827
+ }
828
+ otelSpan?.addEvent('backend-push-error', { error: pushResult.left.toString() })
653
829
  // wait for interrupt caused by background pulling which will then restart pushing
654
830
  return yield* Effect.never
655
831
  }
656
-
657
- const { metadata } = pushResult.right
658
-
659
- // TODO try to do this in a single query
660
- for (let i = 0; i < queueItems.length; i++) {
661
- const mutationEventEncoded = queueItems[i]!
662
- yield* execSql(
663
- dbLog,
664
- ...updateRows({
665
- tableName: MUTATION_LOG_META_TABLE,
666
- columns: mutationLogMetaTable.sqliteDef.columns,
667
- where: { idGlobal: mutationEventEncoded.id.global, idLocal: mutationEventEncoded.id.local },
668
- updateValues: { syncMetadataJson: metadata[i]! },
669
- }),
670
- )
671
- }
672
832
  }
673
- }).pipe(Effect.interruptible, Effect.withSpan('@livestore/common:leader-thread:syncing:backend-pushing'))
833
+ }).pipe(Effect.interruptible, Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pushing'))
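The push loop takes up to `backendPushBatchSize` events at a time, re-checks connectivity around the blocking take, and parks after a failed push until the pull side restarts it. An editorial async/await model of that control flow (the callback names are assumptions):

const backendPushLoopSketch = async <E>(
  io: {
    waitUntilConnected: () => Promise<void>
    takeBatch: (max: number) => Promise<Array<E>> // resolves once at least one item is available
    push: (batch: Array<E>) => Promise<{ ok: boolean }>
    parkUntilRestart: () => Promise<never> // pulling interrupts and restarts the loop on rebase
  },
  batchSize: number,
): Promise<void> => {
  while (true) {
    await io.waitUntilConnected()
    const batch = await io.takeBatch(batchSize)
    await io.waitUntilConnected()
    const result = await io.push(batch)
    if (!result.ok) {
      return io.parkUntilRestart()
    }
  }
}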
674
834
 
675
- const trimChangesetRows = (db: SynchronousDatabase, newHead: EventId.EventId) => {
835
+ const trimChangesetRows = (db: SqliteDb, newHead: EventSequenceNumber.EventSequenceNumber) => {
676
836
  // Since we're using the session changeset rows to query for the current head,
677
837
  // we're keeping at least one row for the current head, and thus are using `<` instead of `<=`
678
- db.execute(sql`DELETE FROM ${SESSION_CHANGESET_META_TABLE} WHERE idGlobal < ${newHead.global}`)
838
+ db.execute(sql`DELETE FROM ${SystemTables.SESSION_CHANGESET_META_TABLE} WHERE seqNumGlobal < ${newHead.global}`)
679
839
  }
840
+
841
+ interface PullQueueSet {
842
+ makeQueue: Effect.Effect<
843
+ Queue.Queue<{ payload: typeof SyncState.PayloadUpstream.Type; mergeCounter: number }>,
844
+ UnexpectedError,
845
+ Scope.Scope | LeaderThreadCtx
846
+ >
847
+ offer: (item: {
848
+ payload: typeof SyncState.PayloadUpstream.Type
849
+ mergeCounter: number
850
+ }) => Effect.Effect<void, UnexpectedError>
851
+ }
852
+
853
+ const makePullQueueSet = Effect.gen(function* () {
854
+ const set = new Set<Queue.Queue<{ payload: typeof SyncState.PayloadUpstream.Type; mergeCounter: number }>>()
855
+
856
+ yield* Effect.addFinalizer(() =>
857
+ Effect.gen(function* () {
858
+ for (const queue of set) {
859
+ yield* Queue.shutdown(queue)
860
+ }
861
+
862
+ set.clear()
863
+ }),
864
+ )
865
+
866
+ const makeQueue: PullQueueSet['makeQueue'] = Effect.gen(function* () {
867
+ const queue = yield* Queue.unbounded<{
868
+ payload: typeof SyncState.PayloadUpstream.Type
869
+ mergeCounter: number
870
+ }>().pipe(Effect.acquireRelease(Queue.shutdown))
871
+
872
+ yield* Effect.addFinalizer(() => Effect.sync(() => set.delete(queue)))
873
+
874
+ set.add(queue)
875
+
876
+ return queue
877
+ })
878
+
879
+ const offer: PullQueueSet['offer'] = (item) =>
880
+ Effect.gen(function* () {
881
+ // Short-circuit if the payload is an empty upstream advance
882
+ if (item.payload._tag === 'upstream-advance' && item.payload.newEvents.length === 0) {
883
+ return
884
+ }
885
+
886
+ for (const queue of set) {
887
+ yield* Queue.offer(queue, item)
888
+ }
889
+ })
890
+
891
+ return {
892
+ makeQueue,
893
+ offer,
894
+ }
895
+ })
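makePullQueueSet keeps one queue per connected client session, removes a queue when its scope closes, and broadcasts every merge payload to all queues while skipping empty upstream advances. An editorial plain-TypeScript model (arrays standing in for Effect queues):

const makePullQueueSetSketch = <P extends { _tag: string; newEvents?: ReadonlyArray<unknown> }>() => {
  const set = new Set<Array<{ payload: P; mergeCounter: number }>>()
  return {
    makeQueue: () => {
      const queue: Array<{ payload: P; mergeCounter: number }> = []
      set.add(queue)
      // The caller is responsible for calling `dispose` (the real code uses a finalizer)
      return { queue, dispose: () => set.delete(queue) }
    },
    offer: (item: { payload: P; mergeCounter: number }) => {
      // Short-circuit empty upstream advances, mirroring the check above
      if (item.payload._tag === 'upstream-advance' && item.payload.newEvents?.length === 0) return
      for (const queue of set) queue.push(item)
    },
  }
}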
896
+
897
+ const incrementMergeCounter = (mergeCounterRef: { current: number }) =>
898
+ Effect.gen(function* () {
899
+ const { dbState } = yield* LeaderThreadCtx
900
+ mergeCounterRef.current++
901
+ dbState.execute(
902
+ sql`INSERT OR REPLACE INTO ${SystemTables.LEADER_MERGE_COUNTER_TABLE} (id, mergeCounter) VALUES (0, ${mergeCounterRef.current})`,
903
+ )
904
+ return mergeCounterRef.current
905
+ })
906
+
907
+ const getMergeCounterFromDb = (dbState: SqliteDb) =>
908
+ Effect.gen(function* () {
909
+ const result = dbState.select<{ mergeCounter: number }>(
910
+ sql`SELECT mergeCounter FROM ${SystemTables.LEADER_MERGE_COUNTER_TABLE} WHERE id = 0`,
911
+ )
912
+ return result[0]?.mergeCounter ?? 0
913
+ })
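The merge counter is persisted in a single-row system table so it can be restored after a leader restart (`getMergeCounterFromDb` seeds `mergeCounterRef` on boot). A minimal editorial sketch against a generic SQLite handle; the handle type and the literal table name below are placeholders, not the real `SqliteDb`/`SystemTables` definitions:

interface SqliteHandleSketch {
  execute: (statement: string) => void
  select: <T>(query: string) => Array<T>
}

const incrementMergeCounterSketch = (db: SqliteHandleSketch, ref: { current: number }): number => {
  ref.current++
  db.execute(`INSERT OR REPLACE INTO leader_merge_counter (id, mergeCounter) VALUES (0, ${ref.current})`)
  return ref.current
}

const readMergeCounterSketch = (db: SqliteHandleSketch): number =>
  db.select<{ mergeCounter: number }>(`SELECT mergeCounter FROM leader_merge_counter WHERE id = 0`)[0]?.mergeCounter ?? 0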
914
+
915
+ const validatePushBatch = (
916
+ batch: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>,
917
+ pushHead: EventSequenceNumber.EventSequenceNumber,
918
+ ) =>
919
+ Effect.gen(function* () {
920
+ if (batch.length === 0) {
921
+ return
922
+ }
923
+
924
+ // Make sure the batch is monotonically increasing
925
+ for (let i = 1; i < batch.length; i++) {
926
+ if (EventSequenceNumber.isGreaterThanOrEqual(batch[i - 1]!.seqNum, batch[i]!.seqNum)) {
927
+ shouldNeverHappen(
928
+ `Events must be ordered in monotonically ascending order by eventNum. Received: [${batch.map((e) => EventSequenceNumber.toString(e.seqNum)).join(', ')}]`,
929
+ )
930
+ }
931
+ }
932
+
933
+ // Make sure the smallest sequence number is > pushHead
934
+ if (EventSequenceNumber.isGreaterThanOrEqual(pushHead, batch[0]!.seqNum)) {
935
+ return yield* LeaderAheadError.make({
936
+ minimumExpectedNum: pushHead,
937
+ providedNum: batch[0]!.seqNum,
938
+ })
939
+ }
940
+ })
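validatePushBatch enforces two invariants before a batch is accepted: sequence numbers must be strictly ascending within the batch, and the first event must come after the current push head (otherwise the leader is ahead and the client must rebase). The same checks, sketched editorially with plain numeric sequence numbers instead of `EventSequenceNumber`:

const validatePushBatchSketch = (batch: ReadonlyArray<{ seqNum: number }>, pushHead: number): string | undefined => {
  if (batch.length === 0) return undefined
  for (let i = 1; i < batch.length; i++) {
    if (batch[i - 1]!.seqNum >= batch[i]!.seqNum) {
      return `events must be in monotonically ascending order, received: ${batch.map((e) => e.seqNum).join(', ')}`
    }
  }
  if (batch[0]!.seqNum <= pushHead) {
    return `leader ahead: minimum expected > ${pushHead}, provided ${batch[0]!.seqNum}`
  }
  return undefined
}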