@livestore/common 0.3.0-dev.9 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (480)
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/__tests__/fixture.d.ts +83 -221
  3. package/dist/__tests__/fixture.d.ts.map +1 -1
  4. package/dist/__tests__/fixture.js +33 -11
  5. package/dist/__tests__/fixture.js.map +1 -1
  6. package/dist/adapter-types.d.ts +120 -64
  7. package/dist/adapter-types.d.ts.map +1 -1
  8. package/dist/adapter-types.js +39 -8
  9. package/dist/adapter-types.js.map +1 -1
  10. package/dist/bounded-collections.d.ts.map +1 -1
  11. package/dist/debug-info.d.ts +1 -1
  12. package/dist/debug-info.d.ts.map +1 -1
  13. package/dist/debug-info.js +1 -0
  14. package/dist/debug-info.js.map +1 -1
  15. package/dist/devtools/devtools-messages-client-session.d.ts +390 -0
  16. package/dist/devtools/devtools-messages-client-session.d.ts.map +1 -0
  17. package/dist/devtools/devtools-messages-client-session.js +97 -0
  18. package/dist/devtools/devtools-messages-client-session.js.map +1 -0
  19. package/dist/devtools/devtools-messages-common.d.ts +68 -0
  20. package/dist/devtools/devtools-messages-common.d.ts.map +1 -0
  21. package/dist/devtools/devtools-messages-common.js +60 -0
  22. package/dist/devtools/devtools-messages-common.js.map +1 -0
  23. package/dist/devtools/devtools-messages-leader.d.ts +394 -0
  24. package/dist/devtools/devtools-messages-leader.d.ts.map +1 -0
  25. package/dist/devtools/devtools-messages-leader.js +147 -0
  26. package/dist/devtools/devtools-messages-leader.js.map +1 -0
  27. package/dist/devtools/devtools-messages.d.ts +3 -580
  28. package/dist/devtools/devtools-messages.d.ts.map +1 -1
  29. package/dist/devtools/devtools-messages.js +3 -174
  30. package/dist/devtools/devtools-messages.js.map +1 -1
  31. package/dist/devtools/devtools-sessioninfo.d.ts +32 -0
  32. package/dist/devtools/devtools-sessioninfo.d.ts.map +1 -0
  33. package/dist/devtools/devtools-sessioninfo.js +36 -0
  34. package/dist/devtools/devtools-sessioninfo.js.map +1 -0
  35. package/dist/devtools/mod.d.ts +55 -0
  36. package/dist/devtools/mod.d.ts.map +1 -0
  37. package/dist/devtools/mod.js +33 -0
  38. package/dist/devtools/mod.js.map +1 -0
  39. package/dist/index.d.ts +7 -9
  40. package/dist/index.d.ts.map +1 -1
  41. package/dist/index.js +7 -9
  42. package/dist/index.js.map +1 -1
  43. package/dist/leader-thread/LeaderSyncProcessor.d.ts +36 -11
  44. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  45. package/dist/leader-thread/LeaderSyncProcessor.js +426 -252
  46. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  47. package/dist/leader-thread/connection.d.ts +34 -6
  48. package/dist/leader-thread/connection.d.ts.map +1 -1
  49. package/dist/leader-thread/connection.js +22 -7
  50. package/dist/leader-thread/connection.js.map +1 -1
  51. package/dist/leader-thread/eventlog.d.ts +27 -0
  52. package/dist/leader-thread/eventlog.d.ts.map +1 -0
  53. package/dist/leader-thread/eventlog.js +119 -0
  54. package/dist/leader-thread/eventlog.js.map +1 -0
  55. package/dist/leader-thread/leader-worker-devtools.d.ts.map +1 -1
  56. package/dist/leader-thread/leader-worker-devtools.js +155 -80
  57. package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
  58. package/dist/leader-thread/make-leader-thread-layer.d.ts +22 -9
  59. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  60. package/dist/leader-thread/make-leader-thread-layer.js +67 -45
  61. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  62. package/dist/leader-thread/materialize-event.d.ts +16 -0
  63. package/dist/leader-thread/materialize-event.d.ts.map +1 -0
  64. package/dist/leader-thread/materialize-event.js +109 -0
  65. package/dist/leader-thread/materialize-event.js.map +1 -0
  66. package/dist/leader-thread/mod.d.ts +1 -1
  67. package/dist/leader-thread/mod.d.ts.map +1 -1
  68. package/dist/leader-thread/mod.js +1 -1
  69. package/dist/leader-thread/mod.js.map +1 -1
  70. package/dist/leader-thread/recreate-db.d.ts +4 -2
  71. package/dist/leader-thread/recreate-db.d.ts.map +1 -1
  72. package/dist/leader-thread/recreate-db.js +28 -32
  73. package/dist/leader-thread/recreate-db.js.map +1 -1
  74. package/dist/leader-thread/shutdown-channel.d.ts +2 -5
  75. package/dist/leader-thread/shutdown-channel.d.ts.map +1 -1
  76. package/dist/leader-thread/shutdown-channel.js +2 -4
  77. package/dist/leader-thread/shutdown-channel.js.map +1 -1
  78. package/dist/leader-thread/types.d.ts +79 -38
  79. package/dist/leader-thread/types.d.ts.map +1 -1
  80. package/dist/leader-thread/types.js +1 -3
  81. package/dist/leader-thread/types.js.map +1 -1
  82. package/dist/make-client-session.d.ts +23 -0
  83. package/dist/make-client-session.d.ts.map +1 -0
  84. package/dist/make-client-session.js +57 -0
  85. package/dist/make-client-session.js.map +1 -0
  86. package/dist/materializer-helper.d.ts +23 -0
  87. package/dist/materializer-helper.d.ts.map +1 -0
  88. package/dist/materializer-helper.js +86 -0
  89. package/dist/materializer-helper.js.map +1 -0
  90. package/dist/otel.d.ts +2 -0
  91. package/dist/otel.d.ts.map +1 -1
  92. package/dist/otel.js +5 -0
  93. package/dist/otel.js.map +1 -1
  94. package/dist/rematerialize-from-eventlog.d.ts +14 -0
  95. package/dist/rematerialize-from-eventlog.d.ts.map +1 -0
  96. package/dist/rematerialize-from-eventlog.js +64 -0
  97. package/dist/rematerialize-from-eventlog.js.map +1 -0
  98. package/dist/schema/EventDef.d.ts +146 -0
  99. package/dist/schema/EventDef.d.ts.map +1 -0
  100. package/dist/schema/EventDef.js +58 -0
  101. package/dist/schema/EventDef.js.map +1 -0
  102. package/dist/schema/EventSequenceNumber.d.ts +57 -0
  103. package/dist/schema/EventSequenceNumber.d.ts.map +1 -0
  104. package/dist/schema/EventSequenceNumber.js +82 -0
  105. package/dist/schema/EventSequenceNumber.js.map +1 -0
  106. package/dist/schema/EventSequenceNumber.test.d.ts +2 -0
  107. package/dist/schema/EventSequenceNumber.test.d.ts.map +1 -0
  108. package/dist/schema/EventSequenceNumber.test.js +11 -0
  109. package/dist/schema/EventSequenceNumber.test.js.map +1 -0
  110. package/dist/schema/LiveStoreEvent.d.ts +257 -0
  111. package/dist/schema/LiveStoreEvent.d.ts.map +1 -0
  112. package/dist/schema/LiveStoreEvent.js +117 -0
  113. package/dist/schema/LiveStoreEvent.js.map +1 -0
  114. package/dist/schema/events.d.ts +2 -0
  115. package/dist/schema/events.d.ts.map +1 -0
  116. package/dist/schema/events.js +2 -0
  117. package/dist/schema/events.js.map +1 -0
  118. package/dist/schema/mod.d.ts +8 -6
  119. package/dist/schema/mod.d.ts.map +1 -1
  120. package/dist/schema/mod.js +8 -6
  121. package/dist/schema/mod.js.map +1 -1
  122. package/dist/schema/schema.d.ts +50 -32
  123. package/dist/schema/schema.d.ts.map +1 -1
  124. package/dist/schema/schema.js +36 -43
  125. package/dist/schema/schema.js.map +1 -1
  126. package/dist/schema/state/mod.d.ts +3 -0
  127. package/dist/schema/state/mod.d.ts.map +1 -0
  128. package/dist/schema/state/mod.js +3 -0
  129. package/dist/schema/state/mod.js.map +1 -0
  130. package/dist/schema/state/sqlite/client-document-def.d.ts +223 -0
  131. package/dist/schema/state/sqlite/client-document-def.d.ts.map +1 -0
  132. package/dist/schema/state/sqlite/client-document-def.js +170 -0
  133. package/dist/schema/state/sqlite/client-document-def.js.map +1 -0
  134. package/dist/schema/state/sqlite/client-document-def.test.d.ts +2 -0
  135. package/dist/schema/state/sqlite/client-document-def.test.d.ts.map +1 -0
  136. package/dist/schema/state/sqlite/client-document-def.test.js +201 -0
  137. package/dist/schema/state/sqlite/client-document-def.test.js.map +1 -0
  138. package/dist/schema/state/sqlite/db-schema/ast/sqlite.d.ts +69 -0
  139. package/dist/schema/state/sqlite/db-schema/ast/sqlite.d.ts.map +1 -0
  140. package/dist/schema/state/sqlite/db-schema/ast/sqlite.js +71 -0
  141. package/dist/schema/state/sqlite/db-schema/ast/sqlite.js.map +1 -0
  142. package/dist/schema/state/sqlite/db-schema/ast/validate.d.ts +3 -0
  143. package/dist/schema/state/sqlite/db-schema/ast/validate.d.ts.map +1 -0
  144. package/dist/schema/state/sqlite/db-schema/ast/validate.js +12 -0
  145. package/dist/schema/state/sqlite/db-schema/ast/validate.js.map +1 -0
  146. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.d.ts +90 -0
  147. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.d.ts.map +1 -0
  148. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.js +87 -0
  149. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.js.map +1 -0
  150. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.d.ts +2 -0
  151. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.d.ts.map +1 -0
  152. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.js +29 -0
  153. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.js.map +1 -0
  154. package/dist/schema/state/sqlite/db-schema/dsl/mod.d.ts +90 -0
  155. package/dist/schema/state/sqlite/db-schema/dsl/mod.d.ts.map +1 -0
  156. package/dist/schema/state/sqlite/db-schema/dsl/mod.js +41 -0
  157. package/dist/schema/state/sqlite/db-schema/dsl/mod.js.map +1 -0
  158. package/dist/schema/state/sqlite/db-schema/hash.d.ts +2 -0
  159. package/dist/schema/state/sqlite/db-schema/hash.d.ts.map +1 -0
  160. package/dist/schema/state/sqlite/db-schema/hash.js +14 -0
  161. package/dist/schema/state/sqlite/db-schema/hash.js.map +1 -0
  162. package/dist/schema/state/sqlite/db-schema/mod.d.ts +3 -0
  163. package/dist/schema/state/sqlite/db-schema/mod.d.ts.map +1 -0
  164. package/dist/schema/state/sqlite/db-schema/mod.js +3 -0
  165. package/dist/schema/state/sqlite/db-schema/mod.js.map +1 -0
  166. package/dist/schema/state/sqlite/mod.d.ts +17 -0
  167. package/dist/schema/state/sqlite/mod.d.ts.map +1 -0
  168. package/dist/schema/state/sqlite/mod.js +41 -0
  169. package/dist/schema/state/sqlite/mod.js.map +1 -0
  170. package/dist/schema/state/sqlite/query-builder/api.d.ts +294 -0
  171. package/dist/schema/state/sqlite/query-builder/api.d.ts.map +1 -0
  172. package/dist/schema/state/sqlite/query-builder/api.js +6 -0
  173. package/dist/schema/state/sqlite/query-builder/api.js.map +1 -0
  174. package/dist/schema/state/sqlite/query-builder/astToSql.d.ts +7 -0
  175. package/dist/schema/state/sqlite/query-builder/astToSql.d.ts.map +1 -0
  176. package/dist/schema/state/sqlite/query-builder/astToSql.js +190 -0
  177. package/dist/schema/state/sqlite/query-builder/astToSql.js.map +1 -0
  178. package/dist/schema/state/sqlite/query-builder/impl.d.ts +7 -0
  179. package/dist/schema/state/sqlite/query-builder/impl.d.ts.map +1 -0
  180. package/dist/schema/state/sqlite/query-builder/impl.js +286 -0
  181. package/dist/schema/state/sqlite/query-builder/impl.js.map +1 -0
  182. package/dist/schema/state/sqlite/query-builder/impl.test.d.ts +87 -0
  183. package/dist/schema/state/sqlite/query-builder/impl.test.d.ts.map +1 -0
  184. package/dist/schema/state/sqlite/query-builder/impl.test.js +563 -0
  185. package/dist/schema/state/sqlite/query-builder/impl.test.js.map +1 -0
  186. package/dist/{query-builder → schema/state/sqlite/query-builder}/mod.d.ts +7 -0
  187. package/dist/schema/state/sqlite/query-builder/mod.d.ts.map +1 -0
  188. package/dist/{query-builder → schema/state/sqlite/query-builder}/mod.js +7 -0
  189. package/dist/schema/state/sqlite/query-builder/mod.js.map +1 -0
  190. package/dist/schema/state/sqlite/schema-helpers.d.ts.map +1 -0
  191. package/dist/schema/{schema-helpers.js → state/sqlite/schema-helpers.js} +1 -1
  192. package/dist/schema/state/sqlite/schema-helpers.js.map +1 -0
  193. package/dist/schema/state/sqlite/system-tables.d.ts +574 -0
  194. package/dist/schema/state/sqlite/system-tables.d.ts.map +1 -0
  195. package/dist/schema/state/sqlite/system-tables.js +88 -0
  196. package/dist/schema/state/sqlite/system-tables.js.map +1 -0
  197. package/dist/schema/state/sqlite/table-def.d.ts +84 -0
  198. package/dist/schema/state/sqlite/table-def.d.ts.map +1 -0
  199. package/dist/schema/state/sqlite/table-def.js +36 -0
  200. package/dist/schema/state/sqlite/table-def.js.map +1 -0
  201. package/dist/schema-management/common.d.ts +7 -7
  202. package/dist/schema-management/common.d.ts.map +1 -1
  203. package/dist/schema-management/common.js.map +1 -1
  204. package/dist/schema-management/migrations.d.ts +6 -6
  205. package/dist/schema-management/migrations.d.ts.map +1 -1
  206. package/dist/schema-management/migrations.js +27 -18
  207. package/dist/schema-management/migrations.js.map +1 -1
  208. package/dist/schema-management/validate-schema.d.ts +8 -0
  209. package/dist/schema-management/validate-schema.d.ts.map +1 -0
  210. package/dist/schema-management/validate-schema.js +39 -0
  211. package/dist/schema-management/validate-schema.js.map +1 -0
  212. package/dist/sql-queries/misc.d.ts.map +1 -1
  213. package/dist/sql-queries/sql-queries.d.ts +1 -1
  214. package/dist/sql-queries/sql-queries.d.ts.map +1 -1
  215. package/dist/sql-queries/sql-queries.js.map +1 -1
  216. package/dist/sql-queries/sql-query-builder.d.ts +1 -1
  217. package/dist/sql-queries/sql-query-builder.d.ts.map +1 -1
  218. package/dist/sql-queries/sql-query-builder.js.map +1 -1
  219. package/dist/sql-queries/types.d.ts +2 -1
  220. package/dist/sql-queries/types.d.ts.map +1 -1
  221. package/dist/sql-queries/types.js.map +1 -1
  222. package/dist/sync/ClientSessionSyncProcessor.d.ts +40 -19
  223. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  224. package/dist/sync/ClientSessionSyncProcessor.js +149 -73
  225. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  226. package/dist/sync/next/compact-events.d.ts.map +1 -1
  227. package/dist/sync/next/compact-events.js +38 -35
  228. package/dist/sync/next/compact-events.js.map +1 -1
  229. package/dist/sync/next/facts.d.ts +21 -21
  230. package/dist/sync/next/facts.d.ts.map +1 -1
  231. package/dist/sync/next/facts.js +11 -11
  232. package/dist/sync/next/facts.js.map +1 -1
  233. package/dist/sync/next/history-dag-common.d.ts +9 -7
  234. package/dist/sync/next/history-dag-common.d.ts.map +1 -1
  235. package/dist/sync/next/history-dag-common.js +10 -5
  236. package/dist/sync/next/history-dag-common.js.map +1 -1
  237. package/dist/sync/next/history-dag.d.ts +0 -2
  238. package/dist/sync/next/history-dag.d.ts.map +1 -1
  239. package/dist/sync/next/history-dag.js +16 -14
  240. package/dist/sync/next/history-dag.js.map +1 -1
  241. package/dist/sync/next/rebase-events.d.ts +10 -8
  242. package/dist/sync/next/rebase-events.d.ts.map +1 -1
  243. package/dist/sync/next/rebase-events.js +18 -10
  244. package/dist/sync/next/rebase-events.js.map +1 -1
  245. package/dist/sync/next/test/compact-events.calculator.test.js +39 -34
  246. package/dist/sync/next/test/compact-events.calculator.test.js.map +1 -1
  247. package/dist/sync/next/test/compact-events.test.js +77 -77
  248. package/dist/sync/next/test/compact-events.test.js.map +1 -1
  249. package/dist/sync/next/test/{mutation-fixtures.d.ts → event-fixtures.d.ts} +35 -25
  250. package/dist/sync/next/test/event-fixtures.d.ts.map +1 -0
  251. package/dist/sync/next/test/{mutation-fixtures.js → event-fixtures.js} +81 -38
  252. package/dist/sync/next/test/event-fixtures.js.map +1 -0
  253. package/dist/sync/next/test/mod.d.ts +1 -1
  254. package/dist/sync/next/test/mod.d.ts.map +1 -1
  255. package/dist/sync/next/test/mod.js +1 -1
  256. package/dist/sync/next/test/mod.js.map +1 -1
  257. package/dist/sync/sync.d.ts +46 -21
  258. package/dist/sync/sync.d.ts.map +1 -1
  259. package/dist/sync/sync.js +10 -6
  260. package/dist/sync/sync.js.map +1 -1
  261. package/dist/sync/syncstate.d.ts +193 -84
  262. package/dist/sync/syncstate.d.ts.map +1 -1
  263. package/dist/sync/syncstate.js +305 -151
  264. package/dist/sync/syncstate.js.map +1 -1
  265. package/dist/sync/syncstate.test.js +267 -303
  266. package/dist/sync/syncstate.test.js.map +1 -1
  267. package/dist/sync/validate-push-payload.d.ts +2 -2
  268. package/dist/sync/validate-push-payload.d.ts.map +1 -1
  269. package/dist/sync/validate-push-payload.js +4 -4
  270. package/dist/sync/validate-push-payload.js.map +1 -1
  271. package/dist/util.d.ts +2 -2
  272. package/dist/util.d.ts.map +1 -1
  273. package/dist/version.d.ts +2 -2
  274. package/dist/version.d.ts.map +1 -1
  275. package/dist/version.js +2 -2
  276. package/dist/version.js.map +1 -1
  277. package/package.json +10 -4
  278. package/src/__tests__/fixture.ts +36 -15
  279. package/src/adapter-types.ts +107 -68
  280. package/src/debug-info.ts +1 -0
  281. package/src/devtools/devtools-messages-client-session.ts +142 -0
  282. package/src/devtools/devtools-messages-common.ts +115 -0
  283. package/src/devtools/devtools-messages-leader.ts +191 -0
  284. package/src/devtools/devtools-messages.ts +3 -246
  285. package/src/devtools/devtools-sessioninfo.ts +101 -0
  286. package/src/devtools/mod.ts +59 -0
  287. package/src/index.ts +7 -9
  288. package/src/leader-thread/LeaderSyncProcessor.ts +664 -394
  289. package/src/leader-thread/connection.ts +54 -9
  290. package/src/leader-thread/eventlog.ts +199 -0
  291. package/src/leader-thread/leader-worker-devtools.ts +227 -104
  292. package/src/leader-thread/make-leader-thread-layer.ts +121 -72
  293. package/src/leader-thread/materialize-event.ts +173 -0
  294. package/src/leader-thread/mod.ts +1 -1
  295. package/src/leader-thread/recreate-db.ts +33 -38
  296. package/src/leader-thread/shutdown-channel.ts +2 -4
  297. package/src/leader-thread/types.ts +84 -46
  298. package/src/make-client-session.ts +136 -0
  299. package/src/materializer-helper.ts +138 -0
  300. package/src/otel.ts +8 -0
  301. package/src/rematerialize-from-eventlog.ts +117 -0
  302. package/src/schema/EventDef.ts +227 -0
  303. package/src/schema/EventSequenceNumber.test.ts +12 -0
  304. package/src/schema/EventSequenceNumber.ts +121 -0
  305. package/src/schema/LiveStoreEvent.ts +240 -0
  306. package/src/schema/events.ts +1 -0
  307. package/src/schema/mod.ts +8 -6
  308. package/src/schema/schema.ts +88 -84
  309. package/src/schema/state/mod.ts +2 -0
  310. package/src/schema/state/sqlite/client-document-def.test.ts +238 -0
  311. package/src/schema/state/sqlite/client-document-def.ts +444 -0
  312. package/src/schema/state/sqlite/db-schema/ast/sqlite.ts +142 -0
  313. package/src/schema/state/sqlite/db-schema/ast/validate.ts +13 -0
  314. package/src/schema/state/sqlite/db-schema/dsl/__snapshots__/field-defs.test.ts.snap +206 -0
  315. package/src/schema/state/sqlite/db-schema/dsl/field-defs.test.ts +35 -0
  316. package/src/schema/state/sqlite/db-schema/dsl/field-defs.ts +242 -0
  317. package/src/schema/state/sqlite/db-schema/dsl/mod.ts +222 -0
  318. package/src/schema/state/sqlite/db-schema/hash.ts +14 -0
  319. package/src/schema/state/sqlite/db-schema/mod.ts +2 -0
  320. package/src/schema/state/sqlite/mod.ts +73 -0
  321. package/src/schema/state/sqlite/query-builder/api.ts +440 -0
  322. package/src/schema/state/sqlite/query-builder/astToSql.ts +232 -0
  323. package/src/schema/state/sqlite/query-builder/impl.test.ts +617 -0
  324. package/src/schema/state/sqlite/query-builder/impl.ts +351 -0
  325. package/src/{query-builder → schema/state/sqlite/query-builder}/mod.ts +7 -0
  326. package/src/schema/{schema-helpers.ts → state/sqlite/schema-helpers.ts} +1 -1
  327. package/src/schema/state/sqlite/system-tables.ts +117 -0
  328. package/src/schema/state/sqlite/table-def.ts +197 -0
  329. package/src/schema-management/common.ts +7 -7
  330. package/src/schema-management/migrations.ts +37 -31
  331. package/src/schema-management/validate-schema.ts +61 -0
  332. package/src/sql-queries/sql-queries.ts +1 -1
  333. package/src/sql-queries/sql-query-builder.ts +1 -2
  334. package/src/sql-queries/types.ts +3 -1
  335. package/src/sync/ClientSessionSyncProcessor.ts +218 -94
  336. package/src/sync/next/compact-events.ts +38 -35
  337. package/src/sync/next/facts.ts +43 -41
  338. package/src/sync/next/history-dag-common.ts +17 -10
  339. package/src/sync/next/history-dag.ts +16 -17
  340. package/src/sync/next/rebase-events.ts +29 -17
  341. package/src/sync/next/test/compact-events.calculator.test.ts +46 -46
  342. package/src/sync/next/test/compact-events.test.ts +79 -79
  343. package/src/sync/next/test/event-fixtures.ts +226 -0
  344. package/src/sync/next/test/mod.ts +1 -1
  345. package/src/sync/sync.ts +46 -21
  346. package/src/sync/syncstate.test.ts +312 -345
  347. package/src/sync/syncstate.ts +414 -224
  348. package/src/sync/validate-push-payload.ts +6 -6
  349. package/src/version.ts +2 -2
  350. package/dist/derived-mutations.d.ts +0 -109
  351. package/dist/derived-mutations.d.ts.map +0 -1
  352. package/dist/derived-mutations.js +0 -54
  353. package/dist/derived-mutations.js.map +0 -1
  354. package/dist/derived-mutations.test.d.ts +0 -2
  355. package/dist/derived-mutations.test.d.ts.map +0 -1
  356. package/dist/derived-mutations.test.js +0 -93
  357. package/dist/derived-mutations.test.js.map +0 -1
  358. package/dist/devtools/devtools-bridge.d.ts +0 -13
  359. package/dist/devtools/devtools-bridge.d.ts.map +0 -1
  360. package/dist/devtools/devtools-bridge.js +0 -2
  361. package/dist/devtools/devtools-bridge.js.map +0 -1
  362. package/dist/devtools/devtools-window-message.d.ts +0 -29
  363. package/dist/devtools/devtools-window-message.d.ts.map +0 -1
  364. package/dist/devtools/devtools-window-message.js +0 -33
  365. package/dist/devtools/devtools-window-message.js.map +0 -1
  366. package/dist/devtools/index.d.ts +0 -42
  367. package/dist/devtools/index.d.ts.map +0 -1
  368. package/dist/devtools/index.js +0 -48
  369. package/dist/devtools/index.js.map +0 -1
  370. package/dist/init-singleton-tables.d.ts +0 -4
  371. package/dist/init-singleton-tables.d.ts.map +0 -1
  372. package/dist/init-singleton-tables.js +0 -16
  373. package/dist/init-singleton-tables.js.map +0 -1
  374. package/dist/leader-thread/apply-mutation.d.ts +0 -11
  375. package/dist/leader-thread/apply-mutation.d.ts.map +0 -1
  376. package/dist/leader-thread/apply-mutation.js +0 -107
  377. package/dist/leader-thread/apply-mutation.js.map +0 -1
  378. package/dist/leader-thread/leader-sync-processor.d.ts +0 -47
  379. package/dist/leader-thread/leader-sync-processor.d.ts.map +0 -1
  380. package/dist/leader-thread/leader-sync-processor.js +0 -430
  381. package/dist/leader-thread/leader-sync-processor.js.map +0 -1
  382. package/dist/leader-thread/mutationlog.d.ts +0 -10
  383. package/dist/leader-thread/mutationlog.d.ts.map +0 -1
  384. package/dist/leader-thread/mutationlog.js +0 -28
  385. package/dist/leader-thread/mutationlog.js.map +0 -1
  386. package/dist/leader-thread/pull-queue-set.d.ts +0 -7
  387. package/dist/leader-thread/pull-queue-set.d.ts.map +0 -1
  388. package/dist/leader-thread/pull-queue-set.js +0 -39
  389. package/dist/leader-thread/pull-queue-set.js.map +0 -1
  390. package/dist/mutation.d.ts +0 -20
  391. package/dist/mutation.d.ts.map +0 -1
  392. package/dist/mutation.js +0 -57
  393. package/dist/mutation.js.map +0 -1
  394. package/dist/query-builder/api.d.ts +0 -190
  395. package/dist/query-builder/api.d.ts.map +0 -1
  396. package/dist/query-builder/api.js +0 -8
  397. package/dist/query-builder/api.js.map +0 -1
  398. package/dist/query-builder/impl.d.ts +0 -12
  399. package/dist/query-builder/impl.d.ts.map +0 -1
  400. package/dist/query-builder/impl.js +0 -244
  401. package/dist/query-builder/impl.js.map +0 -1
  402. package/dist/query-builder/impl.test.d.ts +0 -2
  403. package/dist/query-builder/impl.test.d.ts.map +0 -1
  404. package/dist/query-builder/impl.test.js +0 -212
  405. package/dist/query-builder/impl.test.js.map +0 -1
  406. package/dist/query-builder/mod.d.ts.map +0 -1
  407. package/dist/query-builder/mod.js.map +0 -1
  408. package/dist/query-info.d.ts +0 -38
  409. package/dist/query-info.d.ts.map +0 -1
  410. package/dist/query-info.js +0 -7
  411. package/dist/query-info.js.map +0 -1
  412. package/dist/rehydrate-from-mutationlog.d.ts +0 -14
  413. package/dist/rehydrate-from-mutationlog.d.ts.map +0 -1
  414. package/dist/rehydrate-from-mutationlog.js +0 -66
  415. package/dist/rehydrate-from-mutationlog.js.map +0 -1
  416. package/dist/schema/EventId.d.ts +0 -39
  417. package/dist/schema/EventId.d.ts.map +0 -1
  418. package/dist/schema/EventId.js +0 -38
  419. package/dist/schema/EventId.js.map +0 -1
  420. package/dist/schema/EventId.test.d.ts +0 -2
  421. package/dist/schema/EventId.test.d.ts.map +0 -1
  422. package/dist/schema/EventId.test.js +0 -11
  423. package/dist/schema/EventId.test.js.map +0 -1
  424. package/dist/schema/MutationEvent.d.ts +0 -167
  425. package/dist/schema/MutationEvent.d.ts.map +0 -1
  426. package/dist/schema/MutationEvent.js +0 -72
  427. package/dist/schema/MutationEvent.js.map +0 -1
  428. package/dist/schema/MutationEvent.test.d.ts +0 -2
  429. package/dist/schema/MutationEvent.test.d.ts.map +0 -1
  430. package/dist/schema/MutationEvent.test.js +0 -2
  431. package/dist/schema/MutationEvent.test.js.map +0 -1
  432. package/dist/schema/mutations.d.ts +0 -107
  433. package/dist/schema/mutations.d.ts.map +0 -1
  434. package/dist/schema/mutations.js +0 -42
  435. package/dist/schema/mutations.js.map +0 -1
  436. package/dist/schema/schema-helpers.d.ts.map +0 -1
  437. package/dist/schema/schema-helpers.js.map +0 -1
  438. package/dist/schema/system-tables.d.ts +0 -399
  439. package/dist/schema/system-tables.d.ts.map +0 -1
  440. package/dist/schema/system-tables.js +0 -59
  441. package/dist/schema/system-tables.js.map +0 -1
  442. package/dist/schema/table-def.d.ts +0 -156
  443. package/dist/schema/table-def.d.ts.map +0 -1
  444. package/dist/schema/table-def.js +0 -79
  445. package/dist/schema/table-def.js.map +0 -1
  446. package/dist/schema-management/validate-mutation-defs.d.ts +0 -8
  447. package/dist/schema-management/validate-mutation-defs.d.ts.map +0 -1
  448. package/dist/schema-management/validate-mutation-defs.js +0 -39
  449. package/dist/schema-management/validate-mutation-defs.js.map +0 -1
  450. package/dist/sync/client-session-sync-processor.d.ts +0 -45
  451. package/dist/sync/client-session-sync-processor.d.ts.map +0 -1
  452. package/dist/sync/client-session-sync-processor.js +0 -131
  453. package/dist/sync/client-session-sync-processor.js.map +0 -1
  454. package/dist/sync/next/test/mutation-fixtures.d.ts.map +0 -1
  455. package/dist/sync/next/test/mutation-fixtures.js.map +0 -1
  456. package/src/derived-mutations.test.ts +0 -101
  457. package/src/derived-mutations.ts +0 -170
  458. package/src/devtools/devtools-bridge.ts +0 -14
  459. package/src/devtools/devtools-window-message.ts +0 -27
  460. package/src/devtools/index.ts +0 -48
  461. package/src/init-singleton-tables.ts +0 -24
  462. package/src/leader-thread/apply-mutation.ts +0 -161
  463. package/src/leader-thread/mutationlog.ts +0 -46
  464. package/src/leader-thread/pull-queue-set.ts +0 -58
  465. package/src/mutation.ts +0 -91
  466. package/src/query-builder/api.ts +0 -289
  467. package/src/query-builder/impl.test.ts +0 -239
  468. package/src/query-builder/impl.ts +0 -285
  469. package/src/query-info.ts +0 -78
  470. package/src/rehydrate-from-mutationlog.ts +0 -119
  471. package/src/schema/EventId.test.ts +0 -12
  472. package/src/schema/EventId.ts +0 -60
  473. package/src/schema/MutationEvent.ts +0 -185
  474. package/src/schema/mutations.ts +0 -192
  475. package/src/schema/system-tables.ts +0 -105
  476. package/src/schema/table-def.ts +0 -343
  477. package/src/schema-management/validate-mutation-defs.ts +0 -63
  478. package/src/sync/next/test/mutation-fixtures.ts +0 -224
  479. package/tsconfig.json +0 -11
  480. /package/dist/schema/{schema-helpers.d.ts → state/sqlite/schema-helpers.d.ts} +0 -0
@@ -1,245 +1,387 @@
- import { isNotUndefined, shouldNeverHappen, TRACE_VERBOSE } from '@livestore/utils'
- import type { HttpClient, Scope } from '@livestore/utils/effect'
+ import { casesHandled, isNotUndefined, LS_DEV, shouldNeverHappen, TRACE_VERBOSE } from '@livestore/utils'
+ import type { HttpClient, Runtime, Scope, Tracer } from '@livestore/utils/effect'
  import {
  BucketQueue,
  Deferred,
  Effect,
  Exit,
  FiberHandle,
- Option,
  OtelTracer,
+ Queue,
  ReadonlyArray,
- Schema,
  Stream,
  Subscribable,
  SubscriptionRef,
  } from '@livestore/utils/effect'
  import type * as otel from '@opentelemetry/api'

- import type { SynchronousDatabase } from '../adapter-types.js'
+ import type { SqliteDb } from '../adapter-types.js'
  import { UnexpectedError } from '../adapter-types.js'
- import type { LiveStoreSchema, SessionChangesetMetaRow } from '../schema/mod.js'
- import {
- EventId,
- MUTATION_LOG_META_TABLE,
- MutationEvent,
- mutationLogMetaTable,
- SESSION_CHANGESET_META_TABLE,
- } from '../schema/mod.js'
- import { updateRows } from '../sql-queries/index.js'
- import { InvalidPushError } from '../sync/sync.js'
+ import type { LiveStoreSchema } from '../schema/mod.js'
+ import { EventSequenceNumber, getEventDef, LiveStoreEvent, SystemTables } from '../schema/mod.js'
+ import { LeaderAheadError } from '../sync/sync.js'
  import * as SyncState from '../sync/syncstate.js'
  import { sql } from '../util.js'
- import { makeApplyMutation } from './apply-mutation.js'
- import { execSql } from './connection.js'
- import { getBackendHeadFromDb, getLocalHeadFromDb, getMutationEventsSince, updateBackendHead } from './mutationlog.js'
- import type { InitialBlockingSyncContext, InitialSyncInfo, LeaderSyncProcessor } from './types.js'
+ import * as Eventlog from './eventlog.js'
+ import { rollback } from './materialize-event.js'
+ import type { InitialBlockingSyncContext, LeaderSyncProcessor } from './types.js'
  import { LeaderThreadCtx } from './types.js'

+ type LocalPushQueueItem = [
+ event: LiveStoreEvent.EncodedWithMeta,
+ deferred: Deferred.Deferred<void, LeaderAheadError> | undefined,
+ /** Used to determine whether the batch has become invalid due to a rejected local push batch */
+ generation: number,
+ ]
+
  /**
- * The LeaderSyncProcessor manages synchronization of mutations between
+ * The LeaderSyncProcessor manages synchronization of events between
  * the local state and the sync backend, ensuring efficient and orderly processing.
  *
  * In the LeaderSyncProcessor, pulling always has precedence over pushing.
  *
  * Responsibilities:
- * - Queueing incoming local mutations in a localPushMailbox.
- * - Broadcasting mutations to client sessions via pull queues.
- * - Pushing mutations to the sync backend.
+ * - Queueing incoming local events in a localPushesQueue.
+ * - Broadcasting events to client sessions via pull queues.
+ * - Pushing events to the sync backend.
  *
  * Notes:
  *
  * local push processing:
- * - localPushMailbox:
+ * - localPushesQueue:
  * - Maintains events in ascending order.
  * - Uses `Deferred` objects to resolve/reject events based on application success.
- * - Processes events from the mailbox, applying mutations in batches.
+ * - Processes events from the queue, applying events in batches.
  * - Controlled by a `Latch` to manage execution flow.
  * - The latch closes on pull receipt and re-opens post-pull completion.
  * - Processes up to `maxBatchSize` events per cycle.
  *
+ * Currently we're advancing the db read model and eventlog in lockstep, but we could also decouple this in the future
+ *
+ * Tricky concurrency scenarios:
+ * - Queued local push batches becoming invalid due to a prior local push item being rejected.
+ * Solution: Introduce a generation number for local push batches which is used to filter out old batches items in case of rejection.
+ *
  */
  export const makeLeaderSyncProcessor = ({
  schema,
- dbMissing,
- dbLog,
+ dbEventlogMissing,
+ dbEventlog,
+ dbState,
+ dbStateMissing,
  initialBlockingSyncContext,
+ onError,
+ params,
+ testing,
  }: {
  schema: LiveStoreSchema
- /** Only used to know whether we can safely query dbLog during setup execution */
- dbMissing: boolean
- dbLog: SynchronousDatabase
+ /** Only used to know whether we can safely query dbEventlog during setup execution */
+ dbEventlogMissing: boolean
+ dbEventlog: SqliteDb
+ dbState: SqliteDb
+ /** Only used to know whether we can safely query dbState during setup execution */
+ dbStateMissing: boolean
  initialBlockingSyncContext: InitialBlockingSyncContext
+ onError: 'shutdown' | 'ignore'
+ params: {
+ /**
+ * @default 10
+ */
+ localPushBatchSize?: number
+ /**
+ * @default 50
+ */
+ backendPushBatchSize?: number
+ }
+ testing: {
+ delays?: {
+ localPushProcessing?: Effect.Effect<void>
+ }
+ }
  }): Effect.Effect<LeaderSyncProcessor, UnexpectedError, Scope.Scope> =>
  Effect.gen(function* () {
- const syncBackendQueue = yield* BucketQueue.make<MutationEvent.EncodedWithMeta>()
+ const syncBackendPushQueue = yield* BucketQueue.make<LiveStoreEvent.EncodedWithMeta>()
+ const localPushBatchSize = params.localPushBatchSize ?? 10
+ const backendPushBatchSize = params.backendPushBatchSize ?? 50

  const syncStateSref = yield* SubscriptionRef.make<SyncState.SyncState | undefined>(undefined)

- const isLocalEvent = (mutationEventEncoded: MutationEvent.EncodedWithMeta) => {
- const mutationDef = schema.mutations.get(mutationEventEncoded.mutation)!
- return mutationDef.options.localOnly
+ const isClientEvent = (eventEncoded: LiveStoreEvent.EncodedWithMeta) => {
+ const { eventDef } = getEventDef(schema, eventEncoded.name)
+ return eventDef.options.clientOnly
  }

- const spanRef = { current: undefined as otel.Span | undefined }
+ const connectedClientSessionPullQueues = yield* makePullQueueSet
+
+ /**
+ * Tracks generations of queued local push events.
+ * If a local-push batch is rejected, all subsequent push queue items with the same generation are also rejected,
+ * even if they would be valid on their own.
+ */
+ // TODO get rid of this in favour of the `mergeGeneration` event sequence number field
+ const currentLocalPushGenerationRef = { current: 0 }
+
+ type MergeCounter = number
+ const mergeCounterRef = { current: dbStateMissing ? 0 : yield* getMergeCounterFromDb(dbState) }
+ const mergePayloads = new Map<MergeCounter, typeof SyncState.PayloadUpstream.Type>()
+
+ // This context depends on data from `boot`, we should find a better implementation to avoid this ref indirection.
+ const ctxRef = {
+ current: undefined as
+ | undefined
+ | {
+ otelSpan: otel.Span | undefined
+ span: Tracer.Span
+ devtoolsLatch: Effect.Latch | undefined
+ runtime: Runtime.Runtime<LeaderThreadCtx>
+ },
+ }

- const localPushesQueue = yield* BucketQueue.make<MutationEvent.EncodedWithMeta>()
+ const localPushesQueue = yield* BucketQueue.make<LocalPushQueueItem>()
  const localPushesLatch = yield* Effect.makeLatch(true)
  const pullLatch = yield* Effect.makeLatch(true)

+ /**
+ * Additionally to the `syncStateSref` we also need the `pushHeadRef` in order to prevent old/duplicate
+ * events from being pushed in a scenario like this:
+ * - client session A pushes e1
+ * - leader sync processor takes a bit and hasn't yet taken e1 from the localPushesQueue
+ * - client session B also pushes e1 (which should be rejected)
+ *
+ * Thus the purpoe of the pushHeadRef is the guard the integrity of the local push queue
+ */
+ const pushHeadRef = { current: EventSequenceNumber.ROOT }
+ const advancePushHead = (eventNum: EventSequenceNumber.EventSequenceNumber) => {
+ pushHeadRef.current = EventSequenceNumber.max(pushHeadRef.current, eventNum)
+ }
+
+ // NOTE: New events are only pushed to sync backend after successful local push processing
  const push: LeaderSyncProcessor['push'] = (newEvents, options) =>
  Effect.gen(function* () {
- // TODO validate batch
  if (newEvents.length === 0) return

- const waitForProcessing = options?.waitForProcessing ?? false
+ yield* validatePushBatch(newEvents, pushHeadRef.current)

- const deferreds = waitForProcessing
- ? yield* Effect.forEach(newEvents, () => Deferred.make<void, InvalidPushError>())
- : newEvents.map((_) => undefined)
+ advancePushHead(newEvents.at(-1)!.seqNum)

- // TODO validate batch ordering
- const mappedEvents = newEvents.map(
- (mutationEventEncoded, i) =>
- new MutationEvent.EncodedWithMeta({
- ...mutationEventEncoded,
- meta: { deferred: deferreds[i] },
- }),
- )
- yield* BucketQueue.offerAll(localPushesQueue, mappedEvents)
+ const waitForProcessing = options?.waitForProcessing ?? false
+ const generation = currentLocalPushGenerationRef.current

  if (waitForProcessing) {
- yield* Effect.all(deferreds as ReadonlyArray<Deferred.Deferred<void, InvalidPushError>>)
+ const deferreds = yield* Effect.forEach(newEvents, () => Deferred.make<void, LeaderAheadError>())
+
+ const items = newEvents.map(
+ (eventEncoded, i) => [eventEncoded, deferreds[i], generation] as LocalPushQueueItem,
+ )
+
+ yield* BucketQueue.offerAll(localPushesQueue, items)
+
+ yield* Effect.all(deferreds)
+ } else {
+ const items = newEvents.map((eventEncoded) => [eventEncoded, undefined, generation] as LocalPushQueueItem)
+ yield* BucketQueue.offerAll(localPushesQueue, items)
  }
  }).pipe(
- Effect.withSpan('@livestore/common:leader-thread:syncing:local-push', {
+ Effect.withSpan('@livestore/common:LeaderSyncProcessor:push', {
  attributes: {
  batchSize: newEvents.length,
  batch: TRACE_VERBOSE ? newEvents : undefined,
  },
- links: spanRef.current
- ? [{ _tag: 'SpanLink', span: OtelTracer.makeExternalSpan(spanRef.current.spanContext()), attributes: {} }]
- : undefined,
+ links: ctxRef.current?.span ? [{ _tag: 'SpanLink', span: ctxRef.current.span, attributes: {} }] : undefined,
  }),
  )

- const pushPartial: LeaderSyncProcessor['pushPartial'] = (mutationEventEncoded_) =>
+ const pushPartial: LeaderSyncProcessor['pushPartial'] = ({ event: { name, args }, clientId, sessionId }) =>
  Effect.gen(function* () {
  const syncState = yield* syncStateSref
  if (syncState === undefined) return shouldNeverHappen('Not initialized')

- const mutationDef =
- schema.mutations.get(mutationEventEncoded_.mutation) ??
- shouldNeverHappen(`Unknown mutation: ${mutationEventEncoded_.mutation}`)
+ const { eventDef } = getEventDef(schema, name)

- const mutationEventEncoded = new MutationEvent.EncodedWithMeta({
- ...mutationEventEncoded_,
- ...EventId.nextPair(syncState.localHead, mutationDef.options.localOnly),
+ const eventEncoded = new LiveStoreEvent.EncodedWithMeta({
+ name,
+ args,
+ clientId,
+ sessionId,
+ ...EventSequenceNumber.nextPair(syncState.localHead, eventDef.options.clientOnly),
  })

- yield* push([mutationEventEncoded])
- }).pipe(Effect.catchTag('InvalidPushError', Effect.orDie))
+ yield* push([eventEncoded])
+ }).pipe(Effect.catchTag('LeaderAheadError', Effect.orDie))

  // Starts various background loops
- const boot: LeaderSyncProcessor['boot'] = ({ dbReady }) =>
- Effect.gen(function* () {
- const span = yield* OtelTracer.currentOtelSpan.pipe(Effect.catchAll(() => Effect.succeed(undefined)))
- spanRef.current = span
+ const boot: LeaderSyncProcessor['boot'] = Effect.gen(function* () {
+ const span = yield* Effect.currentSpan.pipe(Effect.orDie)
+ const otelSpan = yield* OtelTracer.currentOtelSpan.pipe(Effect.catchAll(() => Effect.succeed(undefined)))
+ const { devtools, shutdownChannel } = yield* LeaderThreadCtx
+ const runtime = yield* Effect.runtime<LeaderThreadCtx>()
+
+ ctxRef.current = {
+ otelSpan,
+ span,
+ devtoolsLatch: devtools.enabled ? devtools.syncBackendLatch : undefined,
+ runtime,
+ }

- const initialBackendHead = dbMissing ? EventId.ROOT.global : getBackendHeadFromDb(dbLog)
- const initialLocalHead = dbMissing ? EventId.ROOT : getLocalHeadFromDb(dbLog)
+ const initialLocalHead = dbEventlogMissing ? EventSequenceNumber.ROOT : Eventlog.getClientHeadFromDb(dbEventlog)

- if (initialBackendHead > initialLocalHead.global) {
- return shouldNeverHappen(
- `During boot the backend head (${initialBackendHead}) should never be greater than the local head (${initialLocalHead.global})`,
- )
+ const initialBackendHead = dbEventlogMissing
+ ? EventSequenceNumber.ROOT.global
+ : Eventlog.getBackendHeadFromDb(dbEventlog)
+
+ if (initialBackendHead > initialLocalHead.global) {
+ return shouldNeverHappen(
+ `During boot the backend head (${initialBackendHead}) should never be greater than the local head (${initialLocalHead.global})`,
+ )
+ }
+
+ const pendingEvents = dbEventlogMissing
+ ? []
+ : yield* Eventlog.getEventsSince({ global: initialBackendHead, client: EventSequenceNumber.clientDefault })
+
+ const initialSyncState = new SyncState.SyncState({
+ pending: pendingEvents,
+ upstreamHead: { global: initialBackendHead, client: EventSequenceNumber.clientDefault },
+ localHead: initialLocalHead,
+ })
+
+ /** State transitions need to happen atomically, so we use a Ref to track the state */
+ yield* SubscriptionRef.set(syncStateSref, initialSyncState)
+
+ // Rehydrate sync queue
+ if (pendingEvents.length > 0) {
+ const globalPendingEvents = pendingEvents
+ // Don't sync clientOnly events
+ .filter((eventEncoded) => {
+ const { eventDef } = getEventDef(schema, eventEncoded.name)
+ return eventDef.options.clientOnly === false
+ })
+
+ if (globalPendingEvents.length > 0) {
+ yield* BucketQueue.offerAll(syncBackendPushQueue, globalPendingEvents)
  }
+ }

- const pendingMutationEvents = yield* getMutationEventsSince({
- global: initialBackendHead,
- local: EventId.localDefault,
- }).pipe(Effect.map(ReadonlyArray.map((_) => new MutationEvent.EncodedWithMeta(_))))
-
- const initialSyncState = new SyncState.SyncState({
- pending: pendingMutationEvents,
- // On the leader we don't need a rollback tail beyond `pending` items
- rollbackTail: [],
- upstreamHead: { global: initialBackendHead, local: EventId.localDefault },
- localHead: initialLocalHead,
+ const shutdownOnError = (cause: unknown) =>
+ Effect.gen(function* () {
+ if (onError === 'shutdown') {
+ yield* shutdownChannel.send(UnexpectedError.make({ cause }))
+ yield* Effect.die(cause)
+ }
  })

- /** State transitions need to happen atomically, so we use a Ref to track the state */
- yield* SubscriptionRef.set(syncStateSref, initialSyncState)
+ yield* backgroundApplyLocalPushes({
+ localPushesLatch,
+ localPushesQueue,
+ pullLatch,
+ syncStateSref,
+ syncBackendPushQueue,
+ schema,
+ isClientEvent,
+ otelSpan,
+ currentLocalPushGenerationRef,
+ connectedClientSessionPullQueues,
+ mergeCounterRef,
+ mergePayloads,
+ localPushBatchSize,
+ testing: {
+ delay: testing?.delays?.localPushProcessing,
+ },
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped)
+
+ const backendPushingFiberHandle = yield* FiberHandle.make()
+ const backendPushingEffect = backgroundBackendPushing({
+ syncBackendPushQueue,
+ otelSpan,
+ devtoolsLatch: ctxRef.current?.devtoolsLatch,
+ backendPushBatchSize,
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError))
+
+ yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect)
+
+ yield* backgroundBackendPulling({
+ initialBackendHead,
+ isClientEvent,
+ restartBackendPushing: (filteredRebasedPending) =>
+ Effect.gen(function* () {
+ // Stop current pushing fiber
+ yield* FiberHandle.clear(backendPushingFiberHandle)

- // Rehydrate sync queue
- if (pendingMutationEvents.length > 0) {
- const filteredBatch = pendingMutationEvents
- // Don't sync localOnly mutations
- .filter((mutationEventEncoded) => {
- const mutationDef = schema.mutations.get(mutationEventEncoded.mutation)!
- return mutationDef.options.localOnly === false
- })
+ // Reset the sync backend push queue
+ yield* BucketQueue.clear(syncBackendPushQueue)
+ yield* BucketQueue.offerAll(syncBackendPushQueue, filteredRebasedPending)

- yield* BucketQueue.offerAll(syncBackendQueue, filteredBatch)
- }
+ // Restart pushing fiber
+ yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect)
+ }),
+ syncStateSref,
+ localPushesLatch,
+ pullLatch,
+ otelSpan,
+ initialBlockingSyncContext,
+ devtoolsLatch: ctxRef.current?.devtoolsLatch,
+ connectedClientSessionPullQueues,
+ mergeCounterRef,
+ mergePayloads,
+ advancePushHead,
+ }).pipe(Effect.tapCauseLogPretty, Effect.catchAllCause(shutdownOnError), Effect.forkScoped)
+
+ return { initialLeaderHead: initialLocalHead }
+ }).pipe(Effect.withSpanScoped('@livestore/common:LeaderSyncProcessor:boot'))
+
+ const pull: LeaderSyncProcessor['pull'] = ({ cursor }) =>
+ Effect.gen(function* () {
+ const queue = yield* pullQueue({ cursor })
+ return Stream.fromQueue(queue)
+ }).pipe(Stream.unwrapScoped)
+
+ const pullQueue: LeaderSyncProcessor['pullQueue'] = ({ cursor }) => {
+ const runtime = ctxRef.current?.runtime ?? shouldNeverHappen('Not initialized')
+ return Effect.gen(function* () {
+ const queue = yield* connectedClientSessionPullQueues.makeQueue
+ const payloadsSinceCursor = Array.from(mergePayloads.entries())
+ .map(([mergeCounter, payload]) => ({ payload, mergeCounter }))
+ .filter(({ mergeCounter }) => mergeCounter > cursor.mergeCounter)
+ .toSorted((a, b) => a.mergeCounter - b.mergeCounter)
+ .map(({ payload, mergeCounter }) => {
+ if (payload._tag === 'upstream-advance') {
+ return {
+ payload: {
+ _tag: 'upstream-advance' as const,
+ newEvents: ReadonlyArray.dropWhile(payload.newEvents, (eventEncoded) =>
+ EventSequenceNumber.isGreaterThanOrEqual(cursor.eventNum, eventEncoded.seqNum),
+ ),
+ },
+ mergeCounter,
+ }
+ } else {
+ return { payload, mergeCounter }
+ }
+ })

- yield* backgroundApplyLocalPushes({
- localPushesLatch,
- localPushesQueue,
- pullLatch,
- syncStateSref,
- syncBackendQueue,
- schema,
- isLocalEvent,
- span,
- }).pipe(Effect.tapCauseLogPretty, Effect.forkScoped)
-
- const backendPushingFiberHandle = yield* FiberHandle.make()
-
- yield* FiberHandle.run(
- backendPushingFiberHandle,
- backgroundBackendPushing({ dbReady, syncBackendQueue, span }).pipe(Effect.tapCauseLogPretty),
- )
+ yield* queue.offerAll(payloadsSinceCursor)

- yield* backgroundBackendPulling({
- dbReady,
- initialBackendHead,
- isLocalEvent,
- restartBackendPushing: (filteredRebasedPending) =>
- Effect.gen(function* () {
- // Stop current pushing fiber
- yield* FiberHandle.clear(backendPushingFiberHandle)
-
- // Reset the sync queue
- yield* BucketQueue.clear(syncBackendQueue)
- yield* BucketQueue.offerAll(syncBackendQueue, filteredRebasedPending)
-
- // Restart pushing fiber
- yield* FiberHandle.run(
- backendPushingFiberHandle,
- backgroundBackendPushing({ dbReady, syncBackendQueue, span }).pipe(Effect.tapCauseLogPretty),
- )
- }),
- syncStateSref,
- localPushesLatch,
- pullLatch,
- span,
- initialBlockingSyncContext,
- }).pipe(Effect.tapCauseLogPretty, Effect.forkScoped)
- }).pipe(Effect.withSpanScoped('@livestore/common:leader-thread:syncing'))
+ return queue
+ }).pipe(Effect.provide(runtime))
+ }
+
+ const syncState = Subscribable.make({
+ get: Effect.gen(function* () {
+ const syncState = yield* syncStateSref
+ if (syncState === undefined) return shouldNeverHappen('Not initialized')
+ return syncState
+ }),
+ changes: syncStateSref.changes.pipe(Stream.filter(isNotUndefined)),
+ })

  return {
+ pull,
+ pullQueue,
  push,
  pushPartial,
  boot,
- syncState: Subscribable.make({
- get: Effect.gen(function* () {
- const syncState = yield* syncStateSref
- if (syncState === undefined) return shouldNeverHappen('Not initialized')
- return syncState
- }),
- changes: syncStateSref.changes.pipe(Stream.filter(isNotUndefined)),
- }),
+ syncState,
+ getMergeCounter: () => mergeCounterRef.current,
  } satisfies LeaderSyncProcessor
  })

@@ -248,194 +390,264 @@ const backgroundApplyLocalPushes = ({
248
390
  localPushesQueue,
249
391
  pullLatch,
250
392
  syncStateSref,
251
- syncBackendQueue,
393
+ syncBackendPushQueue,
252
394
  schema,
253
- isLocalEvent,
254
- span,
395
+ isClientEvent,
396
+ otelSpan,
397
+ currentLocalPushGenerationRef,
398
+ connectedClientSessionPullQueues,
399
+ mergeCounterRef,
400
+ mergePayloads,
401
+ localPushBatchSize,
402
+ testing,
255
403
  }: {
256
404
  pullLatch: Effect.Latch
257
405
  localPushesLatch: Effect.Latch
258
- localPushesQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
406
+ localPushesQueue: BucketQueue.BucketQueue<LocalPushQueueItem>
259
407
  syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
260
- syncBackendQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
408
+ syncBackendPushQueue: BucketQueue.BucketQueue<LiveStoreEvent.EncodedWithMeta>
261
409
  schema: LiveStoreSchema
262
- isLocalEvent: (mutationEventEncoded: MutationEvent.EncodedWithMeta) => boolean
263
- span: otel.Span | undefined
410
+ isClientEvent: (eventEncoded: LiveStoreEvent.EncodedWithMeta) => boolean
411
+ otelSpan: otel.Span | undefined
412
+ currentLocalPushGenerationRef: { current: number }
413
+ connectedClientSessionPullQueues: PullQueueSet
414
+ mergeCounterRef: { current: number }
415
+ mergePayloads: Map<number, typeof SyncState.PayloadUpstream.Type>
416
+ localPushBatchSize: number
417
+ testing: {
418
+ delay: Effect.Effect<void> | undefined
419
+ }
264
420
  }) =>
265
421
  Effect.gen(function* () {
266
- const { connectedClientSessionPullQueues } = yield* LeaderThreadCtx
267
-
268
- const applyMutationItems = yield* makeApplyMutationItems
269
-
270
422
  while (true) {
271
- // TODO make this configurable
272
- const newEvents = yield* BucketQueue.takeBetween(localPushesQueue, 1, 10)
423
+ if (testing.delay !== undefined) {
424
+ yield* testing.delay.pipe(Effect.withSpan('localPushProcessingDelay'))
425
+ }
426
+
427
+ const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, localPushBatchSize)
273
428
 
274
429
  // Wait for the backend pulling to finish
275
430
  yield* localPushesLatch.await
276
431
 
277
- // Prevent the backend pulling from starting until this local push is finished
432
+ // Prevent backend pull processing until this local push is finished
278
433
  yield* pullLatch.close
279
434
 
435
+ // Since the generation might have changed since enqueuing, we need to filter out items with older generation
436
+ // It's important that we filter after we got localPushesLatch, otherwise we might filter with the old generation
437
+ const filteredBatchItems = batchItems
438
+ .filter(([_1, _2, generation]) => generation === currentLocalPushGenerationRef.current)
439
+ .map(([eventEncoded, deferred]) => [eventEncoded, deferred] as const)
440
+
441
+ if (filteredBatchItems.length === 0) {
442
+ // console.log('dropping old-gen batch', currentLocalPushGenerationRef.current)
443
+ // Allow the backend pulling to start
444
+ yield* pullLatch.open
445
+ continue
446
+ }
447
+
448
+ const [newEvents, deferreds] = ReadonlyArray.unzip(filteredBatchItems)
449
+
280
450
  const syncState = yield* syncStateSref
281
451
  if (syncState === undefined) return shouldNeverHappen('Not initialized')
282
452
 
283
- const updateResult = SyncState.updateSyncState({
453
+ const mergeResult = SyncState.merge({
284
454
  syncState,
285
455
  payload: { _tag: 'local-push', newEvents },
286
- isLocalEvent,
287
- isEqualEvent: MutationEvent.isEqualEncoded,
456
+ isClientEvent,
457
+ isEqualEvent: LiveStoreEvent.isEqualEncoded,
288
458
  })
289
459
 
290
- if (updateResult._tag === 'rebase') {
291
- return shouldNeverHappen('The leader thread should never have to rebase due to a local push')
292
- } else if (updateResult._tag === 'reject') {
293
- span?.addEvent('local-push:reject', {
294
- batchSize: newEvents.length,
295
- updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
296
- })
460
+ const mergeCounter = yield* incrementMergeCounter(mergeCounterRef)
297
461
 
298
- const providedId = newEvents.at(0)!.id
299
- const remainingEvents = yield* BucketQueue.takeAll(localPushesQueue)
300
- const allEvents = [...newEvents, ...remainingEvents]
301
- yield* Effect.forEach(allEvents, (mutationEventEncoded) =>
302
- mutationEventEncoded.meta.deferred
303
- ? Deferred.fail(
304
- mutationEventEncoded.meta.deferred,
305
- InvalidPushError.make({
306
- // TODO improve error handling so it differentiates between a push being rejected
307
- // because of itself or because of another push
308
- reason: {
309
- _tag: 'LeaderAhead',
310
- minimumExpectedId: updateResult.expectedMinimumId,
311
- providedId,
312
- },
313
- }),
314
- )
315
- : Effect.void,
316
- )
462
+ switch (mergeResult._tag) {
463
+ case 'unexpected-error': {
464
+ otelSpan?.addEvent(`[${mergeCounter}]:push:unexpected-error`, {
465
+ batchSize: newEvents.length,
466
+ newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
467
+ })
468
+ return yield* Effect.fail(mergeResult.cause)
469
+ }
470
+ case 'rebase': {
471
+ return shouldNeverHappen('The leader thread should never have to rebase due to a local push')
472
+ }
473
+ case 'reject': {
474
+ otelSpan?.addEvent(`[${mergeCounter}]:push:reject`, {
475
+ batchSize: newEvents.length,
476
+ mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
477
+ })
317
478
 
318
- // Allow the backend pulling to start
319
- yield* pullLatch.open
479
+ // TODO: how to test this?
480
+ currentLocalPushGenerationRef.current++
320
481
 
321
- // In this case we're skipping state update and down/upstream processing
322
- // We've cleared the local push queue and are now waiting for new local pushes / backend pulls
323
- continue
482
+ const nextGeneration = currentLocalPushGenerationRef.current
483
+
484
+ const providedNum = newEvents.at(0)!.seqNum
485
+ // All subsequent pushes with same generation should be rejected as well
486
+ // We're also handling the case where the localPushQueue already contains events
487
+ // from the next generation which we preserve in the queue
488
+ const remainingEventsMatchingGeneration = yield* BucketQueue.takeSplitWhere(
489
+ localPushesQueue,
490
+ (item) => item[2] >= nextGeneration,
491
+ )
492
+
493
+ // TODO we still need to better understand and handle this scenario
494
+ if (LS_DEV && (yield* BucketQueue.size(localPushesQueue)) > 0) {
495
+ console.log('localPushesQueue is not empty', yield* BucketQueue.size(localPushesQueue))
496
+ debugger
497
+ }
498
+
499
+ const allDeferredsToReject = [
500
+ ...deferreds,
501
+ ...remainingEventsMatchingGeneration.map(([_, deferred]) => deferred),
502
+ ].filter(isNotUndefined)
503
+
504
+ yield* Effect.forEach(allDeferredsToReject, (deferred) =>
505
+ Deferred.fail(
506
+ deferred,
507
+ LeaderAheadError.make({
508
+ minimumExpectedNum: mergeResult.expectedMinimumId,
509
+ providedNum,
510
+ // nextGeneration,
511
+ }),
512
+ ),
513
+ )
514
+
515
+ // Allow the backend pulling to start
516
+ yield* pullLatch.open
517
+
518
+ // In this case we're skipping state update and down/upstream processing
519
+ // We've cleared the local push queue and are now waiting for new local pushes / backend pulls
520
+ continue
521
+ }
522
+ case 'advance': {
523
+ break
524
+ }
525
+ default: {
526
+ casesHandled(mergeResult)
527
+ }
324
528
  }
325
529
 
326
- yield* SubscriptionRef.set(syncStateSref, updateResult.newSyncState)
530
+ yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState)
327
531
 
328
532
  yield* connectedClientSessionPullQueues.offer({
329
- payload: { _tag: 'upstream-advance', newEvents: updateResult.newEvents },
330
- remaining: 0,
533
+ payload: SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }),
534
+ mergeCounter,
331
535
  })
536
+ mergePayloads.set(mergeCounter, SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }))
332
537
 
333
- span?.addEvent('local-push', {
538
+ otelSpan?.addEvent(`[${mergeCounter}]:push:advance`, {
334
539
  batchSize: newEvents.length,
335
- updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
540
+ mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
336
541
  })
337
542
 
338
- // Don't sync localOnly mutations
339
- const filteredBatch = updateResult.newEvents.filter((mutationEventEncoded) => {
340
- const mutationDef = schema.mutations.get(mutationEventEncoded.mutation)!
341
- return mutationDef.options.localOnly === false
543
+ // Don't sync clientOnly events
544
+ const filteredBatch = mergeResult.newEvents.filter((eventEncoded) => {
545
+ const { eventDef } = getEventDef(schema, eventEncoded.name)
546
+ return eventDef.options.clientOnly === false
342
547
  })
343
548
 
344
- yield* BucketQueue.offerAll(syncBackendQueue, filteredBatch)
549
+ yield* BucketQueue.offerAll(syncBackendPushQueue, filteredBatch)
345
550
 
346
- yield* applyMutationItems({ batchItems: newEvents })
551
+ yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds })
347
552
 
348
553
  // Allow the backend pulling to start
349
554
  yield* pullLatch.open
350
555
  }
351
556
  })
352
557
 
353
- type ApplyMutationItems = (_: {
354
- batchItems: ReadonlyArray<MutationEvent.EncodedWithMeta>
355
- }) => Effect.Effect<void, UnexpectedError>
558
+ type MaterializeEventsBatch = (_: {
559
+ batchItems: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>
560
+ /**
561
+ * The deferreds are used by the caller to know when the event has been processed.
562
+ * Indexes are aligned with `batchItems`
563
+ */
564
+ deferreds: ReadonlyArray<Deferred.Deferred<void, LeaderAheadError> | undefined> | undefined
565
+ }) => Effect.Effect<void, UnexpectedError, LeaderThreadCtx>
356
566
 
357
567
  // TODO how to handle errors gracefully
358
- const makeApplyMutationItems: Effect.Effect<ApplyMutationItems, UnexpectedError, LeaderThreadCtx | Scope.Scope> =
568
+ const materializeEventsBatch: MaterializeEventsBatch = ({ batchItems, deferreds }) =>
359
569
  Effect.gen(function* () {
360
- const leaderThreadCtx = yield* LeaderThreadCtx
361
- const { db, dbLog } = leaderThreadCtx
570
+ const { dbState: db, dbEventlog, materializeEvent } = yield* LeaderThreadCtx
362
571
 
363
- const applyMutation = yield* makeApplyMutation
572
+ // NOTE We always start a transaction to ensure consistency between db and eventlog (even for single-item batches)
573
+ db.execute('BEGIN TRANSACTION', undefined) // Start the transaction
574
+ dbEventlog.execute('BEGIN TRANSACTION', undefined) // Start the transaction
364
575
 
365
- return ({ batchItems }) =>
576
+ yield* Effect.addFinalizer((exit) =>
366
577
  Effect.gen(function* () {
367
- db.execute('BEGIN TRANSACTION', undefined) // Start the transaction
368
- dbLog.execute('BEGIN TRANSACTION', undefined) // Start the transaction
369
-
370
- yield* Effect.addFinalizer((exit) =>
371
- Effect.gen(function* () {
372
- if (Exit.isSuccess(exit)) return
373
-
374
- // Rollback in case of an error
375
- db.execute('ROLLBACK', undefined)
376
- dbLog.execute('ROLLBACK', undefined)
377
- }),
378
- )
578
+ if (Exit.isSuccess(exit)) return
379
579
 
380
- for (let i = 0; i < batchItems.length; i++) {
381
- const { meta, ...mutationEventEncoded } = batchItems[i]!
580
+ // Rollback in case of an error
581
+ db.execute('ROLLBACK', undefined)
582
+ dbEventlog.execute('ROLLBACK', undefined)
583
+ }),
584
+ )
382
585
 
383
- yield* applyMutation(mutationEventEncoded)
586
+ for (let i = 0; i < batchItems.length; i++) {
587
+ const { sessionChangeset } = yield* materializeEvent(batchItems[i]!)
588
+ batchItems[i]!.meta.sessionChangeset = sessionChangeset
384
589
 
385
- if (meta?.deferred) {
386
- yield* Deferred.succeed(meta.deferred, void 0)
387
- }
388
- }
590
+ if (deferreds?.[i] !== undefined) {
591
+ yield* Deferred.succeed(deferreds[i]!, void 0)
592
+ }
593
+ }
389
594
 
390
- db.execute('COMMIT', undefined) // Commit the transaction
391
- dbLog.execute('COMMIT', undefined) // Commit the transaction
392
- }).pipe(
393
- Effect.uninterruptible,
394
- Effect.scoped,
395
- Effect.withSpan('@livestore/common:leader-thread:syncing:applyMutationItems', {
396
- attributes: { count: batchItems.length },
397
- }),
398
- Effect.tapCauseLogPretty,
399
- UnexpectedError.mapToUnexpectedError,
400
- )
401
- })
595
+ db.execute('COMMIT', undefined) // Commit the transaction
596
+ dbEventlog.execute('COMMIT', undefined) // Commit the transaction
597
+ }).pipe(
598
+ Effect.uninterruptible,
599
+ Effect.scoped,
600
+ Effect.withSpan('@livestore/common:LeaderSyncProcessor:materializeEventItems', {
601
+ attributes: { batchSize: batchItems.length },
602
+ }),
603
+ Effect.tapCauseLogPretty,
604
+ UnexpectedError.mapToUnexpectedError,
605
+ )
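materializeEventsBatch above brackets the whole batch in a transaction on both the state database and the eventlog so that the two either commit or roll back together. The following minimal sketch shows that pattern in plain TypeScript, with a hypothetical Db interface and try/catch in place of the Effect finalizer used above; it is an illustration, not the actual implementation.

// Hypothetical minimal handle -- stands in for the SqliteDb instances used above.
interface Db {
  execute: (statement: string) => void
}

// Run `body` atomically across the state db and the eventlog db:
// both commit on success, both roll back on failure.
const withDualTransaction = (dbState: Db, dbEventlog: Db, body: () => void): void => {
  dbState.execute('BEGIN TRANSACTION')
  dbEventlog.execute('BEGIN TRANSACTION')
  try {
    body()
    dbState.execute('COMMIT')
    dbEventlog.execute('COMMIT')
  } catch (error) {
    dbState.execute('ROLLBACK')
    dbEventlog.execute('ROLLBACK')
    throw error
  }
}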
402
606
 
403
607
  const backgroundBackendPulling = ({
404
- dbReady,
405
608
  initialBackendHead,
406
- isLocalEvent,
609
+ isClientEvent,
407
610
  restartBackendPushing,
408
- span,
611
+ otelSpan,
409
612
  syncStateSref,
410
613
  localPushesLatch,
411
614
  pullLatch,
615
+ devtoolsLatch,
412
616
  initialBlockingSyncContext,
617
+ connectedClientSessionPullQueues,
618
+ mergeCounterRef,
619
+ mergePayloads,
620
+ advancePushHead,
413
621
  }: {
414
- dbReady: Deferred.Deferred<void>
415
- initialBackendHead: EventId.GlobalEventId
416
- isLocalEvent: (mutationEventEncoded: MutationEvent.EncodedWithMeta) => boolean
622
+ initialBackendHead: EventSequenceNumber.GlobalEventSequenceNumber
623
+ isClientEvent: (eventEncoded: LiveStoreEvent.EncodedWithMeta) => boolean
417
624
  restartBackendPushing: (
418
- filteredRebasedPending: ReadonlyArray<MutationEvent.EncodedWithMeta>,
625
+ filteredRebasedPending: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>,
419
626
  ) => Effect.Effect<void, UnexpectedError, LeaderThreadCtx | HttpClient.HttpClient>
420
- span: otel.Span | undefined
627
+ otelSpan: otel.Span | undefined
421
628
  syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
422
629
  localPushesLatch: Effect.Latch
423
630
  pullLatch: Effect.Latch
631
+ devtoolsLatch: Effect.Latch | undefined
424
632
  initialBlockingSyncContext: InitialBlockingSyncContext
633
+ connectedClientSessionPullQueues: PullQueueSet
634
+ mergeCounterRef: { current: number }
635
+ mergePayloads: Map<number, typeof SyncState.PayloadUpstream.Type>
636
+ advancePushHead: (eventNum: EventSequenceNumber.EventSequenceNumber) => void
425
637
  }) =>
426
638
  Effect.gen(function* () {
427
- const { syncBackend, db, dbLog, connectedClientSessionPullQueues, schema } = yield* LeaderThreadCtx
639
+ const { syncBackend, dbState: db, dbEventlog, schema } = yield* LeaderThreadCtx
428
640
 
429
641
  if (syncBackend === undefined) return
430
642
 
431
- const cursorInfo = yield* getCursorInfo(initialBackendHead)
432
-
433
- const applyMutationItems = yield* makeApplyMutationItems
434
-
435
- const onNewPullChunk = (newEvents: MutationEvent.EncodedWithMeta[], remaining: number) =>
643
+ const onNewPullChunk = (newEvents: LiveStoreEvent.EncodedWithMeta[], remaining: number) =>
436
644
  Effect.gen(function* () {
437
645
  if (newEvents.length === 0) return
438
646
 
647
+ if (devtoolsLatch !== undefined) {
648
+ yield* devtoolsLatch.await
649
+ }
650
+
439
651
  // Prevent more local pushes from being processed until this pull is finished
440
652
  yield* localPushesLatch.close
441
653
 
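In the 'reject' branch of the local-push loop above, the processor bumps currentLocalPushGenerationRef and fails every queued push that still belongs to the rejected generation with LeaderAheadError, while items already tagged with the next generation stay in the queue. The sketch below illustrates that bookkeeping with hypothetical, simplified names (SeqNum, QueueItem, rejectStaleGeneration); the real queue items are event/deferred/generation tuples managed via BucketQueue and Deferred.

type SeqNum = number

interface QueueItem {
  seqNum: SeqNum
  generation: number
  reject: (error: { minimumExpectedNum: SeqNum; providedNum: SeqNum }) => void
}

// Bump the generation and fail every queued item that still belongs to the
// rejected generation; items already tagged with the next generation are kept.
const rejectStaleGeneration = (
  queue: ReadonlyArray<QueueItem>,
  currentGeneration: number,
  minimumExpectedNum: SeqNum,
  providedNum: SeqNum,
): { remaining: QueueItem[]; generation: number } => {
  const nextGeneration = currentGeneration + 1
  const remaining: QueueItem[] = []
  for (const item of queue) {
    if (item.generation >= nextGeneration) {
      remaining.push(item)
    } else {
      item.reject({ minimumExpectedNum, providedNum })
    }
  }
  return { remaining, generation: nextGeneration }
}

After the rejections the pull latch is reopened so backend pulling can resume, as in the code above.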
@@ -445,75 +657,107 @@ const backgroundBackendPulling = ({
445
657
  const syncState = yield* syncStateSref
446
658
  if (syncState === undefined) return shouldNeverHappen('Not initialized')
447
659
 
448
- const trimRollbackUntil = newEvents.at(-1)!.id
449
-
450
- const updateResult = SyncState.updateSyncState({
660
+ const mergeResult = SyncState.merge({
451
661
  syncState,
452
- payload: { _tag: 'upstream-advance', newEvents, trimRollbackUntil },
453
- isLocalEvent,
454
- isEqualEvent: MutationEvent.isEqualEncoded,
455
- ignoreLocalEvents: true,
662
+ payload: SyncState.PayloadUpstreamAdvance.make({ newEvents }),
663
+ isClientEvent,
664
+ isEqualEvent: LiveStoreEvent.isEqualEncoded,
665
+ ignoreClientEvents: true,
456
666
  })
457
667
 
458
- if (updateResult._tag === 'reject') {
668
+ const mergeCounter = yield* incrementMergeCounter(mergeCounterRef)
669
+
670
+ if (mergeResult._tag === 'reject') {
459
671
  return shouldNeverHappen('The leader thread should never reject upstream advances')
672
+ } else if (mergeResult._tag === 'unexpected-error') {
673
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:unexpected-error`, {
674
+ newEventsCount: newEvents.length,
675
+ newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
676
+ })
677
+ return yield* Effect.fail(mergeResult.cause)
460
678
  }
461
679
 
462
- const newBackendHead = newEvents.at(-1)!.id
680
+ const newBackendHead = newEvents.at(-1)!.seqNum
463
681
 
464
- updateBackendHead(dbLog, newBackendHead)
682
+ Eventlog.updateBackendHead(dbEventlog, newBackendHead)
465
683
 
466
- if (updateResult._tag === 'rebase') {
467
- span?.addEvent('backend-pull:rebase', {
684
+ if (mergeResult._tag === 'rebase') {
685
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:rebase`, {
468
686
  newEventsCount: newEvents.length,
469
687
  newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
470
- rollbackCount: updateResult.eventsToRollback.length,
471
- updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
688
+ rollbackCount: mergeResult.rollbackEvents.length,
689
+ mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
472
690
  })
473
691
 
474
- const filteredRebasedPending = updateResult.newSyncState.pending.filter((mutationEvent) => {
475
- const mutationDef = schema.mutations.get(mutationEvent.mutation)!
476
- return mutationDef.options.localOnly === false
692
+ const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
693
+ const { eventDef } = getEventDef(schema, event.name)
694
+ return eventDef.options.clientOnly === false
477
695
  })
478
- yield* restartBackendPushing(filteredRebasedPending)
696
+ yield* restartBackendPushing(globalRebasedPendingEvents)
479
697
 
480
- if (updateResult.eventsToRollback.length > 0) {
481
- yield* rollback({ db, dbLog, eventIdsToRollback: updateResult.eventsToRollback.map((_) => _.id) })
698
+ if (mergeResult.rollbackEvents.length > 0) {
699
+ yield* rollback({
700
+ dbState: db,
701
+ dbEventlog,
702
+ eventNumsToRollback: mergeResult.rollbackEvents.map((_) => _.seqNum),
703
+ })
482
704
  }
483
705
 
484
706
  yield* connectedClientSessionPullQueues.offer({
485
- payload: {
486
- _tag: 'upstream-rebase',
487
- newEvents: updateResult.newEvents,
488
- rollbackUntil: updateResult.eventsToRollback.at(0)!.id,
489
- trimRollbackUntil,
490
- },
491
- remaining,
707
+ payload: SyncState.PayloadUpstreamRebase.make({
708
+ newEvents: mergeResult.newEvents,
709
+ rollbackEvents: mergeResult.rollbackEvents,
710
+ }),
711
+ mergeCounter,
492
712
  })
713
+ mergePayloads.set(
714
+ mergeCounter,
715
+ SyncState.PayloadUpstreamRebase.make({
716
+ newEvents: mergeResult.newEvents,
717
+ rollbackEvents: mergeResult.rollbackEvents,
718
+ }),
719
+ )
493
720
  } else {
494
- span?.addEvent('backend-pull:advance', {
721
+ otelSpan?.addEvent(`[${mergeCounter}]:pull:advance`, {
495
722
  newEventsCount: newEvents.length,
496
- updateResult: TRACE_VERBOSE ? JSON.stringify(updateResult) : undefined,
723
+ mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
497
724
  })
498
725
 
499
726
  yield* connectedClientSessionPullQueues.offer({
500
- payload: { _tag: 'upstream-advance', newEvents: updateResult.newEvents, trimRollbackUntil },
501
- remaining,
727
+ payload: SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }),
728
+ mergeCounter,
502
729
  })
730
+ mergePayloads.set(mergeCounter, SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }))
731
+
732
+ if (mergeResult.confirmedEvents.length > 0) {
733
+ // `mergeResult.confirmedEvents` don't contain the correct sync metadata, so we need to use
734
+ // `newEvents` instead which we filter via `mergeResult.confirmedEvents`
735
+ const confirmedNewEvents = newEvents.filter((event) =>
736
+ mergeResult.confirmedEvents.some((confirmedEvent) =>
737
+ EventSequenceNumber.isEqual(event.seqNum, confirmedEvent.seqNum),
738
+ ),
739
+ )
740
+ yield* Eventlog.updateSyncMetadata(confirmedNewEvents)
741
+ }
503
742
  }
504
743
 
744
+ // Removes the changeset rows which are no longer needed, as we'll never have to roll back beyond this point
505
745
  trimChangesetRows(db, newBackendHead)
506
746
 
507
- yield* applyMutationItems({ batchItems: updateResult.newEvents })
747
+ advancePushHead(mergeResult.newSyncState.localHead)
508
748
 
509
- yield* SubscriptionRef.set(syncStateSref, updateResult.newSyncState)
749
+ yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined })
510
750
 
751
+ yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState)
752
+
753
+ // Allow local pushes to be processed again
511
754
  if (remaining === 0) {
512
- // Allow local pushes to be processed again
513
755
  yield* localPushesLatch.open
514
756
  }
515
757
  })
516
758
 
759
+ const cursorInfo = yield* Eventlog.getSyncBackendCursorInfo(initialBackendHead)
760
+
517
761
  yield* syncBackend.pull(cursorInfo).pipe(
518
762
  // TODO only take from queue while connected
519
763
  Stream.tap(({ batch, remaining }) =>
@@ -525,16 +769,13 @@ const backgroundBackendPulling = ({
525
769
  // },
526
770
  // })
527
771
 
528
- // Wait for the db to be initially created
529
- yield* dbReady
530
-
531
- // NOTE we only want to take process mutations when the sync backend is connected
772
+ // NOTE we only want to process events when the sync backend is connected
532
773
  // (e.g. needed for simulating being offline)
533
774
  // TODO remove when there's a better way to handle this in stream above
534
775
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
535
776
 
536
777
  yield* onNewPullChunk(
537
- batch.map((_) => MutationEvent.EncodedWithMeta.fromGlobal(_.mutationEventEncoded)),
778
+ batch.map((_) => LiveStoreEvent.EncodedWithMeta.fromGlobal(_.eventEncoded, _.metadata)),
538
779
  remaining,
539
780
  )
540
781
 
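The rebase path above restarts backend pushing with the rebased pending events, rolls back the events returned in mergeResult.rollbackEvents, and then materializes mergeResult.newEvents. As a purely illustrative data transformation, assuming flat numeric sequence numbers instead of the structured EventSequenceNumber type (this is not SyncState.merge itself):

// Hypothetical flat event shape for illustration only.
interface Ev {
  seqNum: number
  name: string
}

// Locally pending events are rolled back and re-applied after the incoming
// upstream events, receiving fresh sequence numbers above the new backend head.
const rebasePending = (
  pending: ReadonlyArray<Ev>,
  upstream: ReadonlyArray<Ev>,
): { rollbackEvents: ReadonlyArray<Ev>; newEvents: ReadonlyArray<Ev> } => {
  const upstreamHead = upstream.at(-1)?.seqNum ?? 0
  const rebased = pending.map((event, index) => ({ ...event, seqNum: upstreamHead + index + 1 }))
  return {
    rollbackEvents: pending,
    newEvents: [...upstream, ...rebased],
  }
}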
@@ -544,94 +785,35 @@ const backgroundBackendPulling = ({
544
785
  Stream.runDrain,
545
786
  Effect.interruptible,
546
787
  )
547
- }).pipe(Effect.withSpan('@livestore/common:leader-thread:syncing:backend-pulling'))
548
-
549
- const rollback = ({
550
- db,
551
- dbLog,
552
- eventIdsToRollback,
553
- }: {
554
- db: SynchronousDatabase
555
- dbLog: SynchronousDatabase
556
- eventIdsToRollback: EventId.EventId[]
557
- }) =>
558
- Effect.gen(function* () {
559
- const rollbackEvents = db
560
- .select<SessionChangesetMetaRow>(
561
- sql`SELECT * FROM ${SESSION_CHANGESET_META_TABLE} WHERE (idGlobal, idLocal) IN (${eventIdsToRollback.map((id) => `(${id.global}, ${id.local})`).join(', ')})`,
562
- )
563
- .map((_) => ({ id: { global: _.idGlobal, local: _.idLocal }, changeset: _.changeset, debug: _.debug }))
564
- .toSorted((a, b) => EventId.compare(a.id, b.id))
565
-
566
- // Apply changesets in reverse order
567
- for (let i = rollbackEvents.length - 1; i >= 0; i--) {
568
- const { changeset } = rollbackEvents[i]!
569
- if (changeset !== null) {
570
- db.makeChangeset(changeset).invert().apply()
571
- }
572
- }
573
-
574
- // Delete the changeset rows
575
- db.execute(
576
- sql`DELETE FROM ${SESSION_CHANGESET_META_TABLE} WHERE (idGlobal, idLocal) IN (${eventIdsToRollback.map((id) => `(${id.global}, ${id.local})`).join(', ')})`,
577
- )
578
-
579
- // Delete the mutation log rows
580
- dbLog.execute(
581
- sql`DELETE FROM ${MUTATION_LOG_META_TABLE} WHERE (idGlobal, idLocal) IN (${eventIdsToRollback.map((id) => `(${id.global}, ${id.local})`).join(', ')})`,
582
- )
583
- }).pipe(
584
- Effect.withSpan('@livestore/common:leader-thread:syncing:rollback', {
585
- attributes: { count: eventIdsToRollback.length },
586
- }),
587
- )
588
-
589
- const getCursorInfo = (remoteHead: EventId.GlobalEventId) =>
590
- Effect.gen(function* () {
591
- const { dbLog } = yield* LeaderThreadCtx
592
-
593
- if (remoteHead === EventId.ROOT.global) return Option.none()
594
-
595
- const MutationlogQuerySchema = Schema.Struct({
596
- syncMetadataJson: Schema.parseJson(Schema.Option(Schema.JsonValue)),
597
- }).pipe(Schema.pluck('syncMetadataJson'), Schema.Array, Schema.head)
598
-
599
- const syncMetadataOption = yield* Effect.sync(() =>
600
- dbLog.select<{ syncMetadataJson: string }>(
601
- sql`SELECT syncMetadataJson FROM ${MUTATION_LOG_META_TABLE} WHERE idGlobal = ${remoteHead} ORDER BY idLocal ASC LIMIT 1`,
602
- ),
603
- ).pipe(Effect.andThen(Schema.decode(MutationlogQuerySchema)), Effect.map(Option.flatten), Effect.orDie)
604
-
605
- return Option.some({
606
- cursor: { global: remoteHead, local: EventId.localDefault },
607
- metadata: syncMetadataOption,
608
- }) satisfies InitialSyncInfo
609
- }).pipe(Effect.withSpan('@livestore/common:leader-thread:syncing:getCursorInfo', { attributes: { remoteHead } }))
788
+ }).pipe(Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pulling'))
610
789
 
611
790
  const backgroundBackendPushing = ({
612
- dbReady,
613
- syncBackendQueue,
614
- span,
791
+ syncBackendPushQueue,
792
+ otelSpan,
793
+ devtoolsLatch,
794
+ backendPushBatchSize,
615
795
  }: {
616
- dbReady: Deferred.Deferred<void>
617
- syncBackendQueue: BucketQueue.BucketQueue<MutationEvent.EncodedWithMeta>
618
- span: otel.Span | undefined
796
+ syncBackendPushQueue: BucketQueue.BucketQueue<LiveStoreEvent.EncodedWithMeta>
797
+ otelSpan: otel.Span | undefined
798
+ devtoolsLatch: Effect.Latch | undefined
799
+ backendPushBatchSize: number
619
800
  }) =>
620
801
  Effect.gen(function* () {
621
- const { syncBackend, dbLog } = yield* LeaderThreadCtx
802
+ const { syncBackend } = yield* LeaderThreadCtx
622
803
  if (syncBackend === undefined) return
623
804
 
624
- yield* dbReady
625
-
626
805
  while (true) {
627
806
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
628
807
 
629
- // TODO make batch size configurable
630
- const queueItems = yield* BucketQueue.takeBetween(syncBackendQueue, 1, 50)
808
+ const queueItems = yield* BucketQueue.takeBetween(syncBackendPushQueue, 1, backendPushBatchSize)
631
809
 
632
810
  yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
633
811
 
634
- span?.addEvent('backend-push', {
812
+ if (devtoolsLatch !== undefined) {
813
+ yield* devtoolsLatch.await
814
+ }
815
+
816
+ otelSpan?.addEvent('backend-push', {
635
817
  batchSize: queueItems.length,
636
818
  batch: TRACE_VERBOSE ? JSON.stringify(queueItems) : undefined,
637
819
  })
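backgroundBackendPushing above drains the push queue in batches of at most backendPushBatchSize and gives up on the first failed push, leaving recovery to the pulling side, which rebases and restarts pushing. A rough async/await sketch of that loop, using hypothetical PushQueue and SyncBackend interfaces rather than the Effect and BucketQueue types used here:

// Hypothetical interfaces -- not the LiveStore types.
interface PushQueue<T> {
  // Resolves with at least `min` and at most `max` queued items.
  takeBetween: (min: number, max: number) => Promise<T[]>
}

interface SyncBackend<T> {
  waitUntilConnected: () => Promise<void>
  push: (batch: ReadonlyArray<T>) => Promise<void>
}

const runBackendPushing = async <T>(
  queue: PushQueue<T>,
  backend: SyncBackend<T>,
  backendPushBatchSize: number,
): Promise<void> => {
  while (true) {
    await backend.waitUntilConnected()
    const batch = await queue.takeBetween(1, backendPushBatchSize)
    // The connection may have dropped while we were waiting for items.
    await backend.waitUntilConnected()
    try {
      await backend.push(batch)
    } catch {
      // A failed push is resolved by the pulling side, which rebases the
      // pending events and restarts pushing; this loop simply ends here.
      return
    }
  }
}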
@@ -640,31 +822,119 @@ const backgroundBackendPushing = ({
640
822
  const pushResult = yield* syncBackend.push(queueItems.map((_) => _.toGlobal())).pipe(Effect.either)
641
823
 
642
824
  if (pushResult._tag === 'Left') {
643
- span?.addEvent('backend-push-error', { error: pushResult.left.toString() })
825
+ if (LS_DEV) {
826
+ yield* Effect.logDebug('handled backend-push-error', { error: pushResult.left.toString() })
827
+ }
828
+ otelSpan?.addEvent('backend-push-error', { error: pushResult.left.toString() })
644
829
  // wait for interrupt caused by background pulling which will then restart pushing
645
830
  return yield* Effect.never
646
831
  }
647
-
648
- const { metadata } = pushResult.right
649
-
650
- // TODO try to do this in a single query
651
- for (let i = 0; i < queueItems.length; i++) {
652
- const mutationEventEncoded = queueItems[i]!
653
- yield* execSql(
654
- dbLog,
655
- ...updateRows({
656
- tableName: MUTATION_LOG_META_TABLE,
657
- columns: mutationLogMetaTable.sqliteDef.columns,
658
- where: { idGlobal: mutationEventEncoded.id.global, idLocal: mutationEventEncoded.id.local },
659
- updateValues: { syncMetadataJson: metadata[i]! },
660
- }),
661
- )
662
- }
663
832
  }
664
- }).pipe(Effect.interruptible, Effect.withSpan('@livestore/common:leader-thread:syncing:backend-pushing'))
833
+ }).pipe(Effect.interruptible, Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pushing'))
665
834
 
666
- const trimChangesetRows = (db: SynchronousDatabase, newHead: EventId.EventId) => {
835
+ const trimChangesetRows = (db: SqliteDb, newHead: EventSequenceNumber.EventSequenceNumber) => {
667
836
  // Since we're using the session changeset rows to query for the current head,
668
837
  // we're keeping at least one row for the current head, and thus are using `<` instead of `<=`
669
- db.execute(sql`DELETE FROM ${SESSION_CHANGESET_META_TABLE} WHERE idGlobal < ${newHead.global}`)
838
+ db.execute(sql`DELETE FROM ${SystemTables.SESSION_CHANGESET_META_TABLE} WHERE seqNumGlobal < ${newHead.global}`)
670
839
  }
840
+
841
+ interface PullQueueSet {
842
+ makeQueue: Effect.Effect<
843
+ Queue.Queue<{ payload: typeof SyncState.PayloadUpstream.Type; mergeCounter: number }>,
844
+ UnexpectedError,
845
+ Scope.Scope | LeaderThreadCtx
846
+ >
847
+ offer: (item: {
848
+ payload: typeof SyncState.PayloadUpstream.Type
849
+ mergeCounter: number
850
+ }) => Effect.Effect<void, UnexpectedError>
851
+ }
852
+
853
+ const makePullQueueSet = Effect.gen(function* () {
854
+ const set = new Set<Queue.Queue<{ payload: typeof SyncState.PayloadUpstream.Type; mergeCounter: number }>>()
855
+
856
+ yield* Effect.addFinalizer(() =>
857
+ Effect.gen(function* () {
858
+ for (const queue of set) {
859
+ yield* Queue.shutdown(queue)
860
+ }
861
+
862
+ set.clear()
863
+ }),
864
+ )
865
+
866
+ const makeQueue: PullQueueSet['makeQueue'] = Effect.gen(function* () {
867
+ const queue = yield* Queue.unbounded<{
868
+ payload: typeof SyncState.PayloadUpstream.Type
869
+ mergeCounter: number
870
+ }>().pipe(Effect.acquireRelease(Queue.shutdown))
871
+
872
+ yield* Effect.addFinalizer(() => Effect.sync(() => set.delete(queue)))
873
+
874
+ set.add(queue)
875
+
876
+ return queue
877
+ })
878
+
879
+ const offer: PullQueueSet['offer'] = (item) =>
880
+ Effect.gen(function* () {
881
+ // Short-circuit if the payload is an empty upstream advance
882
+ if (item.payload._tag === 'upstream-advance' && item.payload.newEvents.length === 0) {
883
+ return
884
+ }
885
+
886
+ for (const queue of set) {
887
+ yield* Queue.offer(queue, item)
888
+ }
889
+ })
890
+
891
+ return {
892
+ makeQueue,
893
+ offer,
894
+ }
895
+ })
896
+
897
+ const incrementMergeCounter = (mergeCounterRef: { current: number }) =>
898
+ Effect.gen(function* () {
899
+ const { dbState } = yield* LeaderThreadCtx
900
+ mergeCounterRef.current++
901
+ dbState.execute(
902
+ sql`INSERT OR REPLACE INTO ${SystemTables.LEADER_MERGE_COUNTER_TABLE} (id, mergeCounter) VALUES (0, ${mergeCounterRef.current})`,
903
+ )
904
+ return mergeCounterRef.current
905
+ })
906
+
907
+ const getMergeCounterFromDb = (dbState: SqliteDb) =>
908
+ Effect.gen(function* () {
909
+ const result = dbState.select<{ mergeCounter: number }>(
910
+ sql`SELECT mergeCounter FROM ${SystemTables.LEADER_MERGE_COUNTER_TABLE} WHERE id = 0`,
911
+ )
912
+ return result[0]?.mergeCounter ?? 0
913
+ })
914
+
915
+ const validatePushBatch = (
916
+ batch: ReadonlyArray<LiveStoreEvent.EncodedWithMeta>,
917
+ pushHead: EventSequenceNumber.EventSequenceNumber,
918
+ ) =>
919
+ Effect.gen(function* () {
920
+ if (batch.length === 0) {
921
+ return
922
+ }
923
+
924
+ // Make sure batch is monotonically increasing
925
+ for (let i = 1; i < batch.length; i++) {
926
+ if (EventSequenceNumber.isGreaterThanOrEqual(batch[i - 1]!.seqNum, batch[i]!.seqNum)) {
927
+ shouldNeverHappen(
928
+ `Events must be ordered in monotonically ascending order by eventNum. Received: [${batch.map((e) => EventSequenceNumber.toString(e.seqNum)).join(', ')}]`,
929
+ )
930
+ }
931
+ }
932
+
933
+ // Make sure smallest sequence number is > pushHead
934
+ if (EventSequenceNumber.isGreaterThanOrEqual(pushHead, batch[0]!.seqNum)) {
935
+ return yield* LeaderAheadError.make({
936
+ minimumExpectedNum: pushHead,
937
+ providedNum: batch[0]!.seqNum,
938
+ })
939
+ }
940
+ })
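For reference, the two checks performed by validatePushBatch above can be pictured with flat numeric sequence numbers (an assumption made for brevity; real EventSequenceNumber values are structured and compared with the helpers used above):

// Returns an error description, or undefined when the batch is acceptable.
const checkBatch = (batch: ReadonlyArray<number>, pushHead: number): string | undefined => {
  // The batch must be strictly increasing ...
  for (let i = 1; i < batch.length; i++) {
    if (batch[i - 1]! >= batch[i]!) {
      return `batch is not strictly increasing at index ${i}`
    }
  }
  // ... and must start above the current push head.
  if (batch.length > 0 && batch[0]! <= pushHead) {
    return `leader is ahead: expected a sequence number > ${pushHead}, got ${batch[0]}`
  }
  return undefined
}

// Example: with pushHead = 5, [6, 7, 8] is accepted, while [5, 6] and [7, 6, 8] are rejected.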