@livestore/common 0.4.0-dev.21 → 0.4.0-dev.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (344):
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/ClientSessionLeaderThreadProxy.d.ts +16 -9
  3. package/dist/ClientSessionLeaderThreadProxy.d.ts.map +1 -1
  4. package/dist/ClientSessionLeaderThreadProxy.js.map +1 -1
  5. package/dist/WorkerTransportError.d.ts +11 -0
  6. package/dist/WorkerTransportError.d.ts.map +1 -0
  7. package/dist/WorkerTransportError.js +11 -0
  8. package/dist/WorkerTransportError.js.map +1 -0
  9. package/dist/adapter-types.d.ts +26 -3
  10. package/dist/adapter-types.d.ts.map +1 -1
  11. package/dist/adapter-types.js +27 -1
  12. package/dist/adapter-types.js.map +1 -1
  13. package/dist/bounded-collections.d.ts.map +1 -1
  14. package/dist/bounded-collections.js +6 -4
  15. package/dist/bounded-collections.js.map +1 -1
  16. package/dist/debug-info.js +4 -4
  17. package/dist/debug-info.js.map +1 -1
  18. package/dist/devtools/devtools-messages-client-session.d.ts +42 -22
  19. package/dist/devtools/devtools-messages-client-session.d.ts.map +1 -1
  20. package/dist/devtools/devtools-messages-client-session.js +12 -1
  21. package/dist/devtools/devtools-messages-client-session.js.map +1 -1
  22. package/dist/devtools/devtools-messages-common.d.ts +12 -6
  23. package/dist/devtools/devtools-messages-common.d.ts.map +1 -1
  24. package/dist/devtools/devtools-messages-common.js +8 -3
  25. package/dist/devtools/devtools-messages-common.js.map +1 -1
  26. package/dist/devtools/devtools-messages-leader.d.ts +45 -25
  27. package/dist/devtools/devtools-messages-leader.d.ts.map +1 -1
  28. package/dist/devtools/devtools-messages-leader.js +12 -1
  29. package/dist/devtools/devtools-messages-leader.js.map +1 -1
  30. package/dist/devtools/mod.js +1 -1
  31. package/dist/devtools/mod.js.map +1 -1
  32. package/dist/errors.d.ts +15 -15
  33. package/dist/errors.d.ts.map +1 -1
  34. package/dist/errors.js +11 -11
  35. package/dist/errors.js.map +1 -1
  36. package/dist/index.d.ts +2 -0
  37. package/dist/index.d.ts.map +1 -1
  38. package/dist/index.js +2 -0
  39. package/dist/index.js.map +1 -1
  40. package/dist/leader-thread/LeaderSyncProcessor.d.ts +20 -6
  41. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  42. package/dist/leader-thread/LeaderSyncProcessor.js +283 -253
  43. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  44. package/dist/leader-thread/RejectedPushError.d.ts +107 -0
  45. package/dist/leader-thread/RejectedPushError.d.ts.map +1 -0
  46. package/dist/leader-thread/RejectedPushError.js +78 -0
  47. package/dist/leader-thread/RejectedPushError.js.map +1 -0
  48. package/dist/leader-thread/connection.js +1 -1
  49. package/dist/leader-thread/connection.js.map +1 -1
  50. package/dist/leader-thread/eventlog.d.ts.map +1 -1
  51. package/dist/leader-thread/eventlog.js +12 -11
  52. package/dist/leader-thread/eventlog.js.map +1 -1
  53. package/dist/leader-thread/leader-worker-devtools.d.ts +1 -2
  54. package/dist/leader-thread/leader-worker-devtools.d.ts.map +1 -1
  55. package/dist/leader-thread/leader-worker-devtools.js +34 -14
  56. package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
  57. package/dist/leader-thread/make-leader-thread-layer.d.ts +12 -5
  58. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  59. package/dist/leader-thread/make-leader-thread-layer.js +12 -11
  60. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  61. package/dist/leader-thread/make-leader-thread-layer.test.js +1 -1
  62. package/dist/leader-thread/make-leader-thread-layer.test.js.map +1 -1
  63. package/dist/leader-thread/materialize-event.d.ts.map +1 -1
  64. package/dist/leader-thread/materialize-event.js +7 -4
  65. package/dist/leader-thread/materialize-event.js.map +1 -1
  66. package/dist/leader-thread/recreate-db.js +1 -1
  67. package/dist/leader-thread/recreate-db.js.map +1 -1
  68. package/dist/leader-thread/shutdown-channel.d.ts +2 -2
  69. package/dist/leader-thread/shutdown-channel.d.ts.map +1 -1
  70. package/dist/leader-thread/shutdown-channel.js +2 -2
  71. package/dist/leader-thread/shutdown-channel.js.map +1 -1
  72. package/dist/leader-thread/stream-events.d.ts.map +1 -1
  73. package/dist/leader-thread/stream-events.js +4 -3
  74. package/dist/leader-thread/stream-events.js.map +1 -1
  75. package/dist/leader-thread/types.d.ts +7 -6
  76. package/dist/leader-thread/types.d.ts.map +1 -1
  77. package/dist/leader-thread/types.js.map +1 -1
  78. package/dist/logging.js +4 -4
  79. package/dist/logging.js.map +1 -1
  80. package/dist/make-client-session.js +2 -2
  81. package/dist/make-client-session.js.map +1 -1
  82. package/dist/materializer-helper.js +6 -6
  83. package/dist/materializer-helper.js.map +1 -1
  84. package/dist/otel.d.ts +1 -1
  85. package/dist/otel.d.ts.map +1 -1
  86. package/dist/otel.js +2 -2
  87. package/dist/otel.js.map +1 -1
  88. package/dist/rematerialize-from-eventlog.d.ts +1 -1
  89. package/dist/rematerialize-from-eventlog.d.ts.map +1 -1
  90. package/dist/rematerialize-from-eventlog.js +11 -9
  91. package/dist/rematerialize-from-eventlog.js.map +1 -1
  92. package/dist/schema/EventDef/define.d.ts +16 -2
  93. package/dist/schema/EventDef/define.d.ts.map +1 -1
  94. package/dist/schema/EventDef/define.js +5 -4
  95. package/dist/schema/EventDef/define.js.map +1 -1
  96. package/dist/schema/EventDef/deprecated.d.ts +99 -0
  97. package/dist/schema/EventDef/deprecated.d.ts.map +1 -0
  98. package/dist/schema/EventDef/deprecated.js +144 -0
  99. package/dist/schema/EventDef/deprecated.js.map +1 -0
  100. package/dist/schema/EventDef/deprecated.test.d.ts +2 -0
  101. package/dist/schema/EventDef/deprecated.test.d.ts.map +1 -0
  102. package/dist/schema/EventDef/deprecated.test.js +95 -0
  103. package/dist/schema/EventDef/deprecated.test.js.map +1 -0
  104. package/dist/schema/EventDef/event-def.d.ts +4 -0
  105. package/dist/schema/EventDef/event-def.d.ts.map +1 -1
  106. package/dist/schema/EventDef/mod.d.ts +1 -0
  107. package/dist/schema/EventDef/mod.d.ts.map +1 -1
  108. package/dist/schema/EventDef/mod.js +1 -0
  109. package/dist/schema/EventDef/mod.js.map +1 -1
  110. package/dist/schema/EventSequenceNumber/client.d.ts.map +1 -1
  111. package/dist/schema/EventSequenceNumber/client.js +11 -11
  112. package/dist/schema/EventSequenceNumber/client.js.map +1 -1
  113. package/dist/schema/EventSequenceNumber.test.js +1 -1
  114. package/dist/schema/EventSequenceNumber.test.js.map +1 -1
  115. package/dist/schema/LiveStoreEvent/client.d.ts +6 -6
  116. package/dist/schema/LiveStoreEvent/client.d.ts.map +1 -1
  117. package/dist/schema/LiveStoreEvent/client.js +6 -3
  118. package/dist/schema/LiveStoreEvent/client.js.map +1 -1
  119. package/dist/schema/LiveStoreEvent/client.test.d.ts +2 -0
  120. package/dist/schema/LiveStoreEvent/client.test.d.ts.map +1 -0
  121. package/dist/schema/LiveStoreEvent/client.test.js +83 -0
  122. package/dist/schema/LiveStoreEvent/client.test.js.map +1 -0
  123. package/dist/schema/schema.d.ts.map +1 -1
  124. package/dist/schema/schema.js +7 -4
  125. package/dist/schema/schema.js.map +1 -1
  126. package/dist/schema/state/sqlite/client-document-def.d.ts +1 -0
  127. package/dist/schema/state/sqlite/client-document-def.d.ts.map +1 -1
  128. package/dist/schema/state/sqlite/client-document-def.js +34 -13
  129. package/dist/schema/state/sqlite/client-document-def.js.map +1 -1
  130. package/dist/schema/state/sqlite/client-document-def.test.js +121 -2
  131. package/dist/schema/state/sqlite/client-document-def.test.js.map +1 -1
  132. package/dist/schema/state/sqlite/column-annotations.d.ts.map +1 -1
  133. package/dist/schema/state/sqlite/column-annotations.js +1 -1
  134. package/dist/schema/state/sqlite/column-annotations.js.map +1 -1
  135. package/dist/schema/state/sqlite/column-annotations.test.js +1 -1
  136. package/dist/schema/state/sqlite/column-annotations.test.js.map +1 -1
  137. package/dist/schema/state/sqlite/column-def.d.ts.map +1 -1
  138. package/dist/schema/state/sqlite/column-def.js +36 -34
  139. package/dist/schema/state/sqlite/column-def.js.map +1 -1
  140. package/dist/schema/state/sqlite/column-def.test.js +7 -6
  141. package/dist/schema/state/sqlite/column-def.test.js.map +1 -1
  142. package/dist/schema/state/sqlite/column-spec.d.ts.map +1 -1
  143. package/dist/schema/state/sqlite/column-spec.js +8 -8
  144. package/dist/schema/state/sqlite/column-spec.js.map +1 -1
  145. package/dist/schema/state/sqlite/column-spec.test.js +1 -1
  146. package/dist/schema/state/sqlite/column-spec.test.js.map +1 -1
  147. package/dist/schema/state/sqlite/db-schema/ast/sqlite.js +2 -2
  148. package/dist/schema/state/sqlite/db-schema/ast/sqlite.js.map +1 -1
  149. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.d.ts +2 -2
  150. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.d.ts.map +1 -1
  151. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.js +11 -2
  152. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.js.map +1 -1
  153. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.js +1 -1
  154. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.js.map +1 -1
  155. package/dist/schema/state/sqlite/db-schema/dsl/mod.d.ts +1 -1
  156. package/dist/schema/state/sqlite/db-schema/dsl/mod.d.ts.map +1 -1
  157. package/dist/schema/state/sqlite/db-schema/dsl/mod.js +1 -1
  158. package/dist/schema/state/sqlite/db-schema/dsl/mod.js.map +1 -1
  159. package/dist/schema/state/sqlite/mod.d.ts.map +1 -1
  160. package/dist/schema/state/sqlite/mod.js +3 -5
  161. package/dist/schema/state/sqlite/mod.js.map +1 -1
  162. package/dist/schema/state/sqlite/query-builder/api.d.ts +37 -13
  163. package/dist/schema/state/sqlite/query-builder/api.d.ts.map +1 -1
  164. package/dist/schema/state/sqlite/query-builder/astToSql.d.ts.map +1 -1
  165. package/dist/schema/state/sqlite/query-builder/astToSql.js +77 -7
  166. package/dist/schema/state/sqlite/query-builder/astToSql.js.map +1 -1
  167. package/dist/schema/state/sqlite/query-builder/impl.d.ts +1 -1
  168. package/dist/schema/state/sqlite/query-builder/impl.d.ts.map +1 -1
  169. package/dist/schema/state/sqlite/query-builder/impl.js +28 -14
  170. package/dist/schema/state/sqlite/query-builder/impl.js.map +1 -1
  171. package/dist/schema/state/sqlite/query-builder/impl.test.js +112 -3
  172. package/dist/schema/state/sqlite/query-builder/impl.test.js.map +1 -1
  173. package/dist/schema/state/sqlite/schema-helpers.js +2 -2
  174. package/dist/schema/state/sqlite/schema-helpers.js.map +1 -1
  175. package/dist/schema/state/sqlite/table-def.d.ts +5 -3
  176. package/dist/schema/state/sqlite/table-def.d.ts.map +1 -1
  177. package/dist/schema/state/sqlite/table-def.js +1 -1
  178. package/dist/schema/state/sqlite/table-def.js.map +1 -1
  179. package/dist/schema/state/sqlite/table-def.test.js +57 -4
  180. package/dist/schema/state/sqlite/table-def.test.js.map +1 -1
  181. package/dist/schema/unknown-events.d.ts +1 -1
  182. package/dist/schema/unknown-events.d.ts.map +1 -1
  183. package/dist/schema/unknown-events.js +1 -1
  184. package/dist/schema/unknown-events.js.map +1 -1
  185. package/dist/schema-management/__tests__/migrations-autoincrement-quoting.test.js +1 -1
  186. package/dist/schema-management/__tests__/migrations-autoincrement-quoting.test.js.map +1 -1
  187. package/dist/schema-management/common.js +2 -2
  188. package/dist/schema-management/common.js.map +1 -1
  189. package/dist/schema-management/migrations.js +1 -1
  190. package/dist/schema-management/migrations.js.map +1 -1
  191. package/dist/sql-queries/sql-queries.js +8 -6
  192. package/dist/sql-queries/sql-queries.js.map +1 -1
  193. package/dist/sql-queries/sql-query-builder.d.ts.map +1 -1
  194. package/dist/sql-queries/sql-query-builder.js.map +1 -1
  195. package/dist/sqlite-db-helper.js +3 -3
  196. package/dist/sqlite-db-helper.js.map +1 -1
  197. package/dist/sqlite-types.d.ts +2 -2
  198. package/dist/sqlite-types.d.ts.map +1 -1
  199. package/dist/sqlite-types.js.map +1 -1
  200. package/dist/sync/ClientSessionSyncProcessor.d.ts +8 -9
  201. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  202. package/dist/sync/ClientSessionSyncProcessor.js +93 -107
  203. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  204. package/dist/sync/errors.d.ts +0 -38
  205. package/dist/sync/errors.d.ts.map +1 -1
  206. package/dist/sync/errors.js +3 -20
  207. package/dist/sync/errors.js.map +1 -1
  208. package/dist/sync/mock-sync-backend.d.ts +5 -3
  209. package/dist/sync/mock-sync-backend.d.ts.map +1 -1
  210. package/dist/sync/mock-sync-backend.js +70 -68
  211. package/dist/sync/mock-sync-backend.js.map +1 -1
  212. package/dist/sync/next/compact-events.js +6 -6
  213. package/dist/sync/next/compact-events.js.map +1 -1
  214. package/dist/sync/next/facts.d.ts.map +1 -1
  215. package/dist/sync/next/facts.js +6 -6
  216. package/dist/sync/next/facts.js.map +1 -1
  217. package/dist/sync/next/history-dag-common.d.ts.map +1 -1
  218. package/dist/sync/next/history-dag-common.js +6 -6
  219. package/dist/sync/next/history-dag-common.js.map +1 -1
  220. package/dist/sync/next/history-dag.js +3 -3
  221. package/dist/sync/next/history-dag.js.map +1 -1
  222. package/dist/sync/next/rebase-events.js +1 -1
  223. package/dist/sync/next/rebase-events.js.map +1 -1
  224. package/dist/sync/next/test/compact-events.calculator.test.js +2 -2
  225. package/dist/sync/next/test/compact-events.calculator.test.js.map +1 -1
  226. package/dist/sync/next/test/compact-events.test.d.ts.map +1 -1
  227. package/dist/sync/next/test/compact-events.test.js +2 -2
  228. package/dist/sync/next/test/compact-events.test.js.map +1 -1
  229. package/dist/sync/next/test/event-fixtures.d.ts.map +1 -1
  230. package/dist/sync/next/test/event-fixtures.js +2 -2
  231. package/dist/sync/next/test/event-fixtures.js.map +1 -1
  232. package/dist/sync/sync-backend-kv.d.ts.map +1 -1
  233. package/dist/sync/sync-backend-kv.js.map +1 -1
  234. package/dist/sync/sync-backend.d.ts +3 -3
  235. package/dist/sync/sync-backend.d.ts.map +1 -1
  236. package/dist/sync/sync-backend.js +1 -1
  237. package/dist/sync/sync-backend.js.map +1 -1
  238. package/dist/sync/sync.d.ts +20 -0
  239. package/dist/sync/sync.d.ts.map +1 -1
  240. package/dist/sync/syncstate.d.ts +4 -17
  241. package/dist/sync/syncstate.d.ts.map +1 -1
  242. package/dist/sync/syncstate.js +51 -74
  243. package/dist/sync/syncstate.js.map +1 -1
  244. package/dist/sync/syncstate.test.js +112 -96
  245. package/dist/sync/syncstate.test.js.map +1 -1
  246. package/dist/sync/transport-chunking.js +3 -3
  247. package/dist/sync/transport-chunking.js.map +1 -1
  248. package/dist/sync/validate-push-payload.d.ts +2 -2
  249. package/dist/sync/validate-push-payload.d.ts.map +1 -1
  250. package/dist/sync/validate-push-payload.js +4 -6
  251. package/dist/sync/validate-push-payload.js.map +1 -1
  252. package/dist/util.js +2 -2
  253. package/dist/util.js.map +1 -1
  254. package/dist/version.d.ts +7 -1
  255. package/dist/version.d.ts.map +1 -1
  256. package/dist/version.js +8 -4
  257. package/dist/version.js.map +1 -1
  258. package/package.json +66 -12
  259. package/src/ClientSessionLeaderThreadProxy.ts +16 -9
  260. package/src/WorkerTransportError.ts +12 -0
  261. package/src/adapter-types.ts +39 -3
  262. package/src/bounded-collections.ts +6 -5
  263. package/src/debug-info.ts +4 -4
  264. package/src/devtools/devtools-messages-client-session.ts +12 -0
  265. package/src/devtools/devtools-messages-common.ts +8 -4
  266. package/src/devtools/devtools-messages-leader.ts +12 -0
  267. package/src/devtools/mod.ts +1 -1
  268. package/src/errors.ts +18 -17
  269. package/src/index.ts +2 -0
  270. package/src/leader-thread/LeaderSyncProcessor.ts +417 -347
  271. package/src/leader-thread/RejectedPushError.ts +106 -0
  272. package/src/leader-thread/connection.ts +1 -1
  273. package/src/leader-thread/eventlog.ts +16 -14
  274. package/src/leader-thread/leader-worker-devtools.ts +107 -66
  275. package/src/leader-thread/make-leader-thread-layer.test.ts +1 -1
  276. package/src/leader-thread/make-leader-thread-layer.ts +41 -31
  277. package/src/leader-thread/materialize-event.ts +8 -4
  278. package/src/leader-thread/recreate-db.ts +1 -1
  279. package/src/leader-thread/shutdown-channel.ts +2 -6
  280. package/src/leader-thread/stream-events.ts +10 -5
  281. package/src/leader-thread/types.ts +7 -6
  282. package/src/logging.ts +4 -4
  283. package/src/make-client-session.ts +2 -2
  284. package/src/materializer-helper.ts +9 -9
  285. package/src/otel.ts +3 -2
  286. package/src/rematerialize-from-eventlog.ts +60 -60
  287. package/src/schema/EventDef/define.ts +22 -6
  288. package/src/schema/EventDef/deprecated.test.ts +129 -0
  289. package/src/schema/EventDef/deprecated.ts +175 -0
  290. package/src/schema/EventDef/event-def.ts +5 -0
  291. package/src/schema/EventDef/mod.ts +1 -0
  292. package/src/schema/EventSequenceNumber/client.ts +11 -11
  293. package/src/schema/EventSequenceNumber.test.ts +2 -1
  294. package/src/schema/LiveStoreEvent/client.test.ts +97 -0
  295. package/src/schema/LiveStoreEvent/client.ts +6 -3
  296. package/src/schema/schema.ts +9 -4
  297. package/src/schema/state/sqlite/client-document-def.test.ts +142 -3
  298. package/src/schema/state/sqlite/client-document-def.ts +37 -14
  299. package/src/schema/state/sqlite/column-annotations.test.ts +2 -1
  300. package/src/schema/state/sqlite/column-annotations.ts +2 -1
  301. package/src/schema/state/sqlite/column-def.test.ts +8 -6
  302. package/src/schema/state/sqlite/column-def.ts +41 -36
  303. package/src/schema/state/sqlite/column-spec.test.ts +3 -1
  304. package/src/schema/state/sqlite/column-spec.ts +9 -8
  305. package/src/schema/state/sqlite/db-schema/ast/sqlite.ts +2 -2
  306. package/src/schema/state/sqlite/db-schema/dsl/field-defs.test.ts +2 -1
  307. package/src/schema/state/sqlite/db-schema/dsl/field-defs.ts +13 -4
  308. package/src/schema/state/sqlite/db-schema/dsl/mod.ts +3 -3
  309. package/src/schema/state/sqlite/mod.ts +4 -5
  310. package/src/schema/state/sqlite/query-builder/api.ts +37 -8
  311. package/src/schema/state/sqlite/query-builder/astToSql.ts +87 -7
  312. package/src/schema/state/sqlite/query-builder/impl.test.ts +145 -3
  313. package/src/schema/state/sqlite/query-builder/impl.ts +26 -12
  314. package/src/schema/state/sqlite/schema-helpers.ts +2 -2
  315. package/src/schema/state/sqlite/table-def.test.ts +67 -4
  316. package/src/schema/state/sqlite/table-def.ts +8 -15
  317. package/src/schema/unknown-events.ts +2 -2
  318. package/src/schema-management/__tests__/migrations-autoincrement-quoting.test.ts +3 -1
  319. package/src/schema-management/common.ts +2 -2
  320. package/src/schema-management/migrations.ts +1 -1
  321. package/src/sql-queries/sql-queries.ts +10 -6
  322. package/src/sql-queries/sql-query-builder.ts +1 -0
  323. package/src/sqlite-db-helper.ts +3 -3
  324. package/src/sqlite-types.ts +3 -2
  325. package/src/sync/ClientSessionSyncProcessor.ts +142 -133
  326. package/src/sync/errors.ts +10 -22
  327. package/src/sync/mock-sync-backend.ts +139 -97
  328. package/src/sync/next/compact-events.ts +5 -5
  329. package/src/sync/next/facts.ts +7 -6
  330. package/src/sync/next/history-dag-common.ts +9 -6
  331. package/src/sync/next/history-dag.ts +3 -3
  332. package/src/sync/next/rebase-events.ts +1 -1
  333. package/src/sync/next/test/compact-events.calculator.test.ts +3 -2
  334. package/src/sync/next/test/compact-events.test.ts +4 -3
  335. package/src/sync/next/test/event-fixtures.ts +2 -2
  336. package/src/sync/sync-backend-kv.ts +1 -0
  337. package/src/sync/sync-backend.ts +5 -4
  338. package/src/sync/sync.ts +21 -0
  339. package/src/sync/syncstate.test.ts +513 -435
  340. package/src/sync/syncstate.ts +80 -86
  341. package/src/sync/transport-chunking.ts +3 -3
  342. package/src/sync/validate-push-payload.ts +4 -6
  343. package/src/util.ts +2 -2
  344. package/src/version.ts +8 -4
@@ -1,14 +1,18 @@
1
- import { casesHandled, isNotUndefined, LS_DEV, shouldNeverHappen, TRACE_VERBOSE } from '@livestore/utils';
2
- import { BucketQueue, Cause, Deferred, Duration, Effect, Exit, FiberHandle, Layer, Option, OtelTracer, Queue, ReadonlyArray, Schedule, Stream, Subscribable, SubscriptionRef, } from '@livestore/utils/effect';
1
+ import { casesHandled, isNotUndefined, LS_DEV, TRACE_VERBOSE } from '@livestore/utils';
2
+ import { BucketQueue, Cause, Deferred, Duration, Effect, Exit, FiberHandle, Layer, Option, Queue, ReadonlyArray, Schedule, Schema, Stream, Subscribable, SubscriptionRef, } from '@livestore/utils/effect';
3
3
  import { UnknownError } from "../adapter-types.js";
4
+ import { IntentionalShutdownCause } from "../errors.js";
4
5
  import { makeMaterializerHash } from "../materializer-helper.js";
5
6
  import { EventSequenceNumber, LiveStoreEvent, resolveEventDef, SystemTables } from "../schema/mod.js";
6
- import { LeaderAheadError, } from "../sync/sync.js";
7
+ import { EVENTLOG_META_TABLE, SYNC_STATUS_TABLE } from "../schema/state/sqlite/system-tables/eventlog-tables.js";
8
+ import { isRejectedPushError, LeaderAheadError, NonMonotonicBatchError, StaleRebaseGenerationError } from "./RejectedPushError.js";
7
9
  import * as SyncState from "../sync/syncstate.js";
8
10
  import { sql } from "../util.js";
9
11
  import * as Eventlog from "./eventlog.js";
10
12
  import { rollback } from "./materialize-event.js";
11
13
  import { LeaderThreadCtx } from "./types.js";
14
+ /** Serialize value to JSON string for trace attributes */
15
+ const jsonStringify = Schema.encodeSync(Schema.parseJson());
12
16
  /**
13
17
  * The LeaderSyncProcessor manages synchronization of events between
14
18
  * the local state and the sync backend, ensuring efficient and orderly processing.
@@ -27,11 +31,11 @@ import { LeaderThreadCtx } from "./types.js";
27
31
  * - Maintains events in ascending order.
28
32
  * - Uses `Deferred` objects to resolve/reject events based on application success.
29
33
  * - Processes events from the queue, applying events in batches.
30
- * - Controlled by a `Latch` to manage execution flow.
31
- * - The latch closes on pull receipt and re-opens post-pull completion.
34
+ * - Controlled by a mutex (`Semaphore(1)`) to ensure mutual exclusion between local push and backend pull processing.
35
+ * - The backend pull side acquires the mutex before processing and releases it on post-pull completion.
32
36
  * - Processes up to `maxBatchSize` events per cycle.
33
37
  *
34
- * Currently we're advancing the state db and eventlog in lockstep, but we could also decouple this in the future
38
+ * Currently, we're advancing the state db and eventlog in lockstep, but we could also decouple this in the future
35
39
  *
36
40
  * Tricky concurrency scenarios:
37
41
  * - Queued local push batches becoming invalid due to a prior local push item being rejected.
@@ -39,7 +43,7 @@ import { LeaderThreadCtx } from "./types.js";
39
43
  *
40
44
  * See ClientSessionSyncProcessor for how the leader and session sync processors are similar/different.
41
45
  */
42
- export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncContext, initialSyncState, onError, livePull, params, testing, }) => Effect.gen(function* () {
46
+ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncContext, initialSyncState, onError, onBackendIdMismatch, livePull, params, testing, }) => Effect.gen(function* () {
43
47
  const syncBackendPushQueue = yield* BucketQueue.make();
44
48
  const localPushBatchSize = params.localPushBatchSize ?? 10;
45
49
  const backendPushBatchSize = params.backendPushBatchSize ?? 50;
@@ -51,8 +55,8 @@ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncCo
51
55
  current: undefined,
52
56
  };
53
57
  const localPushesQueue = yield* BucketQueue.make();
54
- const localPushesLatch = yield* Effect.makeLatch(true);
55
- const pullLatch = yield* Effect.makeLatch(true);
58
+ // Ensures mutual exclusion between local push and backend pull processing.
59
+ const localPushBackendPullMutex = yield* Effect.makeSemaphore(1);
56
60
  /**
57
61
  * Additionally to the `syncStateSref` we also need the `pushHeadRef` in order to prevent old/duplicate
58
62
  * events from being pushed in a scenario like this:
@@ -74,7 +78,7 @@ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncCo
74
78
  yield* validatePushBatch(newEvents, pushHeadRef.current);
75
79
  advancePushHead(newEvents.at(-1).seqNum);
76
80
  const waitForProcessing = options?.waitForProcessing ?? false;
77
- if (waitForProcessing) {
81
+ if (waitForProcessing === true) {
78
82
  const deferreds = yield* Effect.forEach(newEvents, () => Deferred.make());
79
83
  const items = newEvents.map((eventEncoded, i) => [eventEncoded, deferreds[i]]);
80
84
  yield* BucketQueue.offerAll(localPushesQueue, items);
@@ -87,14 +91,14 @@ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncCo
87
91
  }).pipe(Effect.withSpan('@livestore/common:LeaderSyncProcessor:push', {
88
92
  attributes: {
89
93
  batchSize: newEvents.length,
90
- batch: TRACE_VERBOSE ? newEvents : undefined,
94
+ batch: TRACE_VERBOSE === true ? newEvents : undefined,
91
95
  },
92
- links: ctxRef.current?.span ? [{ _tag: 'SpanLink', span: ctxRef.current.span, attributes: {} }] : undefined,
96
+ links: ctxRef.current?.span !== undefined
97
+ ? [{ _tag: 'SpanLink', span: ctxRef.current.span, attributes: {} }]
98
+ : undefined,
93
99
  }));
94
100
  const pushPartial = ({ event: { name, args }, clientId, sessionId }) => Effect.gen(function* () {
95
- const syncState = yield* syncStateSref;
96
- if (syncState === undefined)
97
- return shouldNeverHappen('Not initialized');
101
+ const syncState = yield* Effect.fromNullable(yield* syncStateSref).pipe(Effect.orDieDebugger);
98
102
  const resolution = yield* resolveEventDef(schema, {
99
103
  operation: '@livestore/common:LeaderSyncProcessor:pushPartial',
100
104
  event: {
@@ -104,7 +108,7 @@ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncCo
104
108
  sessionId,
105
109
  seqNum: syncState.localHead,
106
110
  },
107
- }).pipe(UnknownError.mapToUnknownError);
111
+ });
108
112
  if (resolution._tag === 'unknown') {
109
113
  // Ignore partial pushes for unrecognised events – they are still
110
114
  // persisted server-side once a schema update ships.
@@ -121,17 +125,17 @@ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncCo
121
125
  }),
122
126
  });
123
127
  yield* push([eventEncoded]);
124
- }).pipe(Effect.catchTag('LeaderAheadError', Effect.orDie));
128
+ }).pipe(
129
+ // pushPartial constructs the event sequence number internally, so these errors should never happen.
130
+ Effect.catchIf(isRejectedPushError, Effect.die));
125
131
  // Starts various background loops
126
132
  const boot = Effect.gen(function* () {
127
133
  const span = yield* Effect.currentSpan.pipe(Effect.orDie);
128
- const otelSpan = yield* OtelTracer.currentOtelSpan.pipe(Effect.catchAll(() => Effect.succeed(undefined)));
129
134
  const { devtools, shutdownChannel } = yield* LeaderThreadCtx;
130
135
  const runtime = yield* Effect.runtime();
131
136
  ctxRef.current = {
132
- otelSpan,
133
137
  span,
134
- devtoolsLatch: devtools.enabled ? devtools.syncBackendLatch : undefined,
138
+ devtoolsLatch: devtools.enabled === true ? devtools.syncBackendLatch : undefined,
135
139
  runtime,
136
140
  };
137
141
  /** State transitions need to happen atomically, so we use a Ref to track the state */
@@ -148,26 +152,25 @@ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncCo
148
152
  yield* BucketQueue.offerAll(syncBackendPushQueue, globalPendingEvents);
149
153
  }
150
154
  }
155
+ const handleBackendIdMismatchError = (error) => handleBackendIdMismatch({ error, onBackendIdMismatch, shutdownChannel });
151
156
  const maybeShutdownOnError = (cause) => Effect.gen(function* () {
152
157
  if (onError === 'ignore') {
153
- if (LS_DEV) {
158
+ if (LS_DEV === true) {
154
159
  yield* Effect.logDebug(`Ignoring sync error (${cause._tag === 'Fail' ? cause.error._tag : cause._tag})`, Cause.pretty(cause));
155
160
  }
156
161
  return;
157
162
  }
158
- const errorToSend = Cause.isFailType(cause) ? cause.error : UnknownError.make({ cause });
163
+ const errorToSend = Cause.isFailType(cause) === true ? cause.error : UnknownError.make({ cause });
159
164
  yield* shutdownChannel.send(errorToSend).pipe(Effect.orDie);
160
- return yield* Effect.die(cause);
165
+ return yield* Effect.failCause(cause).pipe(Effect.orDie);
161
166
  });
162
167
  yield* backgroundApplyLocalPushes({
163
- localPushesLatch,
168
+ localPushBackendPullMutex,
164
169
  localPushesQueue,
165
- pullLatch,
166
170
  syncStateSref,
167
171
  syncBackendPushQueue,
168
172
  schema,
169
173
  isClientEvent,
170
- otelSpan,
171
174
  connectedClientSessionPullQueues,
172
175
  localPushBatchSize,
173
176
  testing: {
@@ -177,10 +180,9 @@ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncCo
177
180
  const backendPushingFiberHandle = yield* FiberHandle.make();
178
181
  const backendPushingEffect = backgroundBackendPushing({
179
182
  syncBackendPushQueue,
180
- otelSpan,
181
183
  devtoolsLatch: ctxRef.current?.devtoolsLatch,
182
184
  backendPushBatchSize,
183
- }).pipe(Effect.catchAllCause(maybeShutdownOnError));
185
+ }).pipe(Effect.catchTag('BackendIdMismatchError', handleBackendIdMismatchError), Effect.catchAllCause(maybeShutdownOnError));
184
186
  yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect);
185
187
  yield* backgroundBackendPulling({
186
188
  isClientEvent,
@@ -194,19 +196,19 @@ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncCo
194
196
  yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect);
195
197
  }),
196
198
  syncStateSref,
197
- localPushesLatch,
198
- pullLatch,
199
+ localPushBackendPullMutex,
199
200
  livePull,
200
201
  dbState,
201
- otelSpan,
202
202
  initialBlockingSyncContext,
203
203
  devtoolsLatch: ctxRef.current?.devtoolsLatch,
204
204
  connectedClientSessionPullQueues,
205
205
  advancePushHead,
206
206
  }).pipe(Effect.retry({
207
- // We want to retry pulling if we've lost connection to the sync backend
208
- while: (cause) => cause._tag === 'IsOfflineError',
209
- }), Effect.catchAllCause(maybeShutdownOnError),
207
+ // Retry pulling when we've lost connection to the sync backend
208
+ // We're using `until` with a refinement instead of `while` to narrow `IsOfflineError` out of the error type.
209
+ // See https://github.com/Effect-TS/effect/issues/6122
210
+ until: (error) => error._tag !== 'IsOfflineError',
211
+ }), Effect.catchTag('BackendIdMismatchError', handleBackendIdMismatchError), Effect.catchAllCause(maybeShutdownOnError),
210
212
  // Needed to avoid `Fiber terminated with an unhandled error` logs which seem to happen because of the `Effect.retry` above.
211
213
  // This might be a bug in Effect. Only seems to happen in the browser.
212
214
  Effect.provide(Layer.setUnhandledErrorLogLevel(Option.none())), Effect.forkScoped);
@@ -230,17 +232,9 @@ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncCo
230
232
  - full new state db snapshot in the "rebase" case
231
233
  - downside: importing the snapshot is expensive
232
234
  */
233
- const pullQueue = ({ cursor }) => {
234
- const runtime = ctxRef.current?.runtime ?? shouldNeverHappen('Not initialized');
235
- return connectedClientSessionPullQueues.makeQueue(cursor).pipe(Effect.provide(runtime));
236
- };
235
+ const pullQueue = ({ cursor }) => Effect.fromNullable(ctxRef.current?.runtime).pipe(Effect.orDieDebugger, Effect.flatMap((runtime) => connectedClientSessionPullQueues.makeQueue(cursor).pipe(Effect.provide(runtime))));
237
236
  const syncState = Subscribable.make({
238
- get: Effect.gen(function* () {
239
- const syncState = yield* syncStateSref;
240
- if (syncState === undefined)
241
- return shouldNeverHappen('Not initialized');
242
- return syncState;
243
- }),
237
+ get: syncStateSref.pipe(Effect.flatMap(Effect.fromNullable), Effect.orDieDebugger),
244
238
  changes: syncStateSref.changes.pipe(Stream.filter(isNotUndefined)),
245
239
  });
246
240
  return {
@@ -252,113 +246,103 @@ export const makeLeaderSyncProcessor = ({ schema, dbState, initialBlockingSyncCo
252
246
  syncState,
253
247
  };
254
248
  });
255
- const backgroundApplyLocalPushes = ({ localPushesLatch, localPushesQueue, pullLatch, syncStateSref, syncBackendPushQueue, schema, isClientEvent, otelSpan, connectedClientSessionPullQueues, localPushBatchSize, testing, }) => Effect.gen(function* () {
249
+ const backgroundApplyLocalPushes = ({ localPushBackendPullMutex, localPushesQueue, syncStateSref, syncBackendPushQueue, schema, isClientEvent, connectedClientSessionPullQueues, localPushBatchSize, testing, }) => Effect.gen(function* () {
256
250
  while (true) {
257
251
  if (testing.delay !== undefined) {
258
252
  yield* testing.delay.pipe(Effect.withSpan('localPushProcessingDelay'));
259
253
  }
260
254
  const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, localPushBatchSize);
261
- // Wait for the backend pulling to finish
262
- yield* localPushesLatch.await;
263
- // Prevent backend pull processing until this local push is finished
264
- yield* pullLatch.close;
265
- const syncState = yield* syncStateSref;
266
- if (syncState === undefined)
267
- return shouldNeverHappen('Not initialized');
268
- const currentRebaseGeneration = syncState.localHead.rebaseGeneration;
269
- // Since the rebase generation might have changed since enqueuing, we need to filter out items with older generation
270
- // It's important that we filter after we got localPushesLatch, otherwise we might filter with the old generation
271
- const [droppedItems, filteredItems] = ReadonlyArray.partition(batchItems, ([eventEncoded]) => eventEncoded.seqNum.rebaseGeneration >= currentRebaseGeneration);
272
- if (droppedItems.length > 0) {
273
- otelSpan?.addEvent(`push:drop-old-generation`, {
274
- droppedCount: droppedItems.length,
275
- currentRebaseGeneration,
276
- });
277
- /**
278
- * Dropped pushes may still have a deferred awaiting completion.
279
- * Fail it so the caller learns the leader advanced and resubmits with the updated generation.
280
- */
281
- yield* Effect.forEach(droppedItems.filter((item) => item[1] !== undefined), ([eventEncoded, deferred]) => Deferred.fail(deferred, LeaderAheadError.make({
282
- minimumExpectedNum: syncState.localHead,
283
- providedNum: eventEncoded.seqNum,
284
- })));
285
- }
286
- if (filteredItems.length === 0) {
287
- yield* pullLatch.open;
288
- continue;
289
- }
290
- const [newEvents, deferreds] = ReadonlyArray.unzip(filteredItems);
291
- const mergeResult = SyncState.merge({
292
- syncState,
293
- payload: { _tag: 'local-push', newEvents },
294
- isClientEvent,
295
- isEqualEvent: LiveStoreEvent.Client.isEqualEncoded,
296
- });
297
- switch (mergeResult._tag) {
298
- case 'unknown-error': {
299
- otelSpan?.addEvent(`push:unknown-error`, {
300
- batchSize: newEvents.length,
301
- newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
255
+ // Applies a batch of local pushes, guarded by the localPushBackendPullMutex to ensure mutual exclusion with backend pulling
256
+ yield* Effect.gen(function* () {
257
+ const syncState = yield* Effect.fromNullable(yield* syncStateSref).pipe(Effect.orDieDebugger);
258
+ const currentRebaseGeneration = syncState.localHead.rebaseGeneration;
259
+ // Since the rebase generation might have changed since enqueuing, we need to filter out items with older generation
260
+ // It's important that we filter after acquiring the localPushBackendPullMutex, otherwise we might filter with the old generation
261
+ const [droppedItems, filteredItems] = ReadonlyArray.partition(batchItems, ([eventEncoded]) => eventEncoded.seqNum.rebaseGeneration >= currentRebaseGeneration);
262
+ if (droppedItems.length > 0) {
263
+ yield* Effect.spanEvent(`push:drop-old-generation`, {
264
+ droppedCount: droppedItems.length,
265
+ currentRebaseGeneration,
302
266
  });
303
- return yield* new UnknownError({ cause: mergeResult.message });
267
+ /**
268
+ * Dropped pushes may still have a deferred awaiting completion.
269
+ * Fail it so the caller learns the leader advanced and resubmits with the updated generation.
270
+ */
271
+ yield* Effect.forEach(droppedItems.filter((item) => item[1] !== undefined), ([eventEncoded, deferred]) => Deferred.fail(deferred, StaleRebaseGenerationError.make({
272
+ currentRebaseGeneration,
273
+ providedRebaseGeneration: eventEncoded.seqNum.rebaseGeneration,
274
+ sessionId: eventEncoded.sessionId,
275
+ })));
304
276
  }
305
- case 'rebase': {
306
- return shouldNeverHappen('The leader thread should never have to rebase due to a local push');
277
+ if (filteredItems.length === 0) {
278
+ return;
307
279
  }
308
- case 'reject': {
309
- otelSpan?.addEvent(`push:reject`, {
310
- batchSize: newEvents.length,
311
- mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
312
- });
313
- // TODO: how to test this?
314
- const nextRebaseGeneration = currentRebaseGeneration + 1;
315
- const providedNum = newEvents.at(0).seqNum;
316
- // All subsequent pushes with same generation should be rejected as well
317
- // We're also handling the case where the localPushQueue already contains events
318
- // from the next generation which we preserve in the queue
319
- const remainingEventsMatchingGeneration = yield* BucketQueue.takeSplitWhere(localPushesQueue, ([eventEncoded]) => eventEncoded.seqNum.rebaseGeneration >= nextRebaseGeneration);
320
- // TODO we still need to better understand and handle this scenario
321
- if (LS_DEV && (yield* BucketQueue.size(localPushesQueue)) > 0) {
322
- console.log('localPushesQueue is not empty', yield* BucketQueue.size(localPushesQueue));
323
- // biome-ignore lint/suspicious/noDebugger: debugging
324
- debugger;
280
+ const [newEvents, deferreds] = ReadonlyArray.unzip(filteredItems);
281
+ yield* Effect.annotateCurrentSpan({
282
+ 'batchSize': newEvents.length,
283
+ ...(TRACE_VERBOSE === true ? { 'newEvents': jsonStringify(newEvents) } : {}),
284
+ });
285
+ const mergeResult = yield* SyncState.merge({
286
+ syncState,
287
+ payload: { _tag: 'local-push', newEvents },
288
+ isClientEvent,
289
+ isEqualEvent: LiveStoreEvent.Client.isEqualEncoded,
290
+ });
291
+ switch (mergeResult._tag) {
292
+ case 'rebase': {
293
+ return yield* Effect.dieDebugger('The leader thread should never have to rebase due to a local push');
294
+ }
295
+ case 'reject': {
296
+ yield* Effect.spanEvent(`push:reject`, {
297
+ batchSize: newEvents.length,
298
+ ...(TRACE_VERBOSE === true ? { mergeResult: jsonStringify(mergeResult) } : {}),
299
+ });
300
+ // TODO: how to test this?
301
+ const nextRebaseGeneration = currentRebaseGeneration + 1;
302
+ const providedNum = newEvents.at(0).seqNum;
303
+ // All subsequent pushes with same generation should be rejected as well
304
+ // We're also handling the case where the localPushQueue already contains events
305
+ // from the next generation which we preserve in the queue
306
+ const remainingEventsMatchingGeneration = yield* BucketQueue.takeSplitWhere(localPushesQueue, ([eventEncoded]) => eventEncoded.seqNum.rebaseGeneration >= nextRebaseGeneration);
307
+ // TODO we still need to better understand and handle this scenario
308
+ if (LS_DEV === true && (yield* BucketQueue.size(localPushesQueue)) > 0) {
309
+ console.log('localPushesQueue is not empty', yield* BucketQueue.size(localPushesQueue));
310
+ // oxlint-disable-next-line eslint(no-debugger) -- intentional breakpoint for unexpected queue state
311
+ debugger;
312
+ }
313
+ const allDeferredsToReject = [
314
+ ...deferreds,
315
+ ...remainingEventsMatchingGeneration.map(([_, deferred]) => deferred),
316
+ ].filter(isNotUndefined);
317
+ yield* Effect.forEach(allDeferredsToReject, (deferred) => Deferred.fail(deferred, LeaderAheadError.make({ minimumExpectedNum: mergeResult.expectedMinimumId, providedNum, sessionId: newEvents.at(0).sessionId })));
318
+ // In this case we're skipping state update and down/upstream processing
319
+ // We've cleared the local push queue and are now waiting for new local pushes / backend pulls
320
+ return;
321
+ }
322
+ case 'advance': {
323
+ break;
324
+ }
325
+ default: {
326
+ casesHandled(mergeResult);
325
327
  }
326
- const allDeferredsToReject = [
327
- ...deferreds,
328
- ...remainingEventsMatchingGeneration.map(([_, deferred]) => deferred),
329
- ].filter(isNotUndefined);
330
- yield* Effect.forEach(allDeferredsToReject, (deferred) => Deferred.fail(deferred, LeaderAheadError.make({ minimumExpectedNum: mergeResult.expectedMinimumId, providedNum })));
331
- // Allow the backend pulling to start
332
- yield* pullLatch.open;
333
- // In this case we're skipping state update and down/upstream processing
334
- // We've cleared the local push queue and are now waiting for new local pushes / backend pulls
335
- continue;
336
- }
337
- case 'advance': {
338
- break;
339
- }
340
- default: {
341
- casesHandled(mergeResult);
342
328
  }
343
- }
344
- yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState);
345
- yield* connectedClientSessionPullQueues.offer({
346
- payload: SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }),
347
- leaderHead: mergeResult.newSyncState.localHead,
348
- });
349
- otelSpan?.addEvent(`push:advance`, {
350
- batchSize: newEvents.length,
351
- mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
352
- });
353
- // Don't sync client-local events
354
- const filteredBatch = mergeResult.newEvents.filter((eventEncoded) => {
355
- const eventDef = schema.eventsDefsMap.get(eventEncoded.name);
356
- return eventDef === undefined ? true : eventDef.options.clientOnly === false;
357
- });
358
- yield* BucketQueue.offerAll(syncBackendPushQueue, filteredBatch);
359
- yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds });
360
- // Allow the backend pulling to start
361
- yield* pullLatch.open;
329
+ yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState);
330
+ yield* connectedClientSessionPullQueues.offer({
331
+ payload: SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }),
332
+ leaderHead: mergeResult.newSyncState.localHead,
333
+ });
334
+ yield* Effect.spanEvent(`push:advance`, {
335
+ batchSize: newEvents.length,
336
+ ...(TRACE_VERBOSE === true ? { mergeResult: jsonStringify(mergeResult) } : {}),
337
+ });
338
+ // Don't sync client-local events
339
+ const filteredBatch = mergeResult.newEvents.filter((eventEncoded) => {
340
+ const eventDef = schema.eventsDefsMap.get(eventEncoded.name);
341
+ return eventDef === undefined ? true : eventDef.options.clientOnly === false;
342
+ });
343
+ yield* BucketQueue.offerAll(syncBackendPushQueue, filteredBatch);
344
+ yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds });
345
+ }).pipe(localPushBackendPullMutex.withPermits(1));
362
346
  }
363
347
  });
364
348
  // TODO how to handle errors gracefully
@@ -368,7 +352,7 @@ const materializeEventsBatch = ({ batchItems, deferreds }) => Effect.gen(functio
368
352
  db.execute('BEGIN TRANSACTION', undefined); // Start the transaction
369
353
  dbEventlog.execute('BEGIN TRANSACTION', undefined); // Start the transaction
370
354
  yield* Effect.addFinalizer((exit) => Effect.gen(function* () {
371
- if (Exit.isSuccess(exit))
355
+ if (Exit.isSuccess(exit) === true)
372
356
  return;
373
357
  // Rollback in case of an error
374
358
  db.execute('ROLLBACK', undefined);
@@ -387,112 +371,117 @@ const materializeEventsBatch = ({ batchItems, deferreds }) => Effect.gen(functio
387
371
  }).pipe(Effect.uninterruptible, Effect.scoped, Effect.withSpan('@livestore/common:LeaderSyncProcessor:materializeEventItems', {
388
372
  attributes: { batchSize: batchItems.length },
389
373
  }), Effect.tapCauseLogPretty);
390
- const backgroundBackendPulling = ({ isClientEvent, restartBackendPushing, otelSpan, dbState, syncStateSref, localPushesLatch, livePull, pullLatch, devtoolsLatch, initialBlockingSyncContext, connectedClientSessionPullQueues, advancePushHead, }) => Effect.gen(function* () {
374
+ const backgroundBackendPulling = Effect.fn('@livestore/common:LeaderSyncProcessor:backend-pulling')(function* ({ isClientEvent, restartBackendPushing, dbState, syncStateSref, localPushBackendPullMutex, livePull, devtoolsLatch, initialBlockingSyncContext, connectedClientSessionPullQueues, advancePushHead, }) {
391
375
  const { syncBackend, dbState: db, dbEventlog, schema } = yield* LeaderThreadCtx;
392
376
  if (syncBackend === undefined)
393
377
  return;
394
- const onNewPullChunk = (newEvents, pageInfo) => Effect.gen(function* () {
395
- if (newEvents.length === 0)
378
+ let pullMutexHeld = false;
379
+ const releasePullMutexIfHeld = Effect.gen(function* () {
380
+ if (pullMutexHeld === false)
396
381
  return;
382
+ pullMutexHeld = false;
383
+ yield* localPushBackendPullMutex.release(1);
384
+ });
385
+ const isPullPaginationComplete = (pageInfo) => pageInfo._tag === 'NoMore';
386
+ const onNewPullChunk = (newEvents, pageInfo) => Effect.gen(function* () {
397
387
  if (devtoolsLatch !== undefined) {
398
388
  yield* devtoolsLatch.await;
399
389
  }
400
- // Prevent more local pushes from being processed until this pull is finished
401
- yield* localPushesLatch.close;
402
- // Wait for pending local pushes to finish
403
- yield* pullLatch.await;
404
- const syncState = yield* syncStateSref;
405
- if (syncState === undefined)
406
- return shouldNeverHappen('Not initialized');
407
- const mergeResult = SyncState.merge({
408
- syncState,
409
- payload: SyncState.PayloadUpstreamAdvance.make({ newEvents }),
410
- isClientEvent,
411
- isEqualEvent: LiveStoreEvent.Client.isEqualEncoded,
412
- ignoreClientEvents: true,
413
- });
414
- if (mergeResult._tag === 'reject') {
415
- return shouldNeverHappen('The leader thread should never reject upstream advances');
390
+ if (newEvents.length === 0) {
391
+ if (isPullPaginationComplete(pageInfo) === true) {
392
+ yield* releasePullMutexIfHeld;
393
+ }
394
+ return;
416
395
  }
417
- else if (mergeResult._tag === 'unknown-error') {
418
- otelSpan?.addEvent(`pull:unknown-error`, {
419
- newEventsCount: newEvents.length,
420
- newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
421
- });
422
- return yield* new UnknownError({ cause: mergeResult.message });
396
+ // Prevent more local pushes from being processed until this pull pagination sequence is finished.
397
+ if (pullMutexHeld === false) {
398
+ yield* localPushBackendPullMutex.take(1);
399
+ pullMutexHeld = true;
423
400
  }
424
- const newBackendHead = newEvents.at(-1).seqNum;
425
- Eventlog.updateBackendHead(dbEventlog, newBackendHead);
426
- if (mergeResult._tag === 'rebase') {
427
- otelSpan?.addEvent(`pull:rebase[${mergeResult.newSyncState.localHead.rebaseGeneration}]`, {
428
- newEventsCount: newEvents.length,
429
- newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
430
- rollbackCount: mergeResult.rollbackEvents.length,
431
- mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
401
+ const chunkExit = yield* Effect.gen(function* () {
402
+ const syncState = yield* Effect.fromNullable(yield* syncStateSref).pipe(Effect.orDieDebugger);
403
+ yield* Effect.annotateCurrentSpan({
404
+ 'merge.newEventsCount': newEvents.length,
405
+ ...(TRACE_VERBOSE === true ? { 'merge.newEvents': jsonStringify(newEvents) } : {}),
432
406
  });
433
- const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
434
- const eventDef = schema.eventsDefsMap.get(event.name);
435
- return eventDef === undefined ? true : eventDef.options.clientOnly === false;
407
+ const mergeResult = yield* SyncState.merge({
408
+ syncState,
409
+ payload: SyncState.PayloadUpstreamAdvance.make({ newEvents }),
410
+ isClientEvent,
411
+ isEqualEvent: LiveStoreEvent.Client.isEqualEncoded,
412
+ ignoreClientEvents: true,
436
413
  });
437
- yield* restartBackendPushing(globalRebasedPendingEvents);
438
- if (mergeResult.rollbackEvents.length > 0) {
439
- yield* rollback({
440
- dbState: db,
441
- dbEventlog,
442
- eventNumsToRollback: mergeResult.rollbackEvents.map((_) => _.seqNum),
414
+ if (mergeResult._tag === 'reject') {
415
+ return yield* Effect.dieDebugger('The leader thread should never reject upstream advances');
416
+ }
417
+ const newBackendHead = newEvents.at(-1).seqNum;
418
+ Eventlog.updateBackendHead(dbEventlog, newBackendHead);
419
+ if (mergeResult._tag === 'rebase') {
420
+ yield* Effect.spanEvent(`pull:rebase[${mergeResult.newSyncState.localHead.rebaseGeneration}]`, {
421
+ newEventsCount: newEvents.length,
422
+ ...(TRACE_VERBOSE === true ? { newEvents: jsonStringify(newEvents) } : {}),
423
+ rollbackCount: mergeResult.rollbackEvents.length,
424
+ ...(TRACE_VERBOSE === true ? { mergeResult: jsonStringify(mergeResult) } : {}),
425
+ });
426
+ const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
427
+ const eventDef = schema.eventsDefsMap.get(event.name);
428
+ return eventDef === undefined ? true : eventDef.options.clientOnly === false;
429
+ });
430
+ yield* restartBackendPushing(globalRebasedPendingEvents);
431
+ if (mergeResult.rollbackEvents.length > 0) {
432
+ yield* rollback({
433
+ dbState: db,
434
+ dbEventlog,
435
+ eventNumsToRollback: mergeResult.rollbackEvents.map((_) => _.seqNum),
436
+ });
437
+ }
438
+ yield* connectedClientSessionPullQueues.offer({
439
+ payload: SyncState.payloadFromMergeResult(mergeResult),
440
+ leaderHead: mergeResult.newSyncState.localHead,
443
441
  });
444
442
  }
445
- yield* connectedClientSessionPullQueues.offer({
446
- payload: SyncState.payloadFromMergeResult(mergeResult),
447
- leaderHead: mergeResult.newSyncState.localHead,
448
- });
449
- }
450
- else {
451
- otelSpan?.addEvent(`pull:advance`, {
452
- newEventsCount: newEvents.length,
453
- mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
454
- });
455
- // Ensure push fiber is active after advance by restarting with current pending (non-client) events
456
- const globalPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
457
- const eventDef = schema.eventsDefsMap.get(event.name);
458
- return eventDef === undefined ? true : eventDef.options.clientOnly === false;
459
- });
460
- yield* restartBackendPushing(globalPendingEvents);
461
- yield* connectedClientSessionPullQueues.offer({
462
- payload: SyncState.payloadFromMergeResult(mergeResult),
463
- leaderHead: mergeResult.newSyncState.localHead,
464
- });
465
- if (mergeResult.confirmedEvents.length > 0) {
466
- // `mergeResult.confirmedEvents` don't contain the correct sync metadata, so we need to use
467
- // `newEvents` instead which we filter via `mergeResult.confirmedEvents`
468
- const confirmedNewEvents = newEvents.filter((event) => mergeResult.confirmedEvents.some((confirmedEvent) => EventSequenceNumber.Client.isEqual(event.seqNum, confirmedEvent.seqNum)));
469
- yield* Eventlog.updateSyncMetadata(confirmedNewEvents).pipe(UnknownError.mapToUnknownError);
443
+ else {
444
+ yield* Effect.spanEvent(`pull:advance`, {
445
+ newEventsCount: newEvents.length,
446
+ ...(TRACE_VERBOSE === true ? { mergeResult: jsonStringify(mergeResult) } : {}),
447
+ });
448
+ // Ensure push fiber is active after advance by restarting with current pending (non-client) events
449
+ const globalPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
450
+ const eventDef = schema.eventsDefsMap.get(event.name);
451
+ return eventDef === undefined ? true : eventDef.options.clientOnly === false;
452
+ });
453
+ yield* restartBackendPushing(globalPendingEvents);
454
+ yield* connectedClientSessionPullQueues.offer({
455
+ payload: SyncState.payloadFromMergeResult(mergeResult),
456
+ leaderHead: mergeResult.newSyncState.localHead,
457
+ });
458
+ if (mergeResult.confirmedEvents.length > 0) {
459
+ // `mergeResult.confirmedEvents` don't contain the correct sync metadata, so we need to use
460
+ // `newEvents` instead which we filter via `mergeResult.confirmedEvents`
461
+ const confirmedNewEvents = newEvents.filter((event) => mergeResult.confirmedEvents.some((confirmedEvent) => EventSequenceNumber.Client.isEqual(event.seqNum, confirmedEvent.seqNum)));
462
+ yield* Eventlog.updateSyncMetadata(confirmedNewEvents).pipe(Effect.orDieDebugger);
463
+ }
470
464
  }
465
+ // Removes the changeset rows which are no longer needed as we'll never have to rollback beyond this point
466
+ trimChangesetRows(db, newBackendHead);
467
+ advancePushHead(mergeResult.newSyncState.localHead);
468
+ yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined });
469
+ yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState);
470
+ }).pipe(Effect.exit);
471
+ if (Exit.isFailure(chunkExit) === true) {
472
+ yield* releasePullMutexIfHeld;
473
+ return yield* Effect.failCause(chunkExit.cause);
471
474
  }
472
- // Removes the changeset rows which are no longer needed as we'll never have to rollback beyond this point
473
- trimChangesetRows(db, newBackendHead);
474
- advancePushHead(mergeResult.newSyncState.localHead);
475
- yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined });
476
- yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState);
477
- // Allow local pushes to be processed again
478
- if (pageInfo._tag === 'NoMore') {
479
- yield* localPushesLatch.open;
475
+ if (isPullPaginationComplete(pageInfo) === true) {
476
+ yield* releasePullMutexIfHeld;
480
477
  }
481
478
  });
482
- const syncState = yield* syncStateSref;
483
- if (syncState === undefined)
484
- return shouldNeverHappen('Not initialized');
479
+ const syncState = yield* Effect.fromNullable(yield* syncStateSref).pipe(Effect.orDieDebugger);
485
480
  const cursorInfo = yield* Eventlog.getSyncBackendCursorInfo({ remoteHead: syncState.upstreamHead.global });
486
481
  const hashMaterializerResult = makeMaterializerHash({ schema, dbState });
487
482
  yield* syncBackend.pull(cursorInfo, { live: livePull }).pipe(
488
483
  // TODO only take from queue while connected
489
484
  Stream.tap(({ batch, pageInfo }) => Effect.gen(function* () {
490
- // yield* Effect.spanEvent('batch', {
491
- // attributes: {
492
- // batchSize: batch.length,
493
- // batch: TRACE_VERBOSE ? batch : undefined,
494
- // },
495
- // })
496
485
  // NOTE we only want to take process events when the sync backend is connected
497
486
  // (e.g. needed for simulating being offline)
498
487
  // TODO remove when there's a better way to handle this in stream above
@@ -505,11 +494,11 @@ const backgroundBackendPulling = ({ isClientEvent, restartBackendPushing, otelSp
505
494
  materializerHashSession: Option.none(),
506
495
  })), pageInfo);
507
496
  yield* initialBlockingSyncContext.update({ processed: batch.length, pageInfo });
508
- })), Stream.runDrain, Effect.interruptible);
497
+ })), Stream.runDrain, Effect.interruptible, Effect.ensuring(releasePullMutexIfHeld));
509
498
  // Should only ever happen when livePull is false
510
499
  yield* Effect.logDebug('backend-pulling finished', { livePull });
511
- }).pipe(Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pulling'));
512
- const backgroundBackendPushing = ({ syncBackendPushQueue, otelSpan, devtoolsLatch, backendPushBatchSize, }) => Effect.gen(function* () {
500
+ });
501
+ const backgroundBackendPushing = Effect.fn('@livestore/common:LeaderSyncProcessor:backend-pushing')(function* ({ syncBackendPushQueue, devtoolsLatch, backendPushBatchSize, }) {
513
502
  const { syncBackend } = yield* LeaderThreadCtx;
514
503
  if (syncBackend === undefined)
515
504
  return;
@@ -520,45 +509,47 @@ const backgroundBackendPushing = ({ syncBackendPushQueue, otelSpan, devtoolsLatc
520
509
  if (devtoolsLatch !== undefined) {
521
510
  yield* devtoolsLatch.await;
522
511
  }
523
- otelSpan?.addEvent('backend-push', {
512
+ yield* Effect.spanEvent('backend-push', {
524
513
  batchSize: queueItems.length,
525
- batch: TRACE_VERBOSE ? JSON.stringify(queueItems) : undefined,
514
+ ...(TRACE_VERBOSE === true ? { batch: jsonStringify(queueItems) } : {}),
526
515
  });
527
516
  // Push with declarative retry/backoff using Effect schedules
528
517
  // - Exponential backoff starting at 1s and doubling (1s, 2s, 4s, 8s, 16s, 30s ...)
529
518
  // - Delay clamped at 30s (continues retrying at 30s)
530
519
  // - Resets automatically after successful push
531
520
  // TODO(metrics): expose counters/gauges for retry attempts and queue health via devtools/metrics
532
- // Only retry for transient UnknownError cases
533
- const isRetryable = (err) => err._tag === 'InvalidPushError' && err.cause._tag === 'LiveStore.UnknownError';
534
- // Input: InvalidPushError | IsOfflineError, Output: Duration
535
- const retrySchedule = Schedule.exponential(Duration.seconds(1)).pipe(Schedule.andThenEither(Schedule.spaced(Duration.seconds(30))), // clamp at 30 second intervals
536
- Schedule.compose(Schedule.elapsed), Schedule.whileInput(isRetryable));
537
521
  yield* Effect.gen(function* () {
538
522
  const iteration = yield* Schedule.CurrentIterationMetadata;
539
523
  const pushResult = yield* syncBackend.push(queueItems.map((_) => _.toGlobal())).pipe(Effect.either);
540
524
  const retries = iteration.recurrence;
541
525
  if (retries > 0 && pushResult._tag === 'Right') {
542
- otelSpan?.addEvent('backend-push-retry-success', { retries, batchSize: queueItems.length });
526
+ yield* Effect.spanEvent('backend-push-retry-success', { retries, batchSize: queueItems.length });
543
527
  }
544
528
  if (pushResult._tag === 'Left') {
545
- otelSpan?.addEvent('backend-push-error', {
529
+ yield* Effect.spanEvent('backend-push-error', {
546
530
  error: pushResult.left.toString(),
547
531
  retries,
548
532
  batchSize: queueItems.length,
549
533
  });
550
534
  const error = pushResult.left;
551
- if (error._tag === 'IsOfflineError' ||
552
- (error._tag === 'InvalidPushError' && error.cause._tag === 'ServerAheadError')) {
535
+ if (error._tag === 'ServerAheadError') {
553
536
  // It's a core part of the sync protocol that the sync backend will emit a new pull chunk alongside the ServerAheadError
554
537
  yield* Effect.logDebug('handled backend-push-error (waiting for interupt caused by pull)', { error });
555
538
  return yield* Effect.never;
556
539
  }
557
540
  return yield* error;
558
541
  }
559
- }).pipe(Effect.retry(retrySchedule));
542
+ }).pipe(
543
+ // Retry transient errors
544
+ Effect.retry({
545
+ schedule: Schedule.exponential(Duration.seconds(1)).pipe(Schedule.modifyDelay((_, delay) => Duration.min(delay, Duration.seconds(30))) // Cap delay at 30s intervals.
546
+ ),
547
+ while: (error) => error._tag === 'IsOfflineError' || error._tag === 'UnknownError',
548
+ }),
549
+ // This is needed to narrow the Error type. Our retry policy runs indefinitely, but Effect.retry does not narrow the Error type.
550
+ Effect.catchIf((error) => error._tag === 'IsOfflineError' || error._tag === 'UnknownError', Effect.die));
560
551
  }
561
- }).pipe(Effect.interruptible, Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pushing'));
552
+ }, Effect.interruptible);
562
553
  const trimChangesetRows = (db, newHead) => {
563
554
  // Since we're using the session changeset rows to query for the current head,
564
555
  // we're keeping at least one row for the current head, and thus are using `<` instead of `<=`
@@ -627,7 +618,7 @@ const makePullQueueSet = Effect.gen(function* () {
627
618
  });
628
619
  const offer = (item) => Effect.gen(function* () {
629
620
  const seqNumStr = EventSequenceNumber.Client.toString(item.leaderHead);
630
- if (cachedPayloads.has(seqNumStr)) {
621
+ if (cachedPayloads.has(seqNumStr) === true) {
631
622
  cachedPayloads.get(seqNumStr).push(item.payload);
632
623
  }
633
624
  else {
@@ -656,24 +647,63 @@ const validatePushBatch = (batch, pushHead) => Effect.gen(function* () {
656
647
  if (batch.length === 0) {
657
648
  return;
658
649
  }
659
- // Example: session A already enqueued e1…e6 while session B (same client, different
660
- // session) still believes the head is e1 and submits [e2, e7, e8]. The numbers look
661
- // monotonic from B’s perspective, but we must reject and force B to rebase locally
662
- // so the leader never regresses.
650
+ // Defensive check: callers should already provide a strictly increasing sequence
651
+ // of event numbers.
663
652
  for (let i = 1; i < batch.length; i++) {
664
- if (EventSequenceNumber.Client.isGreaterThanOrEqual(batch[i - 1].seqNum, batch[i].seqNum)) {
665
- return yield* LeaderAheadError.make({
666
- minimumExpectedNum: batch[i - 1].seqNum,
667
- providedNum: batch[i].seqNum,
653
+ if (EventSequenceNumber.Client.isGreaterThanOrEqual(batch[i - 1].seqNum, batch[i].seqNum) === true) {
654
+ return yield* NonMonotonicBatchError.make({
655
+ precedingSeqNum: batch[i - 1].seqNum,
656
+ violatingSeqNum: batch[i].seqNum,
657
+ violationIndex: i,
658
+ sessionId: batch[i].sessionId,
668
659
  });
669
660
  }
670
661
  }
671
- // Make sure smallest sequence number is > pushHead
672
- if (EventSequenceNumber.Client.isGreaterThanOrEqual(pushHead, batch[0].seqNum)) {
662
+ // Reject stale batches whose first event is at or behind the leader's push head.
663
+ if (EventSequenceNumber.Client.isGreaterThanOrEqual(pushHead, batch[0].seqNum) === true) {
673
664
  return yield* LeaderAheadError.make({
674
665
  minimumExpectedNum: pushHead,
675
666
  providedNum: batch[0].seqNum,
667
+ sessionId: batch[0].sessionId,
676
668
  });
677
669
  }
678
670
  });
671
+ /**
672
+ * Handles a BackendIdMismatchError based on the configured behavior.
673
+ * This occurs when the sync backend has been reset and has a new identity.
674
+ */
675
+ const handleBackendIdMismatch = Effect.fn('@livestore/common:LeaderSyncProcessor:handleBackendIdMismatch')(function* ({ error, onBackendIdMismatch, shutdownChannel, }) {
676
+ const { dbEventlog, dbState } = yield* LeaderThreadCtx;
677
+ if (onBackendIdMismatch === 'reset') {
678
+ yield* Effect.logWarning('Sync backend identity changed (backend was reset). Clearing local storage and shutting down.', error);
679
+ // Clear local databases so the client can start fresh on next boot
680
+ yield* clearLocalDatabases({ dbEventlog, dbState });
681
+ // Send shutdown signal with special reason
682
+ yield* shutdownChannel.send(IntentionalShutdownCause.make({ reason: 'backend-id-mismatch' })).pipe(Effect.orDie);
683
+ return yield* Effect.die(error);
684
+ }
685
+ if (onBackendIdMismatch === 'shutdown') {
686
+ yield* Effect.logWarning('Sync backend identity changed (backend was reset). Shutting down without clearing local storage.', error);
687
+ yield* shutdownChannel.send(error).pipe(Effect.orDie);
688
+ return yield* Effect.die(error);
689
+ }
690
+ // ignore mode
691
+ if (LS_DEV === true) {
692
+ yield* Effect.logDebug('Ignoring BackendIdMismatchError (sync backend was reset but client continues with stale data)', error);
693
+ }
694
+ });
695
+ /**
696
+ * Clears local databases (eventlog and state) so the client can start fresh on next boot.
697
+ * This is used when the sync backend identity has changed (i.e. backend was reset).
698
+ */
699
+ const clearLocalDatabases = ({ dbEventlog, dbState }) => Effect.sync(() => {
700
+ // Clear eventlog tables
701
+ dbEventlog.execute(sql `DELETE FROM ${EVENTLOG_META_TABLE}`);
702
+ dbEventlog.execute(sql `DELETE FROM ${SYNC_STATUS_TABLE}`);
703
+ // Drop all state tables - they'll be recreated on next boot
704
+ const tables = dbState.select(sql `SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'`);
705
+ for (const { name } of tables) {
706
+ dbState.execute(`DROP TABLE IF EXISTS "${name}"`);
707
+ }
708
+ });
679
709
  //# sourceMappingURL=LeaderSyncProcessor.js.map