@livestore/common 0.4.0-dev.22 → 0.4.0-dev.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (313)
  1. package/dist/.tsbuildinfo +1 -1
  2. package/dist/ClientSessionLeaderThreadProxy.d.ts +9 -9
  3. package/dist/ClientSessionLeaderThreadProxy.d.ts.map +1 -1
  4. package/dist/WorkerTransportError.d.ts +11 -0
  5. package/dist/WorkerTransportError.d.ts.map +1 -0
  6. package/dist/WorkerTransportError.js +11 -0
  7. package/dist/WorkerTransportError.js.map +1 -0
  8. package/dist/adapter-types.d.ts +3 -3
  9. package/dist/adapter-types.d.ts.map +1 -1
  10. package/dist/adapter-types.js.map +1 -1
  11. package/dist/bounded-collections.d.ts.map +1 -1
  12. package/dist/bounded-collections.js +6 -4
  13. package/dist/bounded-collections.js.map +1 -1
  14. package/dist/debug-info.js +4 -4
  15. package/dist/debug-info.js.map +1 -1
  16. package/dist/devtools/devtools-messages-common.js +1 -1
  17. package/dist/devtools/devtools-messages-common.js.map +1 -1
  18. package/dist/devtools/mod.js +1 -1
  19. package/dist/devtools/mod.js.map +1 -1
  20. package/dist/errors.d.ts +15 -15
  21. package/dist/errors.d.ts.map +1 -1
  22. package/dist/errors.js +11 -11
  23. package/dist/errors.js.map +1 -1
  24. package/dist/index.d.ts +2 -0
  25. package/dist/index.d.ts.map +1 -1
  26. package/dist/index.js +2 -0
  27. package/dist/index.js.map +1 -1
  28. package/dist/leader-thread/LeaderSyncProcessor.d.ts +20 -6
  29. package/dist/leader-thread/LeaderSyncProcessor.d.ts.map +1 -1
  30. package/dist/leader-thread/LeaderSyncProcessor.js +287 -257
  31. package/dist/leader-thread/LeaderSyncProcessor.js.map +1 -1
  32. package/dist/leader-thread/RejectedPushError.d.ts +107 -0
  33. package/dist/leader-thread/RejectedPushError.d.ts.map +1 -0
  34. package/dist/leader-thread/RejectedPushError.js +78 -0
  35. package/dist/leader-thread/RejectedPushError.js.map +1 -0
  36. package/dist/leader-thread/connection.js +1 -1
  37. package/dist/leader-thread/connection.js.map +1 -1
  38. package/dist/leader-thread/eventlog.d.ts.map +1 -1
  39. package/dist/leader-thread/eventlog.js +12 -11
  40. package/dist/leader-thread/eventlog.js.map +1 -1
  41. package/dist/leader-thread/leader-worker-devtools.d.ts +1 -2
  42. package/dist/leader-thread/leader-worker-devtools.d.ts.map +1 -1
  43. package/dist/leader-thread/leader-worker-devtools.js +25 -14
  44. package/dist/leader-thread/leader-worker-devtools.js.map +1 -1
  45. package/dist/leader-thread/make-leader-thread-layer.d.ts +8 -3
  46. package/dist/leader-thread/make-leader-thread-layer.d.ts.map +1 -1
  47. package/dist/leader-thread/make-leader-thread-layer.js +7 -10
  48. package/dist/leader-thread/make-leader-thread-layer.js.map +1 -1
  49. package/dist/leader-thread/make-leader-thread-layer.test.js +1 -1
  50. package/dist/leader-thread/make-leader-thread-layer.test.js.map +1 -1
  51. package/dist/leader-thread/materialize-event.js +4 -4
  52. package/dist/leader-thread/materialize-event.js.map +1 -1
  53. package/dist/leader-thread/recreate-db.js +1 -1
  54. package/dist/leader-thread/recreate-db.js.map +1 -1
  55. package/dist/leader-thread/shutdown-channel.d.ts +2 -2
  56. package/dist/leader-thread/shutdown-channel.d.ts.map +1 -1
  57. package/dist/leader-thread/shutdown-channel.js +2 -2
  58. package/dist/leader-thread/shutdown-channel.js.map +1 -1
  59. package/dist/leader-thread/stream-events.d.ts.map +1 -1
  60. package/dist/leader-thread/stream-events.js +4 -3
  61. package/dist/leader-thread/stream-events.js.map +1 -1
  62. package/dist/leader-thread/types.d.ts +7 -6
  63. package/dist/leader-thread/types.d.ts.map +1 -1
  64. package/dist/leader-thread/types.js.map +1 -1
  65. package/dist/logging.js +4 -4
  66. package/dist/logging.js.map +1 -1
  67. package/dist/make-client-session.js +2 -2
  68. package/dist/make-client-session.js.map +1 -1
  69. package/dist/materializer-helper.js +6 -6
  70. package/dist/materializer-helper.js.map +1 -1
  71. package/dist/otel.d.ts +1 -1
  72. package/dist/otel.d.ts.map +1 -1
  73. package/dist/otel.js +2 -2
  74. package/dist/otel.js.map +1 -1
  75. package/dist/rematerialize-from-eventlog.d.ts +1 -1
  76. package/dist/rematerialize-from-eventlog.d.ts.map +1 -1
  77. package/dist/rematerialize-from-eventlog.js +11 -9
  78. package/dist/rematerialize-from-eventlog.js.map +1 -1
  79. package/dist/schema/EventDef/define.d.ts +2 -2
  80. package/dist/schema/EventDef/define.d.ts.map +1 -1
  81. package/dist/schema/EventDef/define.js +4 -4
  82. package/dist/schema/EventDef/define.js.map +1 -1
  83. package/dist/schema/EventDef/deprecated.js +3 -3
  84. package/dist/schema/EventDef/deprecated.js.map +1 -1
  85. package/dist/schema/EventDef/deprecated.test.js +1 -1
  86. package/dist/schema/EventDef/deprecated.test.js.map +1 -1
  87. package/dist/schema/EventSequenceNumber/client.d.ts.map +1 -1
  88. package/dist/schema/EventSequenceNumber/client.js +11 -11
  89. package/dist/schema/EventSequenceNumber/client.js.map +1 -1
  90. package/dist/schema/EventSequenceNumber.test.js +1 -1
  91. package/dist/schema/EventSequenceNumber.test.js.map +1 -1
  92. package/dist/schema/LiveStoreEvent/client.d.ts.map +1 -1
  93. package/dist/schema/LiveStoreEvent/client.js +6 -3
  94. package/dist/schema/LiveStoreEvent/client.js.map +1 -1
  95. package/dist/schema/LiveStoreEvent/client.test.d.ts +2 -0
  96. package/dist/schema/LiveStoreEvent/client.test.d.ts.map +1 -0
  97. package/dist/schema/LiveStoreEvent/client.test.js +83 -0
  98. package/dist/schema/LiveStoreEvent/client.test.js.map +1 -0
  99. package/dist/schema/schema.d.ts.map +1 -1
  100. package/dist/schema/schema.js +7 -4
  101. package/dist/schema/schema.js.map +1 -1
  102. package/dist/schema/state/sqlite/client-document-def.d.ts.map +1 -1
  103. package/dist/schema/state/sqlite/client-document-def.js +18 -6
  104. package/dist/schema/state/sqlite/client-document-def.js.map +1 -1
  105. package/dist/schema/state/sqlite/client-document-def.test.js +1 -1
  106. package/dist/schema/state/sqlite/client-document-def.test.js.map +1 -1
  107. package/dist/schema/state/sqlite/column-annotations.d.ts.map +1 -1
  108. package/dist/schema/state/sqlite/column-annotations.js +1 -1
  109. package/dist/schema/state/sqlite/column-annotations.js.map +1 -1
  110. package/dist/schema/state/sqlite/column-annotations.test.js +1 -1
  111. package/dist/schema/state/sqlite/column-annotations.test.js.map +1 -1
  112. package/dist/schema/state/sqlite/column-def.d.ts.map +1 -1
  113. package/dist/schema/state/sqlite/column-def.js +36 -34
  114. package/dist/schema/state/sqlite/column-def.js.map +1 -1
  115. package/dist/schema/state/sqlite/column-def.test.js +7 -6
  116. package/dist/schema/state/sqlite/column-def.test.js.map +1 -1
  117. package/dist/schema/state/sqlite/column-spec.d.ts.map +1 -1
  118. package/dist/schema/state/sqlite/column-spec.js +8 -8
  119. package/dist/schema/state/sqlite/column-spec.js.map +1 -1
  120. package/dist/schema/state/sqlite/column-spec.test.js +1 -1
  121. package/dist/schema/state/sqlite/column-spec.test.js.map +1 -1
  122. package/dist/schema/state/sqlite/db-schema/ast/sqlite.js +2 -2
  123. package/dist/schema/state/sqlite/db-schema/ast/sqlite.js.map +1 -1
  124. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.d.ts +2 -2
  125. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.d.ts.map +1 -1
  126. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.js +11 -2
  127. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.js.map +1 -1
  128. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.js +1 -1
  129. package/dist/schema/state/sqlite/db-schema/dsl/field-defs.test.js.map +1 -1
  130. package/dist/schema/state/sqlite/db-schema/dsl/mod.d.ts +1 -1
  131. package/dist/schema/state/sqlite/db-schema/dsl/mod.d.ts.map +1 -1
  132. package/dist/schema/state/sqlite/db-schema/dsl/mod.js +1 -1
  133. package/dist/schema/state/sqlite/db-schema/dsl/mod.js.map +1 -1
  134. package/dist/schema/state/sqlite/mod.d.ts.map +1 -1
  135. package/dist/schema/state/sqlite/mod.js +3 -5
  136. package/dist/schema/state/sqlite/mod.js.map +1 -1
  137. package/dist/schema/state/sqlite/query-builder/api.d.ts +10 -2
  138. package/dist/schema/state/sqlite/query-builder/api.d.ts.map +1 -1
  139. package/dist/schema/state/sqlite/query-builder/astToSql.js +11 -11
  140. package/dist/schema/state/sqlite/query-builder/astToSql.js.map +1 -1
  141. package/dist/schema/state/sqlite/query-builder/impl.d.ts +1 -1
  142. package/dist/schema/state/sqlite/query-builder/impl.d.ts.map +1 -1
  143. package/dist/schema/state/sqlite/query-builder/impl.js +28 -14
  144. package/dist/schema/state/sqlite/query-builder/impl.js.map +1 -1
  145. package/dist/schema/state/sqlite/query-builder/impl.test.js +3 -2
  146. package/dist/schema/state/sqlite/query-builder/impl.test.js.map +1 -1
  147. package/dist/schema/state/sqlite/schema-helpers.js +2 -2
  148. package/dist/schema/state/sqlite/schema-helpers.js.map +1 -1
  149. package/dist/schema/state/sqlite/table-def.d.ts +5 -3
  150. package/dist/schema/state/sqlite/table-def.d.ts.map +1 -1
  151. package/dist/schema/state/sqlite/table-def.js +1 -1
  152. package/dist/schema/state/sqlite/table-def.js.map +1 -1
  153. package/dist/schema/state/sqlite/table-def.test.js +57 -4
  154. package/dist/schema/state/sqlite/table-def.test.js.map +1 -1
  155. package/dist/schema/unknown-events.d.ts +1 -1
  156. package/dist/schema/unknown-events.d.ts.map +1 -1
  157. package/dist/schema/unknown-events.js +1 -1
  158. package/dist/schema/unknown-events.js.map +1 -1
  159. package/dist/schema-management/__tests__/migrations-autoincrement-quoting.test.js +1 -1
  160. package/dist/schema-management/__tests__/migrations-autoincrement-quoting.test.js.map +1 -1
  161. package/dist/schema-management/common.js +2 -2
  162. package/dist/schema-management/common.js.map +1 -1
  163. package/dist/schema-management/migrations.js +1 -1
  164. package/dist/schema-management/migrations.js.map +1 -1
  165. package/dist/sql-queries/sql-queries.js +8 -6
  166. package/dist/sql-queries/sql-queries.js.map +1 -1
  167. package/dist/sql-queries/sql-query-builder.d.ts.map +1 -1
  168. package/dist/sql-queries/sql-query-builder.js.map +1 -1
  169. package/dist/sqlite-db-helper.js +3 -3
  170. package/dist/sqlite-db-helper.js.map +1 -1
  171. package/dist/sqlite-types.d.ts +2 -2
  172. package/dist/sqlite-types.d.ts.map +1 -1
  173. package/dist/sqlite-types.js.map +1 -1
  174. package/dist/sync/ClientSessionSyncProcessor.d.ts +8 -9
  175. package/dist/sync/ClientSessionSyncProcessor.d.ts.map +1 -1
  176. package/dist/sync/ClientSessionSyncProcessor.js +95 -113
  177. package/dist/sync/ClientSessionSyncProcessor.js.map +1 -1
  178. package/dist/sync/errors.d.ts +0 -38
  179. package/dist/sync/errors.d.ts.map +1 -1
  180. package/dist/sync/errors.js +3 -20
  181. package/dist/sync/errors.js.map +1 -1
  182. package/dist/sync/mock-sync-backend.d.ts +5 -3
  183. package/dist/sync/mock-sync-backend.d.ts.map +1 -1
  184. package/dist/sync/mock-sync-backend.js +70 -68
  185. package/dist/sync/mock-sync-backend.js.map +1 -1
  186. package/dist/sync/next/compact-events.js +6 -6
  187. package/dist/sync/next/compact-events.js.map +1 -1
  188. package/dist/sync/next/facts.d.ts.map +1 -1
  189. package/dist/sync/next/facts.js +6 -6
  190. package/dist/sync/next/facts.js.map +1 -1
  191. package/dist/sync/next/history-dag-common.d.ts.map +1 -1
  192. package/dist/sync/next/history-dag-common.js +6 -6
  193. package/dist/sync/next/history-dag-common.js.map +1 -1
  194. package/dist/sync/next/history-dag.js +3 -3
  195. package/dist/sync/next/history-dag.js.map +1 -1
  196. package/dist/sync/next/rebase-events.js +1 -1
  197. package/dist/sync/next/rebase-events.js.map +1 -1
  198. package/dist/sync/next/test/compact-events.calculator.test.js +2 -2
  199. package/dist/sync/next/test/compact-events.calculator.test.js.map +1 -1
  200. package/dist/sync/next/test/compact-events.test.d.ts.map +1 -1
  201. package/dist/sync/next/test/compact-events.test.js +2 -2
  202. package/dist/sync/next/test/compact-events.test.js.map +1 -1
  203. package/dist/sync/next/test/event-fixtures.d.ts.map +1 -1
  204. package/dist/sync/next/test/event-fixtures.js +2 -2
  205. package/dist/sync/next/test/event-fixtures.js.map +1 -1
  206. package/dist/sync/sync-backend-kv.d.ts.map +1 -1
  207. package/dist/sync/sync-backend-kv.js.map +1 -1
  208. package/dist/sync/sync-backend.d.ts +3 -3
  209. package/dist/sync/sync-backend.d.ts.map +1 -1
  210. package/dist/sync/sync-backend.js +1 -1
  211. package/dist/sync/sync-backend.js.map +1 -1
  212. package/dist/sync/sync.d.ts +20 -0
  213. package/dist/sync/sync.d.ts.map +1 -1
  214. package/dist/sync/syncstate.d.ts +4 -17
  215. package/dist/sync/syncstate.d.ts.map +1 -1
  216. package/dist/sync/syncstate.js +51 -74
  217. package/dist/sync/syncstate.js.map +1 -1
  218. package/dist/sync/syncstate.test.js +112 -96
  219. package/dist/sync/syncstate.test.js.map +1 -1
  220. package/dist/sync/transport-chunking.js +3 -3
  221. package/dist/sync/transport-chunking.js.map +1 -1
  222. package/dist/sync/validate-push-payload.d.ts +2 -2
  223. package/dist/sync/validate-push-payload.d.ts.map +1 -1
  224. package/dist/sync/validate-push-payload.js +4 -6
  225. package/dist/sync/validate-push-payload.js.map +1 -1
  226. package/dist/util.js +2 -2
  227. package/dist/util.js.map +1 -1
  228. package/dist/version.d.ts.map +1 -1
  229. package/dist/version.js +2 -5
  230. package/dist/version.js.map +1 -1
  231. package/package.json +66 -12
  232. package/src/ClientSessionLeaderThreadProxy.ts +9 -9
  233. package/src/WorkerTransportError.ts +12 -0
  234. package/src/adapter-types.ts +9 -3
  235. package/src/bounded-collections.ts +6 -5
  236. package/src/debug-info.ts +4 -4
  237. package/src/devtools/devtools-messages-common.ts +1 -1
  238. package/src/devtools/mod.ts +1 -1
  239. package/src/errors.ts +18 -17
  240. package/src/index.ts +2 -0
  241. package/src/leader-thread/LeaderSyncProcessor.ts +421 -392
  242. package/src/leader-thread/RejectedPushError.ts +106 -0
  243. package/src/leader-thread/connection.ts +1 -1
  244. package/src/leader-thread/eventlog.ts +16 -14
  245. package/src/leader-thread/leader-worker-devtools.ts +96 -66
  246. package/src/leader-thread/make-leader-thread-layer.test.ts +1 -1
  247. package/src/leader-thread/make-leader-thread-layer.ts +33 -31
  248. package/src/leader-thread/materialize-event.ts +4 -4
  249. package/src/leader-thread/recreate-db.ts +1 -1
  250. package/src/leader-thread/shutdown-channel.ts +2 -6
  251. package/src/leader-thread/stream-events.ts +10 -5
  252. package/src/leader-thread/types.ts +7 -6
  253. package/src/logging.ts +4 -4
  254. package/src/make-client-session.ts +2 -2
  255. package/src/materializer-helper.ts +9 -9
  256. package/src/otel.ts +3 -2
  257. package/src/rematerialize-from-eventlog.ts +60 -60
  258. package/src/schema/EventDef/define.ts +6 -6
  259. package/src/schema/EventDef/deprecated.test.ts +2 -1
  260. package/src/schema/EventDef/deprecated.ts +3 -3
  261. package/src/schema/EventSequenceNumber/client.ts +11 -11
  262. package/src/schema/EventSequenceNumber.test.ts +2 -1
  263. package/src/schema/LiveStoreEvent/client.test.ts +97 -0
  264. package/src/schema/LiveStoreEvent/client.ts +6 -3
  265. package/src/schema/schema.ts +9 -4
  266. package/src/schema/state/sqlite/client-document-def.test.ts +2 -1
  267. package/src/schema/state/sqlite/client-document-def.ts +20 -6
  268. package/src/schema/state/sqlite/column-annotations.test.ts +2 -1
  269. package/src/schema/state/sqlite/column-annotations.ts +2 -1
  270. package/src/schema/state/sqlite/column-def.test.ts +8 -6
  271. package/src/schema/state/sqlite/column-def.ts +41 -36
  272. package/src/schema/state/sqlite/column-spec.test.ts +3 -1
  273. package/src/schema/state/sqlite/column-spec.ts +9 -8
  274. package/src/schema/state/sqlite/db-schema/ast/sqlite.ts +2 -2
  275. package/src/schema/state/sqlite/db-schema/dsl/field-defs.test.ts +2 -1
  276. package/src/schema/state/sqlite/db-schema/dsl/field-defs.ts +13 -4
  277. package/src/schema/state/sqlite/db-schema/dsl/mod.ts +3 -3
  278. package/src/schema/state/sqlite/mod.ts +4 -5
  279. package/src/schema/state/sqlite/query-builder/api.ts +12 -5
  280. package/src/schema/state/sqlite/query-builder/astToSql.ts +11 -11
  281. package/src/schema/state/sqlite/query-builder/impl.test.ts +4 -2
  282. package/src/schema/state/sqlite/query-builder/impl.ts +26 -12
  283. package/src/schema/state/sqlite/schema-helpers.ts +2 -2
  284. package/src/schema/state/sqlite/table-def.test.ts +67 -4
  285. package/src/schema/state/sqlite/table-def.ts +8 -15
  286. package/src/schema/unknown-events.ts +2 -2
  287. package/src/schema-management/__tests__/migrations-autoincrement-quoting.test.ts +3 -1
  288. package/src/schema-management/common.ts +2 -2
  289. package/src/schema-management/migrations.ts +1 -1
  290. package/src/sql-queries/sql-queries.ts +10 -6
  291. package/src/sql-queries/sql-query-builder.ts +1 -0
  292. package/src/sqlite-db-helper.ts +3 -3
  293. package/src/sqlite-types.ts +3 -2
  294. package/src/sync/ClientSessionSyncProcessor.ts +148 -152
  295. package/src/sync/errors.ts +10 -22
  296. package/src/sync/mock-sync-backend.ts +139 -97
  297. package/src/sync/next/compact-events.ts +5 -5
  298. package/src/sync/next/facts.ts +7 -6
  299. package/src/sync/next/history-dag-common.ts +9 -6
  300. package/src/sync/next/history-dag.ts +3 -3
  301. package/src/sync/next/rebase-events.ts +1 -1
  302. package/src/sync/next/test/compact-events.calculator.test.ts +3 -2
  303. package/src/sync/next/test/compact-events.test.ts +4 -3
  304. package/src/sync/next/test/event-fixtures.ts +2 -2
  305. package/src/sync/sync-backend-kv.ts +1 -0
  306. package/src/sync/sync-backend.ts +5 -4
  307. package/src/sync/sync.ts +21 -0
  308. package/src/sync/syncstate.test.ts +513 -435
  309. package/src/sync/syncstate.ts +80 -86
  310. package/src/sync/transport-chunking.ts +3 -3
  311. package/src/sync/validate-push-payload.ts +4 -6
  312. package/src/util.ts +2 -2
  313. package/src/version.ts +2 -6
@@ -1,4 +1,4 @@
1
- import { casesHandled, isNotUndefined, LS_DEV, shouldNeverHappen, TRACE_VERBOSE } from '@livestore/utils'
1
+ import { casesHandled, isNotUndefined, LS_DEV, TRACE_VERBOSE } from '@livestore/utils'
2
2
  import type { HttpClient, Runtime, Scope, Tracer } from '@livestore/utils/effect'
3
3
  import {
4
4
  BucketQueue,
@@ -10,41 +10,37 @@ import {
10
10
  FiberHandle,
11
11
  Layer,
12
12
  Option,
13
- OtelTracer,
14
13
  Queue,
15
14
  ReadonlyArray,
16
15
  Schedule,
16
+ Schema,
17
17
  Stream,
18
18
  Subscribable,
19
19
  SubscriptionRef,
20
20
  } from '@livestore/utils/effect'
21
- import type * as otel from '@opentelemetry/api'
22
- import { type IntentionalShutdownCause, type MaterializeError, type SqliteDb, UnknownError } from '../adapter-types.ts'
21
+
22
+ import { type MaterializeError, type SqliteDb, UnknownError } from '../adapter-types.ts'
23
+ import { IntentionalShutdownCause } from '../errors.ts'
23
24
  import { makeMaterializerHash } from '../materializer-helper.ts'
24
25
  import type { LiveStoreSchema } from '../schema/mod.ts'
25
26
  import { EventSequenceNumber, LiveStoreEvent, resolveEventDef, SystemTables } from '../schema/mod.ts'
26
- import {
27
- type InvalidPullError,
28
- type InvalidPushError,
29
- type IsOfflineError,
30
- LeaderAheadError,
31
- type SyncBackend,
32
- } from '../sync/sync.ts'
27
+ import { EVENTLOG_META_TABLE, SYNC_STATUS_TABLE } from '../schema/state/sqlite/system-tables/eventlog-tables.ts'
28
+ import type { BackendIdMismatchError, IsOfflineError, SyncBackend } from '../sync/sync.ts'
29
+ import { isRejectedPushError, LeaderAheadError, NonMonotonicBatchError, StaleRebaseGenerationError } from './RejectedPushError.ts'
33
30
  import * as SyncState from '../sync/syncstate.ts'
34
31
  import { sql } from '../util.ts'
35
32
  import * as Eventlog from './eventlog.ts'
36
33
  import { rollback } from './materialize-event.ts'
34
+ import type { ShutdownChannel } from './shutdown-channel.ts'
37
35
  import type { InitialBlockingSyncContext, LeaderSyncProcessor } from './types.ts'
38
36
  import { LeaderThreadCtx } from './types.ts'
39
37
 
40
- // WORKAROUND: @effect/opentelemetry mis-parses `Span.addEvent(name, attributes)` and treats the attributes object as a
41
- // time input, causing `TypeError: {} is not iterable` at runtime.
42
- // Upstream: https://github.com/Effect-TS/effect/pull/5929
43
- // TODO: simplify back to the 2-arg overload once the upstream fix is released and adopted.
38
+ /** Serialize value to JSON string for trace attributes */
39
+ const jsonStringify = Schema.encodeSync(Schema.parseJson())
44
40
 
45
41
  type LocalPushQueueItem = [
46
42
  event: LiveStoreEvent.Client.EncodedWithMeta,
47
- deferred: Deferred.Deferred<void, LeaderAheadError> | undefined,
43
+ deferred: Deferred.Deferred<void, LeaderAheadError | StaleRebaseGenerationError> | undefined,
48
44
  ]
49
45
 
50
46
  /**
@@ -65,11 +61,11 @@ type LocalPushQueueItem = [
65
61
  * - Maintains events in ascending order.
66
62
  * - Uses `Deferred` objects to resolve/reject events based on application success.
67
63
  * - Processes events from the queue, applying events in batches.
68
- * - Controlled by a `Latch` to manage execution flow.
69
- * - The latch closes on pull receipt and re-opens post-pull completion.
64
+ * - Controlled by a mutex (`Semaphore(1)`) to ensure mutual exclusion between local push and backend pull processing.
65
+ * - The backend pull side acquires the mutex before processing and releases it on post-pull completion.
70
66
  * - Processes up to `maxBatchSize` events per cycle.
71
67
  *
72
- * Currently we're advancing the state db and eventlog in lockstep, but we could also decouple this in the future
68
+ * Currently, we're advancing the state db and eventlog in lockstep, but we could also decouple this in the future
73
69
  *
74
70
  * Tricky concurrency scenarios:
75
71
  * - Queued local push batches becoming invalid due to a prior local push item being rejected.
@@ -83,6 +79,7 @@ export const makeLeaderSyncProcessor = ({
83
79
  initialBlockingSyncContext,
84
80
  initialSyncState,
85
81
  onError,
82
+ onBackendIdMismatch,
86
83
  livePull,
87
84
  params,
88
85
  testing,
@@ -92,7 +89,21 @@ export const makeLeaderSyncProcessor = ({
92
89
  initialBlockingSyncContext: InitialBlockingSyncContext
93
90
  /** Initial sync state rehydrated from the persisted eventlog or initial sync state */
94
91
  initialSyncState: SyncState.SyncState
92
+ /**
93
+ * What to do when a failure (any cause) occurs (except `BackendIdMismatchError`).
94
+ *
95
+ * - `'shutdown'`: Send the error to the shutdown channel and terminate the sync processor.
96
+ * - `'ignore'`: Continue running.
97
+ */
95
98
  onError: 'shutdown' | 'ignore'
99
+ /**
100
+ * What to do when the sync backend identity has changed (i.e. the backend was reset).
101
+ *
102
+ * - `'reset'`: Clear local databases (eventlog and state) and send an intentional shutdown signal.
103
+ * - `'shutdown'`: Send a shutdown signal without clearing local storage.
104
+ * - `'ignore'`: Continue running with stale data.
105
+ */
106
+ onBackendIdMismatch: 'reset' | 'shutdown' | 'ignore'
96
107
  params: {
97
108
  /**
98
109
  * Maximum number of local events to process per batch cycle.
@@ -146,7 +157,7 @@ export const makeLeaderSyncProcessor = ({
146
157
  localPushProcessing?: Effect.Effect<void>
147
158
  }
148
159
  }
149
- }): Effect.Effect<LeaderSyncProcessor, UnknownError, Scope.Scope> =>
160
+ }): Effect.Effect<LeaderSyncProcessor, never, Scope.Scope> =>
150
161
  Effect.gen(function* () {
151
162
  const syncBackendPushQueue = yield* BucketQueue.make<LiveStoreEvent.Client.EncodedWithMeta>()
152
163
  const localPushBatchSize = params.localPushBatchSize ?? 10
@@ -164,7 +175,6 @@ export const makeLeaderSyncProcessor = ({
164
175
  current: undefined as
165
176
  | undefined
166
177
  | {
167
- otelSpan: otel.Span | undefined
168
178
  span: Tracer.Span
169
179
  devtoolsLatch: Effect.Latch | undefined
170
180
  runtime: Runtime.Runtime<LeaderThreadCtx>
@@ -172,8 +182,8 @@ export const makeLeaderSyncProcessor = ({
172
182
  }
173
183
 
174
184
  const localPushesQueue = yield* BucketQueue.make<LocalPushQueueItem>()
175
- const localPushesLatch = yield* Effect.makeLatch(true)
176
- const pullLatch = yield* Effect.makeLatch(true)
185
+ // Ensures mutual exclusion between local push and backend pull processing.
186
+ const localPushBackendPullMutex = yield* Effect.makeSemaphore(1)
177
187
 
178
188
  /**
179
189
  * Additionally to the `syncStateSref` we also need the `pushHeadRef` in order to prevent old/duplicate
@@ -202,8 +212,8 @@ export const makeLeaderSyncProcessor = ({
202
212
 
203
213
  const waitForProcessing = options?.waitForProcessing ?? false
204
214
 
205
- if (waitForProcessing) {
206
- const deferreds = yield* Effect.forEach(newEvents, () => Deferred.make<void, LeaderAheadError>())
215
+ if (waitForProcessing === true) {
216
+ const deferreds = yield* Effect.forEach(newEvents, () => Deferred.make<void, LeaderAheadError | StaleRebaseGenerationError>())
207
217
 
208
218
  const items = newEvents.map((eventEncoded, i) => [eventEncoded, deferreds[i]] as LocalPushQueueItem)
209
219
 
@@ -218,16 +228,18 @@ export const makeLeaderSyncProcessor = ({
218
228
  Effect.withSpan('@livestore/common:LeaderSyncProcessor:push', {
219
229
  attributes: {
220
230
  batchSize: newEvents.length,
221
- batch: TRACE_VERBOSE ? newEvents : undefined,
231
+ batch: TRACE_VERBOSE === true ? newEvents : undefined,
222
232
  },
223
- links: ctxRef.current?.span ? [{ _tag: 'SpanLink', span: ctxRef.current.span, attributes: {} }] : undefined,
233
+ links:
234
+ ctxRef.current?.span !== undefined
235
+ ? [{ _tag: 'SpanLink', span: ctxRef.current.span, attributes: {} }]
236
+ : undefined,
224
237
  }),
225
238
  )
226
239
 
227
240
  const pushPartial: LeaderSyncProcessor['pushPartial'] = ({ event: { name, args }, clientId, sessionId }) =>
228
241
  Effect.gen(function* () {
229
- const syncState = yield* syncStateSref
230
- if (syncState === undefined) return shouldNeverHappen('Not initialized')
242
+ const syncState = yield* Effect.fromNullable(yield* syncStateSref).pipe(Effect.orDieDebugger)
231
243
 
232
244
  const resolution = yield* resolveEventDef(schema, {
233
245
  operation: '@livestore/common:LeaderSyncProcessor:pushPartial',
@@ -238,7 +250,7 @@ export const makeLeaderSyncProcessor = ({
238
250
  sessionId,
239
251
  seqNum: syncState.localHead,
240
252
  },
241
- }).pipe(UnknownError.mapToUnknownError)
253
+ })
242
254
 
243
255
  if (resolution._tag === 'unknown') {
244
256
  // Ignore partial pushes for unrecognised events – they are still
@@ -258,19 +270,20 @@ export const makeLeaderSyncProcessor = ({
258
270
  })
259
271
 
260
272
  yield* push([eventEncoded])
261
- }).pipe(Effect.catchTag('LeaderAheadError', Effect.orDie))
273
+ }).pipe(
274
+ // pushPartial constructs the event sequence number internally, so these errors should never happen.
275
+ Effect.catchIf(isRejectedPushError, Effect.die),
276
+ )
262
277
 
263
278
  // Starts various background loops
264
279
  const boot: LeaderSyncProcessor['boot'] = Effect.gen(function* () {
265
280
  const span = yield* Effect.currentSpan.pipe(Effect.orDie)
266
- const otelSpan = yield* OtelTracer.currentOtelSpan.pipe(Effect.catchAll(() => Effect.succeed(undefined)))
267
281
  const { devtools, shutdownChannel } = yield* LeaderThreadCtx
268
282
  const runtime = yield* Effect.runtime<LeaderThreadCtx>()
269
283
 
270
284
  ctxRef.current = {
271
- otelSpan,
272
285
  span,
273
- devtoolsLatch: devtools.enabled ? devtools.syncBackendLatch : undefined,
286
+ devtoolsLatch: devtools.enabled === true ? devtools.syncBackendLatch : undefined,
274
287
  runtime,
275
288
  }
276
289
 
@@ -291,19 +304,18 @@ export const makeLeaderSyncProcessor = ({
291
304
  }
292
305
  }
293
306
 
307
+ const handleBackendIdMismatchError = (error: BackendIdMismatchError) =>
308
+ handleBackendIdMismatch({ error, onBackendIdMismatch, shutdownChannel })
309
+
294
310
  const maybeShutdownOnError = (
295
311
  cause: Cause.Cause<
296
312
  | UnknownError
297
- | IntentionalShutdownCause
298
- | IsOfflineError
299
- | InvalidPushError
300
- | InvalidPullError
301
313
  | MaterializeError
302
314
  >,
303
315
  ) =>
304
316
  Effect.gen(function* () {
305
317
  if (onError === 'ignore') {
306
- if (LS_DEV) {
318
+ if (LS_DEV === true) {
307
319
  yield* Effect.logDebug(
308
320
  `Ignoring sync error (${cause._tag === 'Fail' ? cause.error._tag : cause._tag})`,
309
321
  Cause.pretty(cause),
@@ -312,35 +324,38 @@ export const makeLeaderSyncProcessor = ({
312
324
  return
313
325
  }
314
326
 
315
- const errorToSend = Cause.isFailType(cause) ? cause.error : UnknownError.make({ cause })
327
+ const errorToSend = Cause.isFailType(cause) === true ? cause.error : UnknownError.make({ cause })
316
328
  yield* shutdownChannel.send(errorToSend).pipe(Effect.orDie)
317
329
 
318
- return yield* Effect.die(cause)
330
+ return yield* Effect.failCause(cause).pipe(Effect.orDie)
319
331
  })
320
332
 
321
333
  yield* backgroundApplyLocalPushes({
322
- localPushesLatch,
334
+ localPushBackendPullMutex,
323
335
  localPushesQueue,
324
- pullLatch,
325
336
  syncStateSref,
326
337
  syncBackendPushQueue,
327
338
  schema,
328
339
  isClientEvent,
329
- otelSpan,
330
340
  connectedClientSessionPullQueues,
331
341
  localPushBatchSize,
332
342
  testing: {
333
343
  delay: testing?.delays?.localPushProcessing,
334
344
  },
335
- }).pipe(Effect.catchAllCause(maybeShutdownOnError), Effect.forkScoped)
345
+ }).pipe(
346
+ Effect.catchAllCause(maybeShutdownOnError),
347
+ Effect.forkScoped,
348
+ )
336
349
 
337
350
  const backendPushingFiberHandle = yield* FiberHandle.make<void, never>()
338
351
  const backendPushingEffect = backgroundBackendPushing({
339
352
  syncBackendPushQueue,
340
- otelSpan,
341
353
  devtoolsLatch: ctxRef.current?.devtoolsLatch,
342
354
  backendPushBatchSize,
343
- }).pipe(Effect.catchAllCause(maybeShutdownOnError))
355
+ }).pipe(
356
+ Effect.catchTag('BackendIdMismatchError', handleBackendIdMismatchError),
357
+ Effect.catchAllCause(maybeShutdownOnError),
358
+ )
344
359
 
345
360
  yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect)
346
361
 
@@ -359,20 +374,21 @@ export const makeLeaderSyncProcessor = ({
359
374
  yield* FiberHandle.run(backendPushingFiberHandle, backendPushingEffect)
360
375
  }),
361
376
  syncStateSref,
362
- localPushesLatch,
363
- pullLatch,
377
+ localPushBackendPullMutex,
364
378
  livePull,
365
379
  dbState,
366
- otelSpan,
367
380
  initialBlockingSyncContext,
368
381
  devtoolsLatch: ctxRef.current?.devtoolsLatch,
369
382
  connectedClientSessionPullQueues,
370
383
  advancePushHead,
371
384
  }).pipe(
372
385
  Effect.retry({
373
- // We want to retry pulling if we've lost connection to the sync backend
374
- while: (cause) => cause._tag === 'IsOfflineError',
386
+ // Retry pulling when we've lost connection to the sync backend
387
+ // We're using `until` with a refinement instead of `while` to narrow `IsOfflineError` out of the error type.
388
+ // See https://github.com/Effect-TS/effect/issues/6122
389
+ until: (error): error is Exclude<typeof error, IsOfflineError> => error._tag !== 'IsOfflineError',
375
390
  }),
391
+ Effect.catchTag('BackendIdMismatchError', handleBackendIdMismatchError),
376
392
  Effect.catchAllCause(maybeShutdownOnError),
377
393
  // Needed to avoid `Fiber terminated with an unhandled error` logs which seem to happen because of the `Effect.retry` above.
378
394
  // This might be a bug in Effect. Only seems to happen in the browser.
@@ -403,17 +419,16 @@ export const makeLeaderSyncProcessor = ({
403
419
  - full new state db snapshot in the "rebase" case
404
420
  - downside: importing the snapshot is expensive
405
421
  */
406
- const pullQueue: LeaderSyncProcessor['pullQueue'] = ({ cursor }) => {
407
- const runtime = ctxRef.current?.runtime ?? shouldNeverHappen('Not initialized')
408
- return connectedClientSessionPullQueues.makeQueue(cursor).pipe(Effect.provide(runtime))
409
- }
422
+ const pullQueue: LeaderSyncProcessor['pullQueue'] = ({ cursor }) =>
423
+ Effect.fromNullable(ctxRef.current?.runtime).pipe(
424
+ Effect.orDieDebugger,
425
+ Effect.flatMap((runtime) =>
426
+ connectedClientSessionPullQueues.makeQueue(cursor).pipe(Effect.provide(runtime))
427
+ )
428
+ )
410
429
 
411
430
  const syncState = Subscribable.make({
412
- get: Effect.gen(function* () {
413
- const syncState = yield* syncStateSref
414
- if (syncState === undefined) return shouldNeverHappen('Not initialized')
415
- return syncState
416
- }),
431
+ get: syncStateSref.pipe(Effect.flatMap(Effect.fromNullable), Effect.orDieDebugger),
417
432
  changes: syncStateSref.changes.pipe(Stream.filter(isNotUndefined)),
418
433
  })
419
434
 
@@ -428,26 +443,22 @@ export const makeLeaderSyncProcessor = ({
428
443
  })
429
444
 
430
445
  const backgroundApplyLocalPushes = ({
431
- localPushesLatch,
446
+ localPushBackendPullMutex,
432
447
  localPushesQueue,
433
- pullLatch,
434
448
  syncStateSref,
435
449
  syncBackendPushQueue,
436
450
  schema,
437
451
  isClientEvent,
438
- otelSpan,
439
452
  connectedClientSessionPullQueues,
440
453
  localPushBatchSize,
441
454
  testing,
442
455
  }: {
443
- pullLatch: Effect.Latch
444
- localPushesLatch: Effect.Latch
456
+ localPushBackendPullMutex: Effect.Semaphore
445
457
  localPushesQueue: BucketQueue.BucketQueue<LocalPushQueueItem>
446
458
  syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
447
459
  syncBackendPushQueue: BucketQueue.BucketQueue<LiveStoreEvent.Client.EncodedWithMeta>
448
460
  schema: LiveStoreSchema
449
461
  isClientEvent: (eventEncoded: LiveStoreEvent.Client.EncodedWithMeta) => boolean
450
- otelSpan: otel.Span | undefined
451
462
  connectedClientSessionPullQueues: PullQueueSet
452
463
  localPushBatchSize: number
453
464
  testing: {
@@ -462,167 +473,139 @@ const backgroundApplyLocalPushes = ({
462
473
 
463
474
  const batchItems = yield* BucketQueue.takeBetween(localPushesQueue, 1, localPushBatchSize)
464
475
 
465
- // Wait for the backend pulling to finish
466
- yield* localPushesLatch.await
467
-
468
- // Prevent backend pull processing until this local push is finished
469
- yield* pullLatch.close
470
-
471
- const syncState = yield* syncStateSref
472
- if (syncState === undefined) return shouldNeverHappen('Not initialized')
473
-
474
- const currentRebaseGeneration = syncState.localHead.rebaseGeneration
476
+ // Applies a batch of local pushes, guarded by the localPushBackendPullMutex to ensure mutual exclusion with backend pulling
477
+ yield* Effect.gen(function* () {
478
+ const syncState = yield* Effect.fromNullable(yield* syncStateSref).pipe(Effect.orDieDebugger)
475
479
 
476
- // Since the rebase generation might have changed since enqueuing, we need to filter out items with older generation
477
- // It's important that we filter after we got localPushesLatch, otherwise we might filter with the old generation
478
- const [droppedItems, filteredItems] = ReadonlyArray.partition(
479
- batchItems,
480
- ([eventEncoded]) => eventEncoded.seqNum.rebaseGeneration >= currentRebaseGeneration,
481
- )
480
+ const currentRebaseGeneration = syncState.localHead.rebaseGeneration
482
481
 
483
- if (droppedItems.length > 0) {
484
- otelSpan?.addEvent(
485
- `push:drop-old-generation`,
486
- {
487
- droppedCount: droppedItems.length,
488
- currentRebaseGeneration,
489
- },
490
- undefined,
482
+ // Since the rebase generation might have changed since enqueuing, we need to filter out items with older generation
483
+ // It's important that we filter after acquiring the localPushBackendPullMutex, otherwise we might filter with the old generation
484
+ const [droppedItems, filteredItems] = ReadonlyArray.partition(
485
+ batchItems,
486
+ ([eventEncoded]) => eventEncoded.seqNum.rebaseGeneration >= currentRebaseGeneration,
491
487
  )
492
488
 
489
+ if (droppedItems.length > 0) {
490
+ yield* Effect.spanEvent(`push:drop-old-generation`, {
491
+ droppedCount: droppedItems.length,
492
+ currentRebaseGeneration,
493
+ })
494
+
493
495
  /**
494
496
  * Dropped pushes may still have a deferred awaiting completion.
495
497
  * Fail it so the caller learns the leader advanced and resubmits with the updated generation.
496
498
  */
497
499
  yield* Effect.forEach(
498
500
  droppedItems.filter(
499
- (item): item is [LiveStoreEvent.Client.EncodedWithMeta, Deferred.Deferred<void, LeaderAheadError>] =>
500
- item[1] !== undefined,
501
- ),
502
- ([eventEncoded, deferred]) =>
503
- Deferred.fail(
504
- deferred,
505
- LeaderAheadError.make({
506
- minimumExpectedNum: syncState.localHead,
507
- providedNum: eventEncoded.seqNum,
508
- }),
501
+ (item): item is [LiveStoreEvent.Client.EncodedWithMeta, Deferred.Deferred<void, LeaderAheadError | StaleRebaseGenerationError>] =>
502
+ item[1] !== undefined,
509
503
  ),
510
- )
511
- }
512
-
513
- if (filteredItems.length === 0) {
514
- yield* pullLatch.open
515
- continue
516
- }
517
-
518
- const [newEvents, deferreds] = ReadonlyArray.unzip(filteredItems)
519
-
520
- const mergeResult = SyncState.merge({
521
- syncState,
522
- payload: { _tag: 'local-push', newEvents },
523
- isClientEvent,
524
- isEqualEvent: LiveStoreEvent.Client.isEqualEncoded,
525
- })
526
-
527
- switch (mergeResult._tag) {
528
- case 'unknown-error': {
529
- otelSpan?.addEvent(
530
- `push:unknown-error`,
531
- {
532
- batchSize: newEvents.length,
533
- newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
534
- },
535
- undefined,
504
+ ([eventEncoded, deferred]) =>
505
+ Deferred.fail(
506
+ deferred,
507
+ StaleRebaseGenerationError.make({
508
+ currentRebaseGeneration,
509
+ providedRebaseGeneration: eventEncoded.seqNum.rebaseGeneration,
510
+ sessionId: eventEncoded.sessionId,
511
+ }),
512
+ ),
536
513
  )
537
- return yield* new UnknownError({ cause: mergeResult.message })
538
514
  }
539
- case 'rebase': {
540
- return shouldNeverHappen('The leader thread should never have to rebase due to a local push')
515
+
516
+ if (filteredItems.length === 0) {
517
+ return
541
518
  }
542
- case 'reject': {
543
- otelSpan?.addEvent(
544
- `push:reject`,
545
- {
546
- batchSize: newEvents.length,
547
- mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
548
- },
549
- undefined,
550
- )
551
519
 
552
- // TODO: how to test this?
553
- const nextRebaseGeneration = currentRebaseGeneration + 1
520
+ const [newEvents, deferreds] = ReadonlyArray.unzip(filteredItems)
554
521
 
555
- const providedNum = newEvents.at(0)!.seqNum
556
- // All subsequent pushes with same generation should be rejected as well
557
- // We're also handling the case where the localPushQueue already contains events
558
- // from the next generation which we preserve in the queue
559
- const remainingEventsMatchingGeneration = yield* BucketQueue.takeSplitWhere(
560
- localPushesQueue,
561
- ([eventEncoded]) => eventEncoded.seqNum.rebaseGeneration >= nextRebaseGeneration,
562
- )
522
+ yield* Effect.annotateCurrentSpan({
523
+ 'batchSize': newEvents.length,
524
+ ...(TRACE_VERBOSE === true ? { 'newEvents': jsonStringify(newEvents) } : {}),
525
+ })
563
526
 
564
- // TODO we still need to better understand and handle this scenario
565
- if (LS_DEV && (yield* BucketQueue.size(localPushesQueue)) > 0) {
566
- console.log('localPushesQueue is not empty', yield* BucketQueue.size(localPushesQueue))
567
- // biome-ignore lint/suspicious/noDebugger: debugging
568
- debugger
527
+ const mergeResult = yield* SyncState.merge({
528
+ syncState,
529
+ payload: { _tag: 'local-push', newEvents },
530
+ isClientEvent,
531
+ isEqualEvent: LiveStoreEvent.Client.isEqualEncoded,
532
+ })
533
+
534
+ switch (mergeResult._tag) {
535
+ case 'rebase': {
536
+ return yield* Effect.dieDebugger('The leader thread should never have to rebase due to a local push')
569
537
  }
538
+ case 'reject': {
539
+ yield* Effect.spanEvent(`push:reject`, {
540
+ batchSize: newEvents.length,
541
+ ...(TRACE_VERBOSE === true ? { mergeResult: jsonStringify(mergeResult) } : {}),
542
+ })
570
543
 
571
- const allDeferredsToReject = [
572
- ...deferreds,
573
- ...remainingEventsMatchingGeneration.map(([_, deferred]) => deferred),
574
- ].filter(isNotUndefined)
544
+ // TODO: how to test this?
545
+ const nextRebaseGeneration = currentRebaseGeneration + 1
575
546
 
576
- yield* Effect.forEach(allDeferredsToReject, (deferred) =>
577
- Deferred.fail(
578
- deferred,
579
- LeaderAheadError.make({ minimumExpectedNum: mergeResult.expectedMinimumId, providedNum }),
580
- ),
581
- )
547
+ const providedNum = newEvents.at(0)!.seqNum
548
+ // All subsequent pushes with same generation should be rejected as well
549
+ // We're also handling the case where the localPushQueue already contains events
550
+ // from the next generation which we preserve in the queue
551
+ const remainingEventsMatchingGeneration = yield* BucketQueue.takeSplitWhere(
552
+ localPushesQueue,
553
+ ([eventEncoded]) => eventEncoded.seqNum.rebaseGeneration >= nextRebaseGeneration,
554
+ )
582
555
 
583
- // Allow the backend pulling to start
584
- yield* pullLatch.open
556
+ // TODO we still need to better understand and handle this scenario
557
+ if (LS_DEV === true && (yield* BucketQueue.size(localPushesQueue)) > 0) {
558
+ console.log('localPushesQueue is not empty', yield* BucketQueue.size(localPushesQueue))
559
+ // oxlint-disable-next-line eslint(no-debugger) -- intentional breakpoint for unexpected queue state
560
+ debugger
561
+ }
585
562
 
586
- // In this case we're skipping state update and down/upstream processing
587
- // We've cleared the local push queue and are now waiting for new local pushes / backend pulls
588
- continue
589
- }
590
- case 'advance': {
591
- break
592
- }
593
- default: {
594
- casesHandled(mergeResult)
563
+ const allDeferredsToReject = [
564
+ ...deferreds,
565
+ ...remainingEventsMatchingGeneration.map(([_, deferred]) => deferred),
566
+ ].filter(isNotUndefined)
567
+
568
+ yield* Effect.forEach(allDeferredsToReject, (deferred) =>
569
+ Deferred.fail(
570
+ deferred,
571
+ LeaderAheadError.make({ minimumExpectedNum: mergeResult.expectedMinimumId, providedNum, sessionId: newEvents.at(0)!.sessionId }),
572
+ ),
573
+ )
574
+
575
+ // In this case we're skipping state update and down/upstream processing
576
+ // We've cleared the local push queue and are now waiting for new local pushes / backend pulls
577
+ return
578
+ }
579
+ case 'advance': {
580
+ break
581
+ }
582
+ default: {
583
+ casesHandled(mergeResult)
584
+ }
595
585
  }
596
- }
597
586
 
598
- yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState)
587
+ yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState)
599
588
 
600
- yield* connectedClientSessionPullQueues.offer({
601
- payload: SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }),
602
- leaderHead: mergeResult.newSyncState.localHead,
603
- })
589
+ yield* connectedClientSessionPullQueues.offer({
590
+ payload: SyncState.PayloadUpstreamAdvance.make({ newEvents: mergeResult.newEvents }),
591
+ leaderHead: mergeResult.newSyncState.localHead,
592
+ })
604
593
 
605
- otelSpan?.addEvent(
606
- `push:advance`,
607
- {
594
+ yield* Effect.spanEvent(`push:advance`, {
608
595
  batchSize: newEvents.length,
609
- mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
610
- },
611
- undefined,
612
- )
613
-
614
- // Don't sync client-local events
615
- const filteredBatch = mergeResult.newEvents.filter((eventEncoded) => {
616
- const eventDef = schema.eventsDefsMap.get(eventEncoded.name)
617
- return eventDef === undefined ? true : eventDef.options.clientOnly === false
618
- })
596
+ ...(TRACE_VERBOSE === true ? { mergeResult: jsonStringify(mergeResult) } : {}),
597
+ })
619
598
 
620
- yield* BucketQueue.offerAll(syncBackendPushQueue, filteredBatch)
599
+ // Don't sync client-local events
600
+ const filteredBatch = mergeResult.newEvents.filter((eventEncoded) => {
601
+ const eventDef = schema.eventsDefsMap.get(eventEncoded.name)
602
+ return eventDef === undefined ? true : eventDef.options.clientOnly === false
603
+ })
621
604
 
622
- yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds })
605
+ yield* BucketQueue.offerAll(syncBackendPushQueue, filteredBatch)
623
606
 
624
- // Allow the backend pulling to start
625
- yield* pullLatch.open
607
+ yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds })
608
+ }).pipe(localPushBackendPullMutex.withPermits(1))
626
609
  }
627
610
  })
628
611
 
@@ -632,7 +615,7 @@ type MaterializeEventsBatch = (_: {
632
615
  * The deferreds are used by the caller to know when the mutation has been processed.
633
616
  * Indexes are aligned with `batchItems`
634
617
  */
635
- deferreds: ReadonlyArray<Deferred.Deferred<void, LeaderAheadError> | undefined> | undefined
618
+ deferreds: ReadonlyArray<Deferred.Deferred<void, LeaderAheadError | StaleRebaseGenerationError> | undefined> | undefined
636
619
  }) => Effect.Effect<void, MaterializeError, LeaderThreadCtx>
637
620
 
638
621
  // TODO how to handle errors gracefully
@@ -646,7 +629,7 @@ const materializeEventsBatch: MaterializeEventsBatch = ({ batchItems, deferreds
646
629
 
647
630
  yield* Effect.addFinalizer((exit) =>
648
631
  Effect.gen(function* () {
649
- if (Exit.isSuccess(exit)) return
632
+ if (Exit.isSuccess(exit) === true) return
650
633
 
651
634
  // Rollback in case of an error
652
635
  db.execute('ROLLBACK', undefined)
@@ -675,15 +658,13 @@ const materializeEventsBatch: MaterializeEventsBatch = ({ batchItems, deferreds
675
658
  Effect.tapCauseLogPretty,
676
659
  )
677
660
 
678
- const backgroundBackendPulling = ({
661
+ const backgroundBackendPulling = Effect.fn('@livestore/common:LeaderSyncProcessor:backend-pulling')(function* ({
679
662
  isClientEvent,
680
663
  restartBackendPushing,
681
- otelSpan,
682
664
  dbState,
683
665
  syncStateSref,
684
- localPushesLatch,
666
+ localPushBackendPullMutex,
685
667
  livePull,
686
- pullLatch,
687
668
  devtoolsLatch,
688
669
  initialBlockingSyncContext,
689
670
  connectedClientSessionPullQueues,
@@ -692,80 +673,80 @@ const backgroundBackendPulling = ({
692
673
  isClientEvent: (eventEncoded: LiveStoreEvent.Client.EncodedWithMeta) => boolean
693
674
  restartBackendPushing: (
694
675
  filteredRebasedPending: ReadonlyArray<LiveStoreEvent.Client.EncodedWithMeta>,
695
- ) => Effect.Effect<void, UnknownError, LeaderThreadCtx | HttpClient.HttpClient>
696
- otelSpan: otel.Span | undefined
676
+ ) => Effect.Effect<void, never, LeaderThreadCtx | HttpClient.HttpClient>
697
677
  syncStateSref: SubscriptionRef.SubscriptionRef<SyncState.SyncState | undefined>
698
678
  dbState: SqliteDb
699
- localPushesLatch: Effect.Latch
700
- pullLatch: Effect.Latch
679
+ localPushBackendPullMutex: Effect.Semaphore
701
680
  livePull: boolean
702
681
  devtoolsLatch: Effect.Latch | undefined
703
682
  initialBlockingSyncContext: InitialBlockingSyncContext
704
683
  connectedClientSessionPullQueues: PullQueueSet
705
684
  advancePushHead: (eventNum: EventSequenceNumber.Client.Composite) => void
706
- }) =>
707
- Effect.gen(function* () {
708
- const { syncBackend, dbState: db, dbEventlog, schema } = yield* LeaderThreadCtx
685
+ }) {
686
+ const { syncBackend, dbState: db, dbEventlog, schema } = yield* LeaderThreadCtx
709
687
 
710
- if (syncBackend === undefined) return
688
+ if (syncBackend === undefined) return
711
689
 
712
- const onNewPullChunk = (
713
- newEvents: LiveStoreEvent.Client.EncodedWithMeta[],
714
- pageInfo: SyncBackend.PullResPageInfo,
715
- ) =>
716
- Effect.gen(function* () {
717
- if (newEvents.length === 0) return
690
+ let pullMutexHeld = false
718
691
 
719
- if (devtoolsLatch !== undefined) {
720
- yield* devtoolsLatch.await
692
+ const releasePullMutexIfHeld = Effect.gen(function* () {
693
+ if (pullMutexHeld === false) return
694
+ pullMutexHeld = false
695
+ yield* localPushBackendPullMutex.release(1)
696
+ })
697
+
698
+ const isPullPaginationComplete = (pageInfo: SyncBackend.PullResPageInfo) => pageInfo._tag === 'NoMore'
699
+
700
+ const onNewPullChunk = (newEvents: LiveStoreEvent.Client.EncodedWithMeta[], pageInfo: SyncBackend.PullResPageInfo) =>
701
+ Effect.gen(function* () {
702
+ if (devtoolsLatch !== undefined) {
703
+ yield* devtoolsLatch.await
704
+ }
705
+
706
+ if (newEvents.length === 0) {
707
+ if (isPullPaginationComplete(pageInfo) === true) {
708
+ yield* releasePullMutexIfHeld
721
709
  }
710
+ return
711
+ }
722
712
 
723
- // Prevent more local pushes from being processed until this pull is finished
724
- yield* localPushesLatch.close
713
+ // Prevent more local pushes from being processed until this pull pagination sequence is finished.
714
+ if (pullMutexHeld === false) {
715
+ yield* localPushBackendPullMutex.take(1)
716
+ pullMutexHeld = true
717
+ }
725
718
 
726
- // Wait for pending local pushes to finish
727
- yield* pullLatch.await
719
+ const chunkExit = yield* Effect.gen(function* () {
720
+ const syncState = yield* Effect.fromNullable(yield* syncStateSref).pipe(Effect.orDieDebugger)
728
721
 
729
- const syncState = yield* syncStateSref
730
- if (syncState === undefined) return shouldNeverHappen('Not initialized')
722
+ yield* Effect.annotateCurrentSpan({
723
+ 'merge.newEventsCount': newEvents.length,
724
+ ...(TRACE_VERBOSE === true ? { 'merge.newEvents': jsonStringify(newEvents) } : {}),
725
+ })
731
726
 
732
- const mergeResult = SyncState.merge({
733
- syncState,
734
- payload: SyncState.PayloadUpstreamAdvance.make({ newEvents }),
735
- isClientEvent,
736
- isEqualEvent: LiveStoreEvent.Client.isEqualEncoded,
737
- ignoreClientEvents: true,
738
- })
727
+ const mergeResult = yield* SyncState.merge({
728
+ syncState,
729
+ payload: SyncState.PayloadUpstreamAdvance.make({ newEvents }),
730
+ isClientEvent,
731
+ isEqualEvent: LiveStoreEvent.Client.isEqualEncoded,
732
+ ignoreClientEvents: true,
733
+ })
739
734
 
740
- if (mergeResult._tag === 'reject') {
741
- return shouldNeverHappen('The leader thread should never reject upstream advances')
742
- } else if (mergeResult._tag === 'unknown-error') {
743
- otelSpan?.addEvent(
744
- `pull:unknown-error`,
745
- {
746
- newEventsCount: newEvents.length,
747
- newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
748
- },
749
- undefined,
750
- )
751
- return yield* new UnknownError({ cause: mergeResult.message })
752
- }
735
+ if (mergeResult._tag === 'reject') {
736
+ return yield* Effect.dieDebugger('The leader thread should never reject upstream advances')
737
+ }
753
738
 
754
739
  const newBackendHead = newEvents.at(-1)!.seqNum
755
740
 
756
741
  Eventlog.updateBackendHead(dbEventlog, newBackendHead)
757
742
 
758
743
  if (mergeResult._tag === 'rebase') {
759
- otelSpan?.addEvent(
760
- `pull:rebase[${mergeResult.newSyncState.localHead.rebaseGeneration}]`,
761
- {
762
- newEventsCount: newEvents.length,
763
- newEvents: TRACE_VERBOSE ? JSON.stringify(newEvents) : undefined,
764
- rollbackCount: mergeResult.rollbackEvents.length,
765
- mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
766
- },
767
- undefined,
768
- )
744
+ yield* Effect.spanEvent(`pull:rebase[${mergeResult.newSyncState.localHead.rebaseGeneration}]`, {
745
+ newEventsCount: newEvents.length,
746
+ ...(TRACE_VERBOSE === true ? { newEvents: jsonStringify(newEvents) } : {}),
747
+ rollbackCount: mergeResult.rollbackEvents.length,
748
+ ...(TRACE_VERBOSE === true ? { mergeResult: jsonStringify(mergeResult) } : {}),
749
+ })
769
750
 
770
751
  const globalRebasedPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
771
752
  const eventDef = schema.eventsDefsMap.get(event.name)
@@ -786,14 +767,10 @@ const backgroundBackendPulling = ({
786
767
  leaderHead: mergeResult.newSyncState.localHead,
787
768
  })
788
769
  } else {
789
- otelSpan?.addEvent(
790
- `pull:advance`,
791
- {
792
- newEventsCount: newEvents.length,
793
- mergeResult: TRACE_VERBOSE ? JSON.stringify(mergeResult) : undefined,
794
- },
795
- undefined,
796
- )
770
+ yield* Effect.spanEvent(`pull:advance`, {
771
+ newEventsCount: newEvents.length,
772
+ ...(TRACE_VERBOSE === true ? { mergeResult: jsonStringify(mergeResult) } : {}),
773
+ })
797
774
 
798
775
  // Ensure push fiber is active after advance by restarting with current pending (non-client) events
799
776
  const globalPendingEvents = mergeResult.newSyncState.pending.filter((event) => {
@@ -815,7 +792,7 @@ const backgroundBackendPulling = ({
815
792
  EventSequenceNumber.Client.isEqual(event.seqNum, confirmedEvent.seqNum),
816
793
  ),
817
794
  )
818
- yield* Eventlog.updateSyncMetadata(confirmedNewEvents).pipe(UnknownError.mapToUnknownError)
795
+ yield* Eventlog.updateSyncMetadata(confirmedNewEvents).pipe(Effect.orDieDebugger)
819
796
  }
820
797
  }
821
798
 
@@ -827,144 +804,126 @@ const backgroundBackendPulling = ({
827
804
  yield* materializeEventsBatch({ batchItems: mergeResult.newEvents, deferreds: undefined })
828
805
 
829
806
  yield* SubscriptionRef.set(syncStateSref, mergeResult.newSyncState)
807
+ }).pipe(Effect.exit)
830
808
 
831
- // Allow local pushes to be processed again
832
- if (pageInfo._tag === 'NoMore') {
833
- yield* localPushesLatch.open
834
- }
835
- })
809
+ if (Exit.isFailure(chunkExit) === true) {
810
+ yield* releasePullMutexIfHeld
811
+ return yield* Effect.failCause(chunkExit.cause)
812
+ }
836
813
 
837
- const syncState = yield* syncStateSref
838
- if (syncState === undefined) return shouldNeverHappen('Not initialized')
839
- const cursorInfo = yield* Eventlog.getSyncBackendCursorInfo({ remoteHead: syncState.upstreamHead.global })
814
+ if (isPullPaginationComplete(pageInfo) === true) {
815
+ yield* releasePullMutexIfHeld
816
+ }
817
+ })
840
818
 
841
- const hashMaterializerResult = makeMaterializerHash({ schema, dbState })
819
+ const syncState = yield* Effect.fromNullable(yield* syncStateSref).pipe(Effect.orDieDebugger)
820
+ const cursorInfo = yield* Eventlog.getSyncBackendCursorInfo({ remoteHead: syncState.upstreamHead.global })
842
821
 
843
- yield* syncBackend.pull(cursorInfo, { live: livePull }).pipe(
844
- // TODO only take from queue while connected
845
- Stream.tap(({ batch, pageInfo }) =>
846
- Effect.gen(function* () {
847
- // yield* Effect.spanEvent('batch', {
848
- // attributes: {
849
- // batchSize: batch.length,
850
- // batch: TRACE_VERBOSE ? batch : undefined,
851
- // },
852
- // })
853
- // NOTE we only want to take process events when the sync backend is connected
854
- // (e.g. needed for simulating being offline)
855
- // TODO remove when there's a better way to handle this in stream above
856
- yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
857
- yield* onNewPullChunk(
858
- batch.map((_) =>
859
- LiveStoreEvent.Client.EncodedWithMeta.fromGlobal(_.eventEncoded, {
860
- syncMetadata: _.metadata,
861
- // TODO we can't really know the materializer result here yet beyond the first event batch item as we need to materialize it one by one first
862
- // This is a bug and needs to be fixed https://github.com/livestorejs/livestore/issues/503#issuecomment-3114533165
863
- materializerHashLeader: hashMaterializerResult(LiveStoreEvent.Global.toClientEncoded(_.eventEncoded)),
864
- materializerHashSession: Option.none(),
865
- }),
866
- ),
867
- pageInfo,
868
- )
869
- yield* initialBlockingSyncContext.update({ processed: batch.length, pageInfo })
870
- }),
871
- ),
872
- Stream.runDrain,
873
- Effect.interruptible,
874
- )
822
+ const hashMaterializerResult = makeMaterializerHash({ schema, dbState })
823
+
824
+ yield* syncBackend.pull(cursorInfo, { live: livePull }).pipe(
825
+ // TODO only take from queue while connected
826
+ Stream.tap(({ batch, pageInfo }) =>
827
+ Effect.gen(function* () {
828
+ // NOTE we only want to take process events when the sync backend is connected
829
+ // (e.g. needed for simulating being offline)
830
+ // TODO remove when there's a better way to handle this in stream above
831
+ yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
832
+ yield* onNewPullChunk(
833
+ batch.map((_) =>
834
+ LiveStoreEvent.Client.EncodedWithMeta.fromGlobal(_.eventEncoded, {
835
+ syncMetadata: _.metadata,
836
+ // TODO we can't really know the materializer result here yet beyond the first event batch item as we need to materialize it one by one first
837
+ // This is a bug and needs to be fixed https://github.com/livestorejs/livestore/issues/503#issuecomment-3114533165
838
+ materializerHashLeader: hashMaterializerResult(LiveStoreEvent.Global.toClientEncoded(_.eventEncoded)),
839
+ materializerHashSession: Option.none(),
840
+ }),
841
+ ),
842
+ pageInfo,
843
+ )
844
+ yield* initialBlockingSyncContext.update({ processed: batch.length, pageInfo })
845
+ }),
846
+ ),
847
+ Stream.runDrain,
848
+ Effect.interruptible,
849
+ Effect.ensuring(releasePullMutexIfHeld),
850
+ )
875
851
 
876
- // Should only ever happen when livePull is false
877
- yield* Effect.logDebug('backend-pulling finished', { livePull })
878
- }).pipe(Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pulling'))
852
+ // Should only ever happen when livePull is false
853
+ yield* Effect.logDebug('backend-pulling finished', { livePull })
854
+ })
879
855
 
880
- const backgroundBackendPushing = ({
856
+ const backgroundBackendPushing = Effect.fn('@livestore/common:LeaderSyncProcessor:backend-pushing')(function* ({
881
857
  syncBackendPushQueue,
882
- otelSpan,
883
858
  devtoolsLatch,
884
859
  backendPushBatchSize,
885
860
  }: {
886
861
  syncBackendPushQueue: BucketQueue.BucketQueue<LiveStoreEvent.Client.EncodedWithMeta>
887
- otelSpan: otel.Span | undefined
888
862
  devtoolsLatch: Effect.Latch | undefined
889
863
  backendPushBatchSize: number
890
- }) =>
891
- Effect.gen(function* () {
892
- const { syncBackend } = yield* LeaderThreadCtx
893
- if (syncBackend === undefined) return
864
+ }) {
865
+ const { syncBackend } = yield* LeaderThreadCtx
866
+ if (syncBackend === undefined) return
894
867
 
895
- while (true) {
896
- yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
868
+ while (true) {
869
+ yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
897
870
 
898
- const queueItems = yield* BucketQueue.takeBetween(syncBackendPushQueue, 1, backendPushBatchSize)
871
+ const queueItems = yield* BucketQueue.takeBetween(syncBackendPushQueue, 1, backendPushBatchSize)
899
872
 
900
- yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
873
+ yield* SubscriptionRef.waitUntil(syncBackend.isConnected, (isConnected) => isConnected === true)
901
874
 
902
- if (devtoolsLatch !== undefined) {
903
- yield* devtoolsLatch.await
904
- }
875
+ if (devtoolsLatch !== undefined) {
876
+ yield* devtoolsLatch.await
877
+ }
905
878
 
906
- otelSpan?.addEvent(
907
- 'backend-push',
908
- {
909
- batchSize: queueItems.length,
910
- batch: TRACE_VERBOSE ? JSON.stringify(queueItems) : undefined,
911
- },
912
- undefined,
913
- )
879
+ yield* Effect.spanEvent('backend-push', {
880
+ batchSize: queueItems.length,
881
+ ...(TRACE_VERBOSE === true ? { batch: jsonStringify(queueItems) } : {}),
882
+ })
914
883
 
915
- // Push with declarative retry/backoff using Effect schedules
916
- // - Exponential backoff starting at 1s and doubling (1s, 2s, 4s, 8s, 16s, 30s ...)
917
- // - Delay clamped at 30s (continues retrying at 30s)
918
- // - Resets automatically after successful push
919
- // TODO(metrics): expose counters/gauges for retry attempts and queue health via devtools/metrics
920
-
921
- // Only retry for transient UnknownError cases
922
- const isRetryable = (err: InvalidPushError | IsOfflineError) =>
923
- err._tag === 'InvalidPushError' && err.cause._tag === 'LiveStore.UnknownError'
924
-
925
- // Input: InvalidPushError | IsOfflineError, Output: Duration
926
- const retrySchedule: Schedule.Schedule<Duration.DurationInput, InvalidPushError | IsOfflineError> =
927
- Schedule.exponential(Duration.seconds(1)).pipe(
928
- Schedule.andThenEither(Schedule.spaced(Duration.seconds(30))), // clamp at 30 second intervals
929
- Schedule.compose(Schedule.elapsed),
930
- Schedule.whileInput(isRetryable),
931
- )
884
+ // Push with declarative retry/backoff using Effect schedules
885
+ // - Exponential backoff starting at 1s and doubling (1s, 2s, 4s, 8s, 16s, 30s ...)
886
+ // - Delay clamped at 30s (continues retrying at 30s)
887
+ // - Resets automatically after successful push
888
+ // TODO(metrics): expose counters/gauges for retry attempts and queue health via devtools/metrics
889
+ yield* Effect.gen(function* () {
890
+ const iteration = yield* Schedule.CurrentIterationMetadata
932
891
 
933
- yield* Effect.gen(function* () {
934
- const iteration = yield* Schedule.CurrentIterationMetadata
892
+ const pushResult = yield* syncBackend.push(queueItems.map((_) => _.toGlobal())).pipe(Effect.either)
935
893
 
936
- const pushResult = yield* syncBackend.push(queueItems.map((_) => _.toGlobal())).pipe(Effect.either)
894
+ const retries = iteration.recurrence
895
+ if (retries > 0 && pushResult._tag === 'Right') {
896
+ yield* Effect.spanEvent('backend-push-retry-success', { retries, batchSize: queueItems.length })
897
+ }
937
898
 
938
- const retries = iteration.recurrence
939
- if (retries > 0 && pushResult._tag === 'Right') {
940
- otelSpan?.addEvent('backend-push-retry-success', { retries, batchSize: queueItems.length }, undefined)
899
+ if (pushResult._tag === 'Left') {
900
+ yield* Effect.spanEvent('backend-push-error', {
901
+ error: pushResult.left.toString(),
902
+ retries,
903
+ batchSize: queueItems.length,
904
+ })
905
+ const error = pushResult.left
906
+ if (error._tag === 'ServerAheadError') {
907
+ // It's a core part of the sync protocol that the sync backend will emit a new pull chunk alongside the ServerAheadError
908
+ yield* Effect.logDebug('handled backend-push-error (waiting for interupt caused by pull)', { error })
909
+ return yield* Effect.never
941
910
  }
942
911
 
943
- if (pushResult._tag === 'Left') {
944
- otelSpan?.addEvent(
945
- 'backend-push-error',
946
- {
947
- error: pushResult.left.toString(),
948
- retries,
949
- batchSize: queueItems.length,
950
- },
951
- undefined,
952
- )
953
- const error = pushResult.left
954
- if (
955
- error._tag === 'IsOfflineError' ||
956
- (error._tag === 'InvalidPushError' && error.cause._tag === 'ServerAheadError')
957
- ) {
958
- // It's a core part of the sync protocol that the sync backend will emit a new pull chunk alongside the ServerAheadError
959
- yield* Effect.logDebug('handled backend-push-error (waiting for interupt caused by pull)', { error })
960
- return yield* Effect.never
961
- }
962
-
963
- return yield* error
964
- }
965
- }).pipe(Effect.retry(retrySchedule))
966
- }
967
- }).pipe(Effect.interruptible, Effect.withSpan('@livestore/common:LeaderSyncProcessor:backend-pushing'))
912
+ return yield* error
913
+ }
914
+ }).pipe(
915
+ // Retry transient errors
916
+ Effect.retry({
917
+ schedule: Schedule.exponential(Duration.seconds(1)).pipe(
918
+ Schedule.modifyDelay((_, delay) => Duration.min(delay, Duration.seconds(30))) // Cap delay at 30s intervals.
919
+ ),
920
+ while: (error) => error._tag === 'IsOfflineError' || error._tag === 'UnknownError',
921
+ }),
922
+ // This is needed to narrow the Error type. Our retry policy runs indefinitely, but Effect.retry does not narrow the Error type.
923
+ Effect.catchIf((error) => error._tag === 'IsOfflineError' || error._tag === 'UnknownError', Effect.die),
924
+ )
925
+ }
926
+ }, Effect.interruptible)
968
927
 
969
928
  const trimChangesetRows = (db: SqliteDb, newHead: EventSequenceNumber.Client.Composite) => {
970
929
  // Since we're using the session changeset rows to query for the current head,
@@ -977,13 +936,13 @@ interface PullQueueSet {
977
936
  cursor: EventSequenceNumber.Client.Composite,
978
937
  ) => Effect.Effect<
979
938
  Queue.Queue<{ payload: typeof SyncState.PayloadUpstream.Type }>,
980
- UnknownError,
939
+ never,
981
940
  Scope.Scope | LeaderThreadCtx
982
941
  >
983
942
  offer: (item: {
984
943
  payload: typeof SyncState.PayloadUpstream.Type
985
944
  leaderHead: EventSequenceNumber.Client.Composite
986
- }) => Effect.Effect<void, UnknownError>
945
+ }) => Effect.Effect<void, never>
987
946
  }
988
947
 
989
948
  const makePullQueueSet = Effect.gen(function* () {
@@ -1070,7 +1029,7 @@ const makePullQueueSet = Effect.gen(function* () {
1070
1029
  const offer: PullQueueSet['offer'] = (item) =>
1071
1030
  Effect.gen(function* () {
1072
1031
  const seqNumStr = EventSequenceNumber.Client.toString(item.leaderHead)
1073
- if (cachedPayloads.has(seqNumStr)) {
1032
+ if (cachedPayloads.has(seqNumStr) === true) {
1074
1033
  cachedPayloads.get(seqNumStr)!.push(item.payload)
1075
1034
  } else {
1076
1035
  cachedPayloads.set(seqNumStr, [item.payload])
@@ -1108,24 +1067,94 @@ const validatePushBatch = (
1108
1067
  return
1109
1068
  }
1110
1069
 
1111
- // Example: session A already enqueued e1…e6 while session B (same client, different
1112
- // session) still believes the head is e1 and submits [e2, e7, e8]. The numbers look
1113
- // monotonic from B’s perspective, but we must reject and force B to rebase locally
1114
- // so the leader never regresses.
1070
+ // Defensive check: callers should already provide a strictly increasing sequence
1071
+ // of event numbers.
1115
1072
  for (let i = 1; i < batch.length; i++) {
1116
- if (EventSequenceNumber.Client.isGreaterThanOrEqual(batch[i - 1]!.seqNum, batch[i]!.seqNum)) {
1117
- return yield* LeaderAheadError.make({
1118
- minimumExpectedNum: batch[i - 1]!.seqNum,
1119
- providedNum: batch[i]!.seqNum,
1073
+ if (EventSequenceNumber.Client.isGreaterThanOrEqual(batch[i - 1]!.seqNum, batch[i]!.seqNum) === true) {
1074
+ return yield* NonMonotonicBatchError.make({
1075
+ precedingSeqNum: batch[i - 1]!.seqNum,
1076
+ violatingSeqNum: batch[i]!.seqNum,
1077
+ violationIndex: i,
1078
+ sessionId: batch[i]!.sessionId,
1120
1079
  })
1121
1080
  }
1122
1081
  }
1123
1082
 
1124
- // Make sure smallest sequence number is > pushHead
1125
- if (EventSequenceNumber.Client.isGreaterThanOrEqual(pushHead, batch[0]!.seqNum)) {
1083
+ // Reject stale batches whose first event is at or behind the leader's push head.
1084
+ if (EventSequenceNumber.Client.isGreaterThanOrEqual(pushHead, batch[0]!.seqNum) === true) {
1126
1085
  return yield* LeaderAheadError.make({
1127
1086
  minimumExpectedNum: pushHead,
1128
1087
  providedNum: batch[0]!.seqNum,
1088
+ sessionId: batch[0]!.sessionId,
1129
1089
  })
1130
1090
  }
1131
1091
  })
1092
+
1093
+ /**
1094
+ * Handles a BackendIdMismatchError based on the configured behavior.
1095
+ * This occurs when the sync backend has been reset and has a new identity.
1096
+ */
1097
+ const handleBackendIdMismatch = Effect.fn('@livestore/common:LeaderSyncProcessor:handleBackendIdMismatch')(function* ({
1098
+ error,
1099
+ onBackendIdMismatch,
1100
+ shutdownChannel,
1101
+ }: {
1102
+ error: BackendIdMismatchError
1103
+ onBackendIdMismatch: 'reset' | 'shutdown' | 'ignore'
1104
+ shutdownChannel: ShutdownChannel
1105
+ }) {
1106
+ const { dbEventlog, dbState } = yield* LeaderThreadCtx
1107
+
1108
+ if (onBackendIdMismatch === 'reset') {
1109
+ yield* Effect.logWarning(
1110
+ 'Sync backend identity changed (backend was reset). Clearing local storage and shutting down.',
1111
+ error,
1112
+ )
1113
+
1114
+ // Clear local databases so the client can start fresh on next boot
1115
+ yield* clearLocalDatabases({ dbEventlog, dbState })
1116
+
1117
+ // Send shutdown signal with special reason
1118
+ yield* shutdownChannel.send(IntentionalShutdownCause.make({ reason: 'backend-id-mismatch' })).pipe(Effect.orDie)
1119
+
1120
+ return yield* Effect.die(error)
1121
+ }
1122
+
1123
+ if (onBackendIdMismatch === 'shutdown') {
1124
+ yield* Effect.logWarning(
1125
+ 'Sync backend identity changed (backend was reset). Shutting down without clearing local storage.',
1126
+ error,
1127
+ )
1128
+
1129
+ yield* shutdownChannel.send(error).pipe(Effect.orDie)
1130
+
1131
+ return yield* Effect.die(error)
1132
+ }
1133
+
1134
+ // ignore mode
1135
+ if (LS_DEV === true) {
1136
+ yield* Effect.logDebug(
1137
+ 'Ignoring BackendIdMismatchError (sync backend was reset but client continues with stale data)',
1138
+ error,
1139
+ )
1140
+ }
1141
+ })
1142
+
1143
+ /**
1144
+ * Clears local databases (eventlog and state) so the client can start fresh on next boot.
1145
+ * This is used when the sync backend identity has changed (i.e. backend was reset).
1146
+ */
1147
+ const clearLocalDatabases = ({ dbEventlog, dbState }: { dbEventlog: SqliteDb; dbState: SqliteDb }) =>
1148
+ Effect.sync(() => {
1149
+ // Clear eventlog tables
1150
+ dbEventlog.execute(sql`DELETE FROM ${EVENTLOG_META_TABLE}`)
1151
+ dbEventlog.execute(sql`DELETE FROM ${SYNC_STATUS_TABLE}`)
1152
+
1153
+ // Drop all state tables - they'll be recreated on next boot
1154
+ const tables = dbState.select<{ name: string }>(
1155
+ sql`SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'`,
1156
+ )
1157
+ for (const { name } of tables) {
1158
+ dbState.execute(`DROP TABLE IF EXISTS "${name}"`)
1159
+ }
1160
+ })