@rocicorp/zero 0.26.0-canary.2 → 0.26.0-canary.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (333)
  1. package/README.md +1 -1
  2. package/out/replicache/src/persist/collect-idb-databases.d.ts +4 -4
  3. package/out/replicache/src/persist/collect-idb-databases.d.ts.map +1 -1
  4. package/out/replicache/src/persist/collect-idb-databases.js +22 -19
  5. package/out/replicache/src/persist/collect-idb-databases.js.map +1 -1
  6. package/out/replicache/src/persist/refresh.d.ts.map +1 -1
  7. package/out/replicache/src/persist/refresh.js +0 -8
  8. package/out/replicache/src/persist/refresh.js.map +1 -1
  9. package/out/replicache/src/process-scheduler.d.ts +23 -0
  10. package/out/replicache/src/process-scheduler.d.ts.map +1 -1
  11. package/out/replicache/src/process-scheduler.js +50 -1
  12. package/out/replicache/src/process-scheduler.js.map +1 -1
  13. package/out/replicache/src/replicache-impl.d.ts +8 -0
  14. package/out/replicache/src/replicache-impl.d.ts.map +1 -1
  15. package/out/replicache/src/replicache-impl.js +11 -2
  16. package/out/replicache/src/replicache-impl.js.map +1 -1
  17. package/out/shared/src/falsy.d.ts +3 -0
  18. package/out/shared/src/falsy.d.ts.map +1 -0
  19. package/out/zero/package.json.js +1 -1
  20. package/out/zero/src/adapters/drizzle.js +1 -2
  21. package/out/zero/src/adapters/prisma.d.ts +2 -0
  22. package/out/zero/src/adapters/prisma.d.ts.map +1 -0
  23. package/out/zero/src/adapters/prisma.js +6 -0
  24. package/out/zero/src/adapters/prisma.js.map +1 -0
  25. package/out/zero/src/pg.js +4 -7
  26. package/out/zero/src/react.js +3 -1
  27. package/out/zero/src/react.js.map +1 -1
  28. package/out/zero/src/server.js +5 -8
  29. package/out/zero-cache/src/auth/load-permissions.d.ts +3 -2
  30. package/out/zero-cache/src/auth/load-permissions.d.ts.map +1 -1
  31. package/out/zero-cache/src/auth/load-permissions.js +14 -8
  32. package/out/zero-cache/src/auth/load-permissions.js.map +1 -1
  33. package/out/zero-cache/src/auth/write-authorizer.d.ts +6 -0
  34. package/out/zero-cache/src/auth/write-authorizer.d.ts.map +1 -1
  35. package/out/zero-cache/src/auth/write-authorizer.js +16 -3
  36. package/out/zero-cache/src/auth/write-authorizer.js.map +1 -1
  37. package/out/zero-cache/src/config/zero-config.d.ts +44 -8
  38. package/out/zero-cache/src/config/zero-config.d.ts.map +1 -1
  39. package/out/zero-cache/src/config/zero-config.js +53 -13
  40. package/out/zero-cache/src/config/zero-config.js.map +1 -1
  41. package/out/zero-cache/src/custom/fetch.d.ts +3 -0
  42. package/out/zero-cache/src/custom/fetch.d.ts.map +1 -1
  43. package/out/zero-cache/src/custom/fetch.js +26 -0
  44. package/out/zero-cache/src/custom/fetch.js.map +1 -1
  45. package/out/zero-cache/src/db/lite-tables.js +1 -1
  46. package/out/zero-cache/src/db/lite-tables.js.map +1 -1
  47. package/out/zero-cache/src/db/migration-lite.d.ts.map +1 -1
  48. package/out/zero-cache/src/db/migration-lite.js +9 -3
  49. package/out/zero-cache/src/db/migration-lite.js.map +1 -1
  50. package/out/zero-cache/src/db/migration.d.ts.map +1 -1
  51. package/out/zero-cache/src/db/migration.js +9 -3
  52. package/out/zero-cache/src/db/migration.js.map +1 -1
  53. package/out/zero-cache/src/db/specs.d.ts +4 -3
  54. package/out/zero-cache/src/db/specs.d.ts.map +1 -1
  55. package/out/zero-cache/src/db/specs.js +4 -1
  56. package/out/zero-cache/src/db/specs.js.map +1 -1
  57. package/out/zero-cache/src/db/transaction-pool.d.ts.map +1 -1
  58. package/out/zero-cache/src/db/transaction-pool.js +9 -3
  59. package/out/zero-cache/src/db/transaction-pool.js.map +1 -1
  60. package/out/zero-cache/src/server/inspector-delegate.d.ts +1 -1
  61. package/out/zero-cache/src/server/inspector-delegate.d.ts.map +1 -1
  62. package/out/zero-cache/src/server/inspector-delegate.js +11 -30
  63. package/out/zero-cache/src/server/inspector-delegate.js.map +1 -1
  64. package/out/zero-cache/src/server/main.js +1 -1
  65. package/out/zero-cache/src/server/main.js.map +1 -1
  66. package/out/zero-cache/src/server/priority-op.d.ts +8 -0
  67. package/out/zero-cache/src/server/priority-op.d.ts.map +1 -0
  68. package/out/zero-cache/src/server/priority-op.js +29 -0
  69. package/out/zero-cache/src/server/priority-op.js.map +1 -0
  70. package/out/zero-cache/src/server/syncer.d.ts +0 -1
  71. package/out/zero-cache/src/server/syncer.d.ts.map +1 -1
  72. package/out/zero-cache/src/server/syncer.js +3 -21
  73. package/out/zero-cache/src/server/syncer.js.map +1 -1
  74. package/out/zero-cache/src/services/analyze.js +1 -1
  75. package/out/zero-cache/src/services/analyze.js.map +1 -1
  76. package/out/zero-cache/src/services/change-source/custom/change-source.d.ts.map +1 -1
  77. package/out/zero-cache/src/services/change-source/custom/change-source.js +4 -3
  78. package/out/zero-cache/src/services/change-source/custom/change-source.js.map +1 -1
  79. package/out/zero-cache/src/services/change-source/pg/change-source.d.ts.map +1 -1
  80. package/out/zero-cache/src/services/change-source/pg/change-source.js +68 -13
  81. package/out/zero-cache/src/services/change-source/pg/change-source.js.map +1 -1
  82. package/out/zero-cache/src/services/change-source/pg/initial-sync.d.ts.map +1 -1
  83. package/out/zero-cache/src/services/change-source/pg/initial-sync.js +7 -2
  84. package/out/zero-cache/src/services/change-source/pg/initial-sync.js.map +1 -1
  85. package/out/zero-cache/src/services/change-source/pg/logical-replication/stream.d.ts.map +1 -1
  86. package/out/zero-cache/src/services/change-source/pg/logical-replication/stream.js +7 -4
  87. package/out/zero-cache/src/services/change-source/pg/logical-replication/stream.js.map +1 -1
  88. package/out/zero-cache/src/services/change-source/pg/schema/ddl.d.ts +125 -180
  89. package/out/zero-cache/src/services/change-source/pg/schema/ddl.d.ts.map +1 -1
  90. package/out/zero-cache/src/services/change-source/pg/schema/init.d.ts.map +1 -1
  91. package/out/zero-cache/src/services/change-source/pg/schema/init.js +18 -10
  92. package/out/zero-cache/src/services/change-source/pg/schema/init.js.map +1 -1
  93. package/out/zero-cache/src/services/change-source/pg/schema/published.d.ts +36 -90
  94. package/out/zero-cache/src/services/change-source/pg/schema/published.d.ts.map +1 -1
  95. package/out/zero-cache/src/services/change-source/pg/schema/published.js +51 -14
  96. package/out/zero-cache/src/services/change-source/pg/schema/published.js.map +1 -1
  97. package/out/zero-cache/src/services/change-source/pg/schema/shard.d.ts +31 -36
  98. package/out/zero-cache/src/services/change-source/pg/schema/shard.d.ts.map +1 -1
  99. package/out/zero-cache/src/services/change-source/pg/schema/shard.js +24 -3
  100. package/out/zero-cache/src/services/change-source/pg/schema/shard.js.map +1 -1
  101. package/out/zero-cache/src/services/change-source/pg/schema/validation.d.ts +2 -2
  102. package/out/zero-cache/src/services/change-source/pg/schema/validation.d.ts.map +1 -1
  103. package/out/zero-cache/src/services/change-source/pg/schema/validation.js +2 -4
  104. package/out/zero-cache/src/services/change-source/pg/schema/validation.js.map +1 -1
  105. package/out/zero-cache/src/services/change-source/protocol/current/data.d.ts +158 -53
  106. package/out/zero-cache/src/services/change-source/protocol/current/data.d.ts.map +1 -1
  107. package/out/zero-cache/src/services/change-source/protocol/current/data.js +55 -10
  108. package/out/zero-cache/src/services/change-source/protocol/current/data.js.map +1 -1
  109. package/out/zero-cache/src/services/change-source/protocol/current/downstream.d.ts +210 -72
  110. package/out/zero-cache/src/services/change-source/protocol/current/downstream.d.ts.map +1 -1
  111. package/out/zero-cache/src/services/change-source/protocol/current.js +4 -2
  112. package/out/zero-cache/src/services/change-source/replica-schema.d.ts.map +1 -1
  113. package/out/zero-cache/src/services/change-source/replica-schema.js +19 -10
  114. package/out/zero-cache/src/services/change-source/replica-schema.js.map +1 -1
  115. package/out/zero-cache/src/services/change-streamer/change-streamer-service.js +1 -1
  116. package/out/zero-cache/src/services/change-streamer/change-streamer-service.js.map +1 -1
  117. package/out/zero-cache/src/services/change-streamer/change-streamer.d.ts +71 -25
  118. package/out/zero-cache/src/services/change-streamer/change-streamer.d.ts.map +1 -1
  119. package/out/zero-cache/src/services/change-streamer/change-streamer.js +1 -1
  120. package/out/zero-cache/src/services/change-streamer/change-streamer.js.map +1 -1
  121. package/out/zero-cache/src/services/change-streamer/schema/tables.d.ts +1 -0
  122. package/out/zero-cache/src/services/change-streamer/schema/tables.d.ts.map +1 -1
  123. package/out/zero-cache/src/services/change-streamer/schema/tables.js +6 -5
  124. package/out/zero-cache/src/services/change-streamer/schema/tables.js.map +1 -1
  125. package/out/zero-cache/src/services/change-streamer/storer.js +1 -1
  126. package/out/zero-cache/src/services/change-streamer/storer.js.map +1 -1
  127. package/out/zero-cache/src/services/change-streamer/subscriber.d.ts +2 -0
  128. package/out/zero-cache/src/services/change-streamer/subscriber.d.ts.map +1 -1
  129. package/out/zero-cache/src/services/change-streamer/subscriber.js +14 -1
  130. package/out/zero-cache/src/services/change-streamer/subscriber.js.map +1 -1
  131. package/out/zero-cache/src/services/heapz.d.ts.map +1 -1
  132. package/out/zero-cache/src/services/heapz.js +1 -0
  133. package/out/zero-cache/src/services/heapz.js.map +1 -1
  134. package/out/zero-cache/src/services/mutagen/error.d.ts.map +1 -1
  135. package/out/zero-cache/src/services/mutagen/error.js +4 -1
  136. package/out/zero-cache/src/services/mutagen/error.js.map +1 -1
  137. package/out/zero-cache/src/services/mutagen/mutagen.d.ts.map +1 -1
  138. package/out/zero-cache/src/services/mutagen/mutagen.js +1 -0
  139. package/out/zero-cache/src/services/mutagen/mutagen.js.map +1 -1
  140. package/out/zero-cache/src/services/mutagen/pusher.d.ts +7 -4
  141. package/out/zero-cache/src/services/mutagen/pusher.d.ts.map +1 -1
  142. package/out/zero-cache/src/services/mutagen/pusher.js +80 -8
  143. package/out/zero-cache/src/services/mutagen/pusher.js.map +1 -1
  144. package/out/zero-cache/src/services/replicator/change-processor.d.ts.map +1 -1
  145. package/out/zero-cache/src/services/replicator/change-processor.js +21 -29
  146. package/out/zero-cache/src/services/replicator/change-processor.js.map +1 -1
  147. package/out/zero-cache/src/services/replicator/schema/change-log.d.ts +1 -2
  148. package/out/zero-cache/src/services/replicator/schema/change-log.d.ts.map +1 -1
  149. package/out/zero-cache/src/services/replicator/schema/change-log.js +2 -5
  150. package/out/zero-cache/src/services/replicator/schema/change-log.js.map +1 -1
  151. package/out/zero-cache/src/services/{change-source → replicator/schema}/column-metadata.d.ts +3 -3
  152. package/out/zero-cache/src/services/replicator/schema/column-metadata.d.ts.map +1 -0
  153. package/out/zero-cache/src/services/{change-source → replicator/schema}/column-metadata.js +3 -3
  154. package/out/zero-cache/src/services/replicator/schema/column-metadata.js.map +1 -0
  155. package/out/zero-cache/src/services/replicator/schema/replication-state.d.ts.map +1 -1
  156. package/out/zero-cache/src/services/replicator/schema/replication-state.js +3 -1
  157. package/out/zero-cache/src/services/replicator/schema/replication-state.js.map +1 -1
  158. package/out/zero-cache/src/services/run-ast.js +1 -1
  159. package/out/zero-cache/src/services/run-ast.js.map +1 -1
  160. package/out/zero-cache/src/services/statz.d.ts.map +1 -1
  161. package/out/zero-cache/src/services/statz.js +1 -0
  162. package/out/zero-cache/src/services/statz.js.map +1 -1
  163. package/out/zero-cache/src/services/view-syncer/cvr-store.d.ts +1 -1
  164. package/out/zero-cache/src/services/view-syncer/cvr-store.d.ts.map +1 -1
  165. package/out/zero-cache/src/services/view-syncer/cvr-store.js +59 -40
  166. package/out/zero-cache/src/services/view-syncer/cvr-store.js.map +1 -1
  167. package/out/zero-cache/src/services/view-syncer/cvr.d.ts +0 -1
  168. package/out/zero-cache/src/services/view-syncer/cvr.d.ts.map +1 -1
  169. package/out/zero-cache/src/services/view-syncer/cvr.js +23 -6
  170. package/out/zero-cache/src/services/view-syncer/cvr.js.map +1 -1
  171. package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts +13 -14
  172. package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts.map +1 -1
  173. package/out/zero-cache/src/services/view-syncer/pipeline-driver.js +44 -56
  174. package/out/zero-cache/src/services/view-syncer/pipeline-driver.js.map +1 -1
  175. package/out/zero-cache/src/services/view-syncer/row-record-cache.d.ts +1 -1
  176. package/out/zero-cache/src/services/view-syncer/row-record-cache.d.ts.map +1 -1
  177. package/out/zero-cache/src/services/view-syncer/row-record-cache.js +22 -11
  178. package/out/zero-cache/src/services/view-syncer/row-record-cache.js.map +1 -1
  179. package/out/zero-cache/src/services/view-syncer/snapshotter.js +1 -1
  180. package/out/zero-cache/src/services/view-syncer/snapshotter.js.map +1 -1
  181. package/out/zero-cache/src/services/view-syncer/view-syncer.d.ts +6 -3
  182. package/out/zero-cache/src/services/view-syncer/view-syncer.d.ts.map +1 -1
  183. package/out/zero-cache/src/services/view-syncer/view-syncer.js +192 -217
  184. package/out/zero-cache/src/services/view-syncer/view-syncer.js.map +1 -1
  185. package/out/zero-cache/src/types/lexi-version.d.ts.map +1 -1
  186. package/out/zero-cache/src/types/lexi-version.js +4 -1
  187. package/out/zero-cache/src/types/lexi-version.js.map +1 -1
  188. package/out/zero-cache/src/types/lite.d.ts.map +1 -1
  189. package/out/zero-cache/src/types/lite.js +8 -2
  190. package/out/zero-cache/src/types/lite.js.map +1 -1
  191. package/out/zero-cache/src/types/shards.js +1 -1
  192. package/out/zero-cache/src/types/shards.js.map +1 -1
  193. package/out/zero-cache/src/types/sql.d.ts +5 -0
  194. package/out/zero-cache/src/types/sql.d.ts.map +1 -1
  195. package/out/zero-cache/src/types/sql.js +5 -1
  196. package/out/zero-cache/src/types/sql.js.map +1 -1
  197. package/out/zero-cache/src/types/subscription.js +1 -1
  198. package/out/zero-cache/src/types/subscription.js.map +1 -1
  199. package/out/zero-cache/src/workers/connect-params.d.ts +1 -0
  200. package/out/zero-cache/src/workers/connect-params.d.ts.map +1 -1
  201. package/out/zero-cache/src/workers/connect-params.js +2 -1
  202. package/out/zero-cache/src/workers/connect-params.js.map +1 -1
  203. package/out/zero-cache/src/workers/syncer-ws-message-handler.d.ts.map +1 -1
  204. package/out/zero-cache/src/workers/syncer-ws-message-handler.js +14 -6
  205. package/out/zero-cache/src/workers/syncer-ws-message-handler.js.map +1 -1
  206. package/out/zero-cache/src/workers/syncer.d.ts.map +1 -1
  207. package/out/zero-cache/src/workers/syncer.js +17 -10
  208. package/out/zero-cache/src/workers/syncer.js.map +1 -1
  209. package/out/zero-client/src/client/connection-manager.d.ts +8 -0
  210. package/out/zero-client/src/client/connection-manager.d.ts.map +1 -1
  211. package/out/zero-client/src/client/connection-manager.js +33 -0
  212. package/out/zero-client/src/client/connection-manager.js.map +1 -1
  213. package/out/zero-client/src/client/connection.d.ts.map +1 -1
  214. package/out/zero-client/src/client/connection.js +6 -3
  215. package/out/zero-client/src/client/connection.js.map +1 -1
  216. package/out/zero-client/src/client/error.js +1 -1
  217. package/out/zero-client/src/client/error.js.map +1 -1
  218. package/out/zero-client/src/client/mutator-proxy.d.ts.map +1 -1
  219. package/out/zero-client/src/client/mutator-proxy.js +15 -1
  220. package/out/zero-client/src/client/mutator-proxy.js.map +1 -1
  221. package/out/zero-client/src/client/options.d.ts +10 -0
  222. package/out/zero-client/src/client/options.d.ts.map +1 -1
  223. package/out/zero-client/src/client/options.js.map +1 -1
  224. package/out/zero-client/src/client/query-manager.d.ts +4 -0
  225. package/out/zero-client/src/client/query-manager.d.ts.map +1 -1
  226. package/out/zero-client/src/client/query-manager.js +7 -0
  227. package/out/zero-client/src/client/query-manager.js.map +1 -1
  228. package/out/zero-client/src/client/version.js +1 -1
  229. package/out/zero-client/src/client/zero.d.ts +3 -1
  230. package/out/zero-client/src/client/zero.d.ts.map +1 -1
  231. package/out/zero-client/src/client/zero.js +52 -7
  232. package/out/zero-client/src/client/zero.js.map +1 -1
  233. package/out/zero-client/src/mod.d.ts +1 -0
  234. package/out/zero-client/src/mod.d.ts.map +1 -1
  235. package/out/zero-protocol/src/connect.d.ts +4 -0
  236. package/out/zero-protocol/src/connect.d.ts.map +1 -1
  237. package/out/zero-protocol/src/connect.js +3 -1
  238. package/out/zero-protocol/src/connect.js.map +1 -1
  239. package/out/zero-protocol/src/protocol-version.d.ts +1 -1
  240. package/out/zero-protocol/src/protocol-version.d.ts.map +1 -1
  241. package/out/zero-protocol/src/protocol-version.js +1 -1
  242. package/out/zero-protocol/src/protocol-version.js.map +1 -1
  243. package/out/zero-protocol/src/push.d.ts +11 -2
  244. package/out/zero-protocol/src/push.d.ts.map +1 -1
  245. package/out/zero-protocol/src/push.js +22 -6
  246. package/out/zero-protocol/src/push.js.map +1 -1
  247. package/out/zero-protocol/src/up.d.ts +2 -0
  248. package/out/zero-protocol/src/up.d.ts.map +1 -1
  249. package/out/zero-react/src/mod.d.ts +3 -1
  250. package/out/zero-react/src/mod.d.ts.map +1 -1
  251. package/out/zero-react/src/paging-reducer.d.ts +61 -0
  252. package/out/zero-react/src/paging-reducer.d.ts.map +1 -0
  253. package/out/zero-react/src/paging-reducer.js +77 -0
  254. package/out/zero-react/src/paging-reducer.js.map +1 -0
  255. package/out/zero-react/src/use-query.d.ts +11 -1
  256. package/out/zero-react/src/use-query.d.ts.map +1 -1
  257. package/out/zero-react/src/use-query.js +13 -11
  258. package/out/zero-react/src/use-query.js.map +1 -1
  259. package/out/zero-react/src/use-rows.d.ts +39 -0
  260. package/out/zero-react/src/use-rows.d.ts.map +1 -0
  261. package/out/zero-react/src/use-rows.js +130 -0
  262. package/out/zero-react/src/use-rows.js.map +1 -0
  263. package/out/zero-react/src/use-zero-virtualizer.d.ts +122 -0
  264. package/out/zero-react/src/use-zero-virtualizer.d.ts.map +1 -0
  265. package/out/zero-react/src/use-zero-virtualizer.js +342 -0
  266. package/out/zero-react/src/use-zero-virtualizer.js.map +1 -0
  267. package/out/zero-react/src/zero-provider.js +1 -1
  268. package/out/zero-react/src/zero-provider.js.map +1 -1
  269. package/out/zero-server/src/adapters/drizzle.d.ts +18 -18
  270. package/out/zero-server/src/adapters/drizzle.d.ts.map +1 -1
  271. package/out/zero-server/src/adapters/drizzle.js +8 -22
  272. package/out/zero-server/src/adapters/drizzle.js.map +1 -1
  273. package/out/zero-server/src/adapters/pg.d.ts +19 -13
  274. package/out/zero-server/src/adapters/pg.d.ts.map +1 -1
  275. package/out/zero-server/src/adapters/pg.js.map +1 -1
  276. package/out/zero-server/src/adapters/postgresjs.d.ts +19 -13
  277. package/out/zero-server/src/adapters/postgresjs.d.ts.map +1 -1
  278. package/out/zero-server/src/adapters/postgresjs.js.map +1 -1
  279. package/out/zero-server/src/adapters/prisma.d.ts +66 -0
  280. package/out/zero-server/src/adapters/prisma.d.ts.map +1 -0
  281. package/out/zero-server/src/adapters/prisma.js +63 -0
  282. package/out/zero-server/src/adapters/prisma.js.map +1 -0
  283. package/out/zero-server/src/custom.js +1 -15
  284. package/out/zero-server/src/custom.js.map +1 -1
  285. package/out/zero-server/src/mod.d.ts +9 -8
  286. package/out/zero-server/src/mod.d.ts.map +1 -1
  287. package/out/zero-server/src/process-mutations.d.ts +2 -2
  288. package/out/zero-server/src/process-mutations.d.ts.map +1 -1
  289. package/out/zero-server/src/process-mutations.js +4 -8
  290. package/out/zero-server/src/process-mutations.js.map +1 -1
  291. package/out/zero-server/src/push-processor.js +1 -1
  292. package/out/zero-server/src/push-processor.js.map +1 -1
  293. package/out/zero-server/src/schema.d.ts.map +1 -1
  294. package/out/zero-server/src/schema.js +4 -1
  295. package/out/zero-server/src/schema.js.map +1 -1
  296. package/out/zero-server/src/zql-database.d.ts.map +1 -1
  297. package/out/zero-server/src/zql-database.js +17 -8
  298. package/out/zero-server/src/zql-database.js.map +1 -1
  299. package/out/zero-solid/src/mod.d.ts +1 -1
  300. package/out/zero-solid/src/mod.d.ts.map +1 -1
  301. package/out/zero-solid/src/use-query.d.ts +10 -1
  302. package/out/zero-solid/src/use-query.d.ts.map +1 -1
  303. package/out/zero-solid/src/use-query.js +21 -5
  304. package/out/zero-solid/src/use-query.js.map +1 -1
  305. package/out/zero-solid/src/use-zero.js +1 -1
  306. package/out/zero-solid/src/use-zero.js.map +1 -1
  307. package/out/zql/src/ivm/constraint.d.ts.map +1 -1
  308. package/out/zql/src/ivm/constraint.js +4 -1
  309. package/out/zql/src/ivm/constraint.js.map +1 -1
  310. package/out/zql/src/ivm/exists.d.ts.map +1 -1
  311. package/out/zql/src/ivm/exists.js +4 -1
  312. package/out/zql/src/ivm/exists.js.map +1 -1
  313. package/out/zql/src/ivm/join-utils.d.ts.map +1 -1
  314. package/out/zql/src/ivm/join-utils.js +8 -2
  315. package/out/zql/src/ivm/join-utils.js.map +1 -1
  316. package/out/zql/src/ivm/memory-source.d.ts.map +1 -1
  317. package/out/zql/src/ivm/memory-source.js +12 -3
  318. package/out/zql/src/ivm/memory-source.js.map +1 -1
  319. package/out/zql/src/ivm/push-accumulated.d.ts.map +1 -1
  320. package/out/zql/src/ivm/push-accumulated.js +25 -2
  321. package/out/zql/src/ivm/push-accumulated.js.map +1 -1
  322. package/out/zql/src/ivm/take.d.ts.map +1 -1
  323. package/out/zql/src/ivm/take.js +24 -6
  324. package/out/zql/src/ivm/take.js.map +1 -1
  325. package/out/zql/src/ivm/union-fan-in.d.ts.map +1 -1
  326. package/out/zql/src/ivm/union-fan-in.js +12 -3
  327. package/out/zql/src/ivm/union-fan-in.js.map +1 -1
  328. package/out/zqlite/src/table-source.d.ts.map +1 -1
  329. package/out/zqlite/src/table-source.js +1 -2
  330. package/out/zqlite/src/table-source.js.map +1 -1
  331. package/package.json +6 -2
  332. package/out/zero-cache/src/services/change-source/column-metadata.d.ts.map +0 -1
  333. package/out/zero-cache/src/services/change-source/column-metadata.js.map +0 -1
@@ -1 +1 @@
1
- {"version":3,"file":"pusher.js","sources":["../../../../../../zero-cache/src/services/mutagen/pusher.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {groupBy} from '../../../../shared/src/arrays.ts';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport {getErrorMessage} from '../../../../shared/src/error.ts';\nimport {must} from '../../../../shared/src/must.ts';\nimport {Queue} from '../../../../shared/src/queue.ts';\nimport type {Downstream} from '../../../../zero-protocol/src/down.ts';\nimport {ErrorKind} from '../../../../zero-protocol/src/error-kind.ts';\nimport {ErrorOrigin} from '../../../../zero-protocol/src/error-origin.ts';\nimport {ErrorReason} from '../../../../zero-protocol/src/error-reason.ts';\nimport {\n isProtocolError,\n type PushFailedBody,\n} from '../../../../zero-protocol/src/error.ts';\nimport {\n CLEANUP_RESULTS_MUTATION_NAME,\n pushResponseSchema,\n type MutationID,\n type PushBody,\n type PushResponse,\n} from '../../../../zero-protocol/src/push.ts';\nimport * as MutationType from '../../../../zero-protocol/src/mutation-type-enum.ts';\nimport {type ZeroConfig} from '../../config/zero-config.ts';\nimport {compileUrlPattern, fetchFromAPIServer} from '../../custom/fetch.ts';\nimport {getOrCreateCounter} from '../../observability/metrics.ts';\nimport {recordMutation} from '../../server/anonymous-otel-start.ts';\nimport {ProtocolErrorWithLevel} from '../../types/error-with-level.ts';\nimport type {Source} from '../../types/streams.ts';\nimport {Subscription} from '../../types/subscription.ts';\nimport type {HandlerResult, StreamResult} from '../../workers/connection.ts';\nimport type {RefCountedService, Service} from '../service.ts';\n\nexport interface Pusher extends RefCountedService {\n readonly pushURL: string | undefined;\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n ): Source<Downstream>;\n enqueuePush(\n clientID: string,\n push: 
PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n ): HandlerResult;\n ackMutationResponses(upToID: MutationID): Promise<void>;\n}\n\ntype Config = Pick<ZeroConfig, 'app' | 'shard'>;\n\n/**\n * Receives push messages from zero-client and forwards\n * them the the user's API server.\n *\n * If the user's API server is taking too long to process\n * the push, the PusherService will add the push to a queue\n * and send pushes in bulk the next time the user's API server\n * is available.\n *\n * - One PusherService exists per client group.\n * - Mutations for a given client are always sent in-order\n * - Mutations for different clients in the same group may be interleaved\n */\nexport class PusherService implements Service, Pusher {\n readonly id: string;\n readonly #pusher: PushWorker;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #pushConfig: ZeroConfig['push'] & {url: string[]};\n readonly #config: Config;\n readonly #lc: LogContext;\n readonly #pushURLPatterns: URLPattern[];\n #stopped: Promise<void> | undefined;\n #refCount = 0;\n #isStopped = false;\n\n constructor(\n appConfig: Config,\n pushConfig: ZeroConfig['push'] & {url: string[]},\n lc: LogContext,\n clientGroupID: string,\n ) {\n this.#config = appConfig;\n this.#lc = lc.withContext('component', 'pusherService');\n this.#pushURLPatterns = pushConfig.url.map(compileUrlPattern);\n this.#queue = new Queue();\n this.#pusher = new PushWorker(\n appConfig,\n lc,\n pushConfig.url,\n pushConfig.apiKey,\n this.#queue,\n );\n this.id = clientGroupID;\n this.#pushConfig = pushConfig;\n }\n\n get pushURL(): string | undefined {\n return this.#pusher.pushURL[0];\n }\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n ) {\n return this.#pusher.initConnection(clientID, wsID, userPushURL);\n }\n\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n ): Exclude<HandlerResult, 
StreamResult> {\n if (!this.#pushConfig.forwardCookies) {\n httpCookie = undefined; // remove cookies if not forwarded\n }\n this.#queue.enqueue({push, auth, clientID, httpCookie});\n\n return {\n type: 'ok',\n };\n }\n\n async ackMutationResponses(upToID: MutationID) {\n const url = this.#pushConfig.url[0];\n if (!url) {\n // No push URL configured, skip cleanup\n return;\n }\n\n const cleanupBody: PushBody = {\n clientGroupID: this.id,\n mutations: [\n {\n type: MutationType.Custom,\n id: 0, // Not tracked - this is fire-and-forget\n clientID: upToID.clientID,\n name: CLEANUP_RESULTS_MUTATION_NAME,\n args: [\n {\n clientGroupID: this.id,\n clientID: upToID.clientID,\n upToMutationID: upToID.id,\n },\n ],\n timestamp: Date.now(),\n },\n ],\n pushVersion: 1,\n timestamp: Date.now(),\n requestID: `cleanup-${this.id}-${upToID.clientID}-${upToID.id}`,\n };\n\n try {\n await fetchFromAPIServer(\n pushResponseSchema,\n 'push',\n this.#lc,\n url,\n false,\n this.#pushURLPatterns,\n {appID: this.#config.app.id, shardNum: this.#config.shard.num},\n {apiKey: this.#pushConfig.apiKey},\n cleanupBody,\n );\n } catch (e) {\n this.#lc.warn?.('Failed to send cleanup mutation', {\n error: getErrorMessage(e),\n });\n }\n }\n\n ref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n ++this.#refCount;\n }\n\n unref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n --this.#refCount;\n if (this.#refCount <= 0) {\n void this.stop();\n }\n }\n\n hasRefs(): boolean {\n return this.#refCount > 0;\n }\n\n run(): Promise<void> {\n this.#stopped = this.#pusher.run();\n return this.#stopped;\n }\n\n stop(): Promise<void> {\n if (this.#isStopped) {\n return must(this.#stopped, 'Stop was called before `run`');\n }\n this.#isStopped = true;\n this.#queue.enqueue('stop');\n return must(this.#stopped, 'Stop was called before `run`');\n }\n}\n\ntype PusherEntry = {\n push: PushBody;\n auth: string | undefined;\n httpCookie: string | undefined;\n clientID: 
string;\n};\ntype PusherEntryOrStop = PusherEntry | 'stop';\n\n/**\n * Awaits items in the queue then drains and sends them all\n * to the user's API server.\n */\nclass PushWorker {\n readonly #pushURLs: string[];\n readonly #pushURLPatterns: URLPattern[];\n readonly #apiKey: string | undefined;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #lc: LogContext;\n readonly #config: Config;\n readonly #clients: Map<\n string,\n {\n wsID: string;\n downstream: Subscription<Downstream>;\n }\n >;\n #userPushURL?: string | undefined;\n\n readonly #customMutations = getOrCreateCounter(\n 'mutation',\n 'custom',\n 'Number of custom mutations processed',\n );\n readonly #pushes = getOrCreateCounter(\n 'mutation',\n 'pushes',\n 'Number of pushes processed by the pusher',\n );\n\n constructor(\n config: Config,\n lc: LogContext,\n pushURL: string[],\n apiKey: string | undefined,\n queue: Queue<PusherEntryOrStop>,\n ) {\n this.#pushURLs = pushURL;\n this.#lc = lc.withContext('component', 'pusher');\n this.#pushURLPatterns = pushURL.map(compileUrlPattern);\n this.#apiKey = apiKey;\n this.#queue = queue;\n this.#config = config;\n this.#clients = new Map();\n }\n\n get pushURL() {\n return this.#pushURLs;\n }\n\n /**\n * Returns a new downstream stream if the clientID,wsID pair has not been seen before.\n * If a clientID already exists with a different wsID, that client's downstream is cancelled.\n */\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n ) {\n const existing = this.#clients.get(clientID);\n if (existing && existing.wsID === wsID) {\n // already initialized for this socket\n throw new Error('Connection was already initialized');\n }\n\n // client is back on a new connection\n if (existing) {\n existing.downstream.cancel();\n }\n\n // Handle client group level URL parameters\n if (this.#userPushURL === undefined) {\n // First client in the group - store its URL\n this.#userPushURL = userPushURL;\n } else {\n // Validate 
that subsequent clients have compatible parameters\n if (this.#userPushURL !== userPushURL) {\n this.#lc.warn?.(\n 'Client provided different mutate parameters than client group',\n {\n clientID,\n clientURL: userPushURL,\n clientGroupURL: this.#userPushURL,\n },\n );\n }\n }\n\n const downstream = Subscription.create<Downstream>({\n cleanup: () => {\n this.#clients.delete(clientID);\n },\n });\n this.#clients.set(clientID, {wsID, downstream});\n return downstream;\n }\n\n async run() {\n for (;;) {\n const task = await this.#queue.dequeue();\n const rest = this.#queue.drain();\n const [pushes, terminate] = combinePushes([task, ...rest]);\n for (const push of pushes) {\n const response = await this.#processPush(push);\n await this.#fanOutResponses(response);\n }\n\n if (terminate) {\n break;\n }\n }\n }\n\n /**\n * 1. If the entire `push` fails, we send the error to relevant clients.\n * 2. If the push succeeds, we look for any mutation failure that should cause the connection to terminate\n * and terminate the connection for those clients.\n */\n #fanOutResponses(response: PushResponse) {\n const connectionTerminations: (() => void)[] = [];\n\n // if the entire push failed, send that to the client.\n if ('kind' in response || 'error' in response) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a push error.',\n response,\n );\n const groupedMutationIDs = groupBy(\n response.mutationIDs ?? [],\n m => m.clientID,\n );\n for (const [clientID, mutationIDs] of groupedMutationIDs) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n // We do not resolve mutations on the client if the push fails\n // as those mutations will be retried.\n if ('error' in response) {\n // This error code path will eventually be removed when we\n // no longer support the legacy push error format.\n const pushFailedBody: PushFailedBody =\n response.error === 'http'\n ? 
{\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.HTTP,\n status: response.status,\n bodyPreview: response.details,\n mutationIDs,\n message: `Fetch from API server returned non-OK status ${response.status}`,\n }\n : response.error === 'unsupportedPushVersion'\n ? {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.UnsupportedPushVersion,\n mutationIDs,\n message: `Unsupported push version`,\n }\n : {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.Internal,\n mutationIDs,\n message:\n response.error === 'zeroPusher'\n ? response.details\n : response.error === 'unsupportedSchemaVersion'\n ? 'Unsupported schema version'\n : 'An unknown error occurred while pushing to the API server',\n };\n\n this.#failDownstream(client.downstream, pushFailedBody);\n } else if ('kind' in response) {\n this.#failDownstream(client.downstream, response);\n } else {\n unreachable(response);\n }\n }\n } else {\n // Look for mutations results that should cause us to terminate the connection\n const groupedMutations = groupBy(response.mutations, m => m.id.clientID);\n for (const [clientID, mutations] of groupedMutations) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n let failure: PushFailedBody | undefined;\n let i = 0;\n for (; i < mutations.length; i++) {\n const m = mutations[i];\n if ('error' in m.result) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a mutation error.',\n m.result,\n );\n }\n // This error code path will eventually be removed,\n // keeping this for backwards compatibility, but the server\n // should now return a PushFailedBody with the mutationIDs\n if ('error' in m.result && m.result.error === 'oooMutation') {\n failure = {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.OutOfOrderMutation,\n message: 'mutation was out of order',\n details: m.result.details,\n mutationIDs: 
mutations.map(m => ({\n clientID: m.id.clientID,\n id: m.id.id,\n })),\n };\n break;\n }\n }\n\n if (failure && i < mutations.length - 1) {\n this.#lc.warn?.(\n 'push-response contains mutations after a mutation which should fatal the connection',\n );\n }\n\n if (failure) {\n connectionTerminations.push(() =>\n this.#failDownstream(client.downstream, failure),\n );\n }\n }\n }\n\n connectionTerminations.forEach(cb => cb());\n }\n\n async #processPush(entry: PusherEntry): Promise<PushResponse> {\n this.#customMutations.add(entry.push.mutations.length, {\n clientGroupID: entry.push.clientGroupID,\n });\n this.#pushes.add(1, {\n clientGroupID: entry.push.clientGroupID,\n });\n\n // Record custom mutations for telemetry\n recordMutation('custom', entry.push.mutations.length);\n\n const url =\n this.#userPushURL ??\n must(this.#pushURLs[0], 'ZERO_MUTATE_URL is not set');\n\n this.#lc.debug?.(\n 'pushing to',\n url,\n 'with',\n entry.push.mutations.length,\n 'mutations',\n );\n\n let mutationIDs: MutationID[] = [];\n\n try {\n mutationIDs = entry.push.mutations.map(m => ({\n id: m.id,\n clientID: m.clientID,\n }));\n\n return await fetchFromAPIServer(\n pushResponseSchema,\n 'push',\n this.#lc,\n url,\n url === this.#userPushURL,\n this.#pushURLPatterns,\n {\n appID: this.#config.app.id,\n shardNum: this.#config.shard.num,\n },\n {\n apiKey: this.#apiKey,\n token: entry.auth,\n cookie: entry.httpCookie,\n },\n entry.push,\n );\n } catch (e) {\n if (isProtocolError(e) && e.errorBody.kind === ErrorKind.PushFailed) {\n return {\n ...e.errorBody,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n\n return {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.Internal,\n message: `Failed to push: ${getErrorMessage(e)}`,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n }\n\n #failDownstream(\n downstream: Subscription<Downstream>,\n errorBody: PushFailedBody,\n ): void {\n const logLevel = errorBody.origin === 
ErrorOrigin.Server ? 'warn' : 'error';\n downstream.fail(new ProtocolErrorWithLevel(errorBody, logLevel));\n }\n}\n\n/**\n * Pushes for different clientIDs could theoretically be interleaved.\n *\n * In order to do efficient batching to the user's API server,\n * we collect all pushes for the same clientID into a single push.\n */\nexport function combinePushes(\n entries: readonly (PusherEntryOrStop | undefined)[],\n): [PusherEntry[], boolean] {\n const pushesByClientID = new Map<string, PusherEntry[]>();\n\n function collect() {\n const ret: PusherEntry[] = [];\n for (const entries of pushesByClientID.values()) {\n const composite: PusherEntry = {\n ...entries[0],\n push: {\n ...entries[0].push,\n mutations: [],\n },\n };\n ret.push(composite);\n for (const entry of entries) {\n assertAreCompatiblePushes(composite, entry);\n composite.push.mutations.push(...entry.push.mutations);\n }\n }\n return ret;\n }\n\n for (const entry of entries) {\n if (entry === 'stop' || entry === undefined) {\n return [collect(), true];\n }\n\n const {clientID} = entry;\n const existing = pushesByClientID.get(clientID);\n if (existing) {\n existing.push(entry);\n } else {\n pushesByClientID.set(clientID, [entry]);\n }\n }\n\n return [collect(), false] as const;\n}\n\n// These invariants should always be true for a given clientID.\n// If they are not, we have a bug in the code somewhere.\nfunction assertAreCompatiblePushes(left: PusherEntry, right: PusherEntry) {\n assert(\n left.clientID === right.clientID,\n 'clientID must be the same for all pushes',\n );\n assert(\n left.auth === right.auth,\n 'auth must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.schemaVersion === right.push.schemaVersion,\n 'schemaVersion must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.pushVersion === right.push.pushVersion,\n 'pushVersion must be the same for all pushes with the same clientID',\n 
);\n}\n"],"names":["MutationType.Custom","ErrorKind.PushFailed","ErrorOrigin.ZeroCache","ErrorReason.HTTP","ErrorOrigin.Server","ErrorReason.UnsupportedPushVersion","ErrorReason.Internal","ErrorReason.OutOfOrderMutation","m","entries"],"mappings":";;;;;;;;;;;;;;;;;AAgEO,MAAM,cAAyC;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT;AAAA,EACA,YAAY;AAAA,EACZ,aAAa;AAAA,EAEb,YACE,WACA,YACA,IACA,eACA;AACA,SAAK,UAAU;AACf,SAAK,MAAM,GAAG,YAAY,aAAa,eAAe;AACtD,SAAK,mBAAmB,WAAW,IAAI,IAAI,iBAAiB;AAC5D,SAAK,SAAS,IAAI,MAAA;AAClB,SAAK,UAAU,IAAI;AAAA,MACjB;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,WAAW;AAAA,MACX,KAAK;AAAA,IAAA;AAEP,SAAK,KAAK;AACV,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,IAAI,UAA8B;AAChC,WAAO,KAAK,QAAQ,QAAQ,CAAC;AAAA,EAC/B;AAAA,EAEA,eACE,UACA,MACA,aACA;AACA,WAAO,KAAK,QAAQ,eAAe,UAAU,MAAM,WAAW;AAAA,EAChE;AAAA,EAEA,YACE,UACA,MACA,MACA,YACsC;AACtC,QAAI,CAAC,KAAK,YAAY,gBAAgB;AACpC,mBAAa;AAAA,IACf;AACA,SAAK,OAAO,QAAQ,EAAC,MAAM,MAAM,UAAU,YAAW;AAEtD,WAAO;AAAA,MACL,MAAM;AAAA,IAAA;AAAA,EAEV;AAAA,EAEA,MAAM,qBAAqB,QAAoB;AAC7C,UAAM,MAAM,KAAK,YAAY,IAAI,CAAC;AAClC,QAAI,CAAC,KAAK;AAER;AAAA,IACF;AAEA,UAAM,cAAwB;AAAA,MAC5B,eAAe,KAAK;AAAA,MACpB,WAAW;AAAA,QACT;AAAA,UACE,MAAMA;AAAAA,UACN,IAAI;AAAA;AAAA,UACJ,UAAU,OAAO;AAAA,UACjB,MAAM;AAAA,UACN,MAAM;AAAA,YACJ;AAAA,cACE,eAAe,KAAK;AAAA,cACpB,UAAU,OAAO;AAAA,cACjB,gBAAgB,OAAO;AAAA,YAAA;AAAA,UACzB;AAAA,UAEF,WAAW,KAAK,IAAA;AAAA,QAAI;AAAA,MACtB;AAAA,MAEF,aAAa;AAAA,MACb,WAAW,KAAK,IAAA;AAAA,MAChB,WAAW,WAAW,KAAK,EAAE,IAAI,OAAO,QAAQ,IAAI,OAAO,EAAE;AAAA,IAAA;AAG/D,QAAI;AACF,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL,EAAC,OAAO,KAAK,QAAQ,IAAI,IAAI,UAAU,KAAK,QAAQ,MAAM,IAAA;AAAA,QAC1D,EAAC,QAAQ,KAAK,YAAY,OAAA;AAAA,QAC1B;AAAA,MAAA;AAAA,IAEJ,SAAS,GAAG;AACV,WAAK,IAAI,OAAO,mCAAmC;AAAA,QACjD,OAAO,gBAAgB,CAAC;AAAA,MAAA,CACzB;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAM;AACJ,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AAAA,EACT;AAAA,EAEA,QAAQ;AACN,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AACP,QAAI,KAAK,aAAa,GAAG;AACv
B,WAAK,KAAK,KAAA;AAAA,IACZ;AAAA,EACF;AAAA,EAEA,UAAmB;AACjB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,MAAqB;AACnB,SAAK,WAAW,KAAK,QAAQ,IAAA;AAC7B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAAsB;AACpB,QAAI,KAAK,YAAY;AACnB,aAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,IAC3D;AACA,SAAK,aAAa;AAClB,SAAK,OAAO,QAAQ,MAAM;AAC1B,WAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,EAC3D;AACF;AAcA,MAAM,WAAW;AAAA,EACN;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAOT;AAAA,EAES,mBAAmB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAEO,UAAU;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAGF,YACE,QACA,IACA,SACA,QACA,OACA;AACA,SAAK,YAAY;AACjB,SAAK,MAAM,GAAG,YAAY,aAAa,QAAQ;AAC/C,SAAK,mBAAmB,QAAQ,IAAI,iBAAiB;AACrD,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,UAAU;AACf,SAAK,+BAAe,IAAA;AAAA,EACtB;AAAA,EAEA,IAAI,UAAU;AACZ,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,eACE,UACA,MACA,aACA;AACA,UAAM,WAAW,KAAK,SAAS,IAAI,QAAQ;AAC3C,QAAI,YAAY,SAAS,SAAS,MAAM;AAEtC,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAGA,QAAI,UAAU;AACZ,eAAS,WAAW,OAAA;AAAA,IACtB;AAGA,QAAI,KAAK,iBAAiB,QAAW;AAEnC,WAAK,eAAe;AAAA,IACtB,OAAO;AAEL,UAAI,KAAK,iBAAiB,aAAa;AACrC,aAAK,IAAI;AAAA,UACP;AAAA,UACA;AAAA,YACE;AAAA,YACA,WAAW;AAAA,YACX,gBAAgB,KAAK;AAAA,UAAA;AAAA,QACvB;AAAA,MAEJ;AAAA,IACF;AAEA,UAAM,aAAa,aAAa,OAAmB;AAAA,MACjD,SAAS,MAAM;AACb,aAAK,SAAS,OAAO,QAAQ;AAAA,MAC/B;AAAA,IAAA,CACD;AACD,SAAK,SAAS,IAAI,UAAU,EAAC,MAAM,YAAW;AAC9C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,MAAM;AACV,eAAS;AACP,YAAM,OAAO,MAAM,KAAK,OAAO,QAAA;AAC/B,YAAM,OAAO,KAAK,OAAO,MAAA;AACzB,YAAM,CAAC,QAAQ,SAAS,IAAI,cAAc,CAAC,MAAM,GAAG,IAAI,CAAC;AACzD,iBAAW,QAAQ,QAAQ;AACzB,cAAM,WAAW,MAAM,KAAK,aAAa,IAAI;AAC7C,cAAM,KAAK,iBAAiB,QAAQ;AAAA,MACtC;AAEA,UAAI,WAAW;AACb;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,UAAwB;AACvC,UAAM,yBAAyC,CAAA;AAG/C,QAAI,UAAU,YAAY,WAAW,UAAU;AAC7C,WAAK,IAAI;AAAA,QACP;AAAA,QACA;AAAA,MAAA;AAEF,YAAM,qBAAqB;AAAA,QACzB,SAAS,eAAe,CAAA;AAAA,QACxB,OAAK,EAAE;AAAA,MAAA;AAET,iBAAW,CAAC,UAAU,WAAW,KAAK,oBAAoB;AACxD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAIA,YA
AI,WAAW,UAAU;AAGvB,gBAAM,iBACJ,SAAS,UAAU,SACf;AAAA,YACE,MAAMC;AAAAA,YACN,QAAQC;AAAAA,YACR,QAAQC;AAAAA,YACR,QAAQ,SAAS;AAAA,YACjB,aAAa,SAAS;AAAA,YACtB;AAAA,YACA,SAAS,gDAAgD,SAAS,MAAM;AAAA,UAAA,IAE1E,SAAS,UAAU,2BACjB;AAAA,YACE,MAAMF;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQC;AAAAA,YACR;AAAA,YACA,SAAS;AAAA,UAAA,IAEX;AAAA,YACE,MAAMJ;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQE;AAAAA,YACR;AAAA,YACA,SACE,SAAS,UAAU,eACf,SAAS,UACT,SAAS,UAAU,6BACjB,+BACA;AAAA,UAAA;AAGlB,eAAK,gBAAgB,OAAO,YAAY,cAAc;AAAA,QACxD,WAAW,UAAU,UAAU;AAC7B,eAAK,gBAAgB,OAAO,YAAY,QAAQ;AAAA,QAClD,OAAO;AACL,sBAAoB;AAAA,QACtB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,mBAAmB,QAAQ,SAAS,WAAW,CAAA,MAAK,EAAE,GAAG,QAAQ;AACvE,iBAAW,CAAC,UAAU,SAAS,KAAK,kBAAkB;AACpD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAEA,YAAI;AACJ,YAAI,IAAI;AACR,eAAO,IAAI,UAAU,QAAQ,KAAK;AAChC,gBAAM,IAAI,UAAU,CAAC;AACrB,cAAI,WAAW,EAAE,QAAQ;AACvB,iBAAK,IAAI;AAAA,cACP;AAAA,cACA,EAAE;AAAA,YAAA;AAAA,UAEN;AAIA,cAAI,WAAW,EAAE,UAAU,EAAE,OAAO,UAAU,eAAe;AAC3D,sBAAU;AAAA,cACR,MAAML;AAAAA,cACN,QAAQG;AAAAA,cACR,QAAQG;AAAAA,cACR,SAAS;AAAA,cACT,SAAS,EAAE,OAAO;AAAA,cAClB,aAAa,UAAU,IAAI,CAAAC,QAAM;AAAA,gBAC/B,UAAUA,GAAE,GAAG;AAAA,gBACf,IAAIA,GAAE,GAAG;AAAA,cAAA,EACT;AAAA,YAAA;AAEJ;AAAA,UACF;AAAA,QACF;AAEA,YAAI,WAAW,IAAI,UAAU,SAAS,GAAG;AACvC,eAAK,IAAI;AAAA,YACP;AAAA,UAAA;AAAA,QAEJ;AAEA,YAAI,SAAS;AACX,iCAAuB;AAAA,YAAK,MAC1B,KAAK,gBAAgB,OAAO,YAAY,OAAO;AAAA,UAAA;AAAA,QAEnD;AAAA,MACF;AAAA,IACF;AAEA,2BAAuB,QAAQ,CAAA,OAAM,GAAA,CAAI;AAAA,EAC3C;AAAA,EAEA,MAAM,aAAa,OAA2C;AAC5D,SAAK,iBAAiB,IAAI,MAAM,KAAK,UAAU,QAAQ;AAAA,MACrD,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AACD,SAAK,QAAQ,IAAI,GAAG;AAAA,MAClB,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AAGD,mBAAe,UAAU,MAAM,KAAK,UAAU,MAAM;AAEpD,UAAM,MACJ,KAAK,gBACL,KAAK,KAAK,UAAU,CAAC,GAAG,4BAA4B;AAEtD,SAAK,IAAI;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,MACrB;AAAA,IAAA;AAGF,QAAI,cAA4B,CAAA;AAEhC,QAAI;AACF,oBAAc,MAAM,KAAK,UAAU,IAAI,CAAA,OAAM;AAAA,QAC3C,IAAI,EAAE;AAAA,QACN,UAAU,EAAE;AAAA,MAAA,EACZ;AAEF,aAAO,MAAM;AAAA,QACX;AAAA,QACA;AAAA,QAC
A,KAAK;AAAA,QACL;AAAA,QACA,QAAQ,KAAK;AAAA,QACb,KAAK;AAAA,QACL;AAAA,UACE,OAAO,KAAK,QAAQ,IAAI;AAAA,UACxB,UAAU,KAAK,QAAQ,MAAM;AAAA,QAAA;AAAA,QAE/B;AAAA,UACE,QAAQ,KAAK;AAAA,UACb,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,QAAA;AAAA,QAEhB,MAAM;AAAA,MAAA;AAAA,IAEV,SAAS,GAAG;AACV,UAAI,gBAAgB,CAAC,KAAK,EAAE,UAAU,SAASP,YAAsB;AACnE,eAAO;AAAA,UACL,GAAG,EAAE;AAAA,UACL;AAAA,QAAA;AAAA,MAEJ;AAEA,aAAO;AAAA,QACL,MAAMA;AAAAA,QACN,QAAQC;AAAAA,QACR,QAAQI;AAAAA,QACR,SAAS,mBAAmB,gBAAgB,CAAC,CAAC;AAAA,QAC9C;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF;AAAA,EAEA,gBACE,YACA,WACM;AACN,UAAM,WAAW,UAAU,WAAWF,SAAqB,SAAS;AACpE,eAAW,KAAK,IAAI,uBAAuB,WAAW,QAAQ,CAAC;AAAA,EACjE;AACF;AAQO,SAAS,cACd,SAC0B;AAC1B,QAAM,uCAAuB,IAAA;AAE7B,WAAS,UAAU;AACjB,UAAM,MAAqB,CAAA;AAC3B,eAAWK,YAAW,iBAAiB,UAAU;AAC/C,YAAM,YAAyB;AAAA,QAC7B,GAAGA,SAAQ,CAAC;AAAA,QACZ,MAAM;AAAA,UACJ,GAAGA,SAAQ,CAAC,EAAE;AAAA,UACd,WAAW,CAAA;AAAA,QAAC;AAAA,MACd;AAEF,UAAI,KAAK,SAAS;AAClB,iBAAW,SAASA,UAAS;AAC3B,kCAA0B,WAAW,KAAK;AAC1C,kBAAU,KAAK,UAAU,KAAK,GAAG,MAAM,KAAK,SAAS;AAAA,MACvD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,aAAW,SAAS,SAAS;AAC3B,QAAI,UAAU,UAAU,UAAU,QAAW;AAC3C,aAAO,CAAC,QAAA,GAAW,IAAI;AAAA,IACzB;AAEA,UAAM,EAAC,aAAY;AACnB,UAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK;AAAA,IACrB,OAAO;AACL,uBAAiB,IAAI,UAAU,CAAC,KAAK,CAAC;AAAA,IACxC;AAAA,EACF;AAEA,SAAO,CAAC,QAAA,GAAW,KAAK;AAC1B;AAIA,SAAS,0BAA0B,MAAmB,OAAoB;AACxE;AAAA,IACE,KAAK,aAAa,MAAM;AAAA,IACxB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,SAAS,MAAM;AAAA,IACpB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,kBAAkB,MAAM,KAAK;AAAA,IACvC;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,gBAAgB,MAAM,KAAK;AAAA,IACrC;AAAA,EAAA;AAEJ;"}
1
+ {"version":3,"file":"pusher.js","sources":["../../../../../../zero-cache/src/services/mutagen/pusher.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {groupBy} from '../../../../shared/src/arrays.ts';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport {getErrorMessage} from '../../../../shared/src/error.ts';\nimport {must} from '../../../../shared/src/must.ts';\nimport {Queue} from '../../../../shared/src/queue.ts';\nimport type {Downstream} from '../../../../zero-protocol/src/down.ts';\nimport {ErrorKind} from '../../../../zero-protocol/src/error-kind.ts';\nimport {ErrorOrigin} from '../../../../zero-protocol/src/error-origin.ts';\nimport {ErrorReason} from '../../../../zero-protocol/src/error-reason.ts';\nimport {\n isProtocolError,\n type PushFailedBody,\n} from '../../../../zero-protocol/src/error.ts';\nimport * as MutationType from '../../../../zero-protocol/src/mutation-type-enum.ts';\nimport {\n CLEANUP_RESULTS_MUTATION_NAME,\n pushResponseSchema,\n type MutationID,\n type PushBody,\n type PushResponse,\n} from '../../../../zero-protocol/src/push.ts';\nimport {type ZeroConfig} from '../../config/zero-config.ts';\nimport {compileUrlPattern, fetchFromAPIServer} from '../../custom/fetch.ts';\nimport {getOrCreateCounter} from '../../observability/metrics.ts';\nimport {recordMutation} from '../../server/anonymous-otel-start.ts';\nimport {ProtocolErrorWithLevel} from '../../types/error-with-level.ts';\nimport type {Source} from '../../types/streams.ts';\nimport {Subscription} from '../../types/subscription.ts';\nimport type {HandlerResult, StreamResult} from '../../workers/connection.ts';\nimport type {RefCountedService, Service} from '../service.ts';\n\nexport interface Pusher extends RefCountedService {\n readonly pushURL: string | undefined;\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n userPushHeaders: Record<string, string> | undefined,\n ): 
Source<Downstream>;\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n origin: string | undefined,\n ): HandlerResult;\n ackMutationResponses(upToID: MutationID): Promise<void>;\n deleteClientMutations(clientIDs: string[]): Promise<void>;\n}\n\ntype Config = Pick<ZeroConfig, 'app' | 'shard'>;\n\n/**\n * Receives push messages from zero-client and forwards\n * them the the user's API server.\n *\n * If the user's API server is taking too long to process\n * the push, the PusherService will add the push to a queue\n * and send pushes in bulk the next time the user's API server\n * is available.\n *\n * - One PusherService exists per client group.\n * - Mutations for a given client are always sent in-order\n * - Mutations for different clients in the same group may be interleaved\n */\nexport class PusherService implements Service, Pusher {\n readonly id: string;\n readonly #pusher: PushWorker;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #pushConfig: ZeroConfig['push'] & {url: string[]};\n readonly #config: Config;\n readonly #lc: LogContext;\n readonly #pushURLPatterns: URLPattern[];\n #stopped: Promise<void> | undefined;\n #refCount = 0;\n #isStopped = false;\n\n constructor(\n appConfig: Config,\n pushConfig: ZeroConfig['push'] & {url: string[]},\n lc: LogContext,\n clientGroupID: string,\n ) {\n this.#config = appConfig;\n this.#lc = lc.withContext('component', 'pusherService');\n this.#pushURLPatterns = pushConfig.url.map(compileUrlPattern);\n this.#queue = new Queue();\n this.#pusher = new PushWorker(\n appConfig,\n lc,\n pushConfig.url,\n pushConfig.apiKey,\n pushConfig.allowedClientHeaders,\n this.#queue,\n );\n this.id = clientGroupID;\n this.#pushConfig = pushConfig;\n }\n\n get pushURL(): string | undefined {\n return this.#pusher.pushURL[0];\n }\n\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n userPushHeaders: Record<string, string> | 
undefined,\n ) {\n return this.#pusher.initConnection(\n clientID,\n wsID,\n userPushURL,\n userPushHeaders,\n );\n }\n\n enqueuePush(\n clientID: string,\n push: PushBody,\n auth: string | undefined,\n httpCookie: string | undefined,\n origin: string | undefined,\n ): Exclude<HandlerResult, StreamResult> {\n if (!this.#pushConfig.forwardCookies) {\n httpCookie = undefined; // remove cookies if not forwarded\n }\n this.#queue.enqueue({push, auth, clientID, httpCookie, origin});\n\n return {\n type: 'ok',\n };\n }\n\n async ackMutationResponses(upToID: MutationID) {\n const url = this.#pushConfig.url[0];\n if (!url) {\n // No push URL configured, skip cleanup\n return;\n }\n\n const cleanupBody: PushBody = {\n clientGroupID: this.id,\n mutations: [\n {\n type: MutationType.Custom,\n id: 0, // Not tracked - this is fire-and-forget\n clientID: upToID.clientID,\n name: CLEANUP_RESULTS_MUTATION_NAME,\n args: [\n {\n type: 'single',\n clientGroupID: this.id,\n clientID: upToID.clientID,\n upToMutationID: upToID.id,\n },\n ],\n timestamp: Date.now(),\n },\n ],\n pushVersion: 1,\n timestamp: Date.now(),\n requestID: `cleanup-${this.id}-${upToID.clientID}-${upToID.id}`,\n };\n\n try {\n await fetchFromAPIServer(\n pushResponseSchema,\n 'push',\n this.#lc,\n url,\n false,\n this.#pushURLPatterns,\n {appID: this.#config.app.id, shardNum: this.#config.shard.num},\n {apiKey: this.#pushConfig.apiKey},\n cleanupBody,\n );\n } catch (e) {\n this.#lc.warn?.('Failed to send cleanup mutation', {\n error: getErrorMessage(e),\n });\n }\n }\n\n async deleteClientMutations(clientIDs: string[]) {\n if (clientIDs.length === 0) {\n return;\n }\n const url = this.#pushConfig.url[0];\n if (!url) {\n // No push URL configured, skip cleanup\n return;\n }\n\n const cleanupBody: PushBody = {\n clientGroupID: this.id,\n mutations: [\n {\n type: MutationType.Custom,\n id: 0, // Not tracked - this is fire-and-forget\n clientID: clientIDs[0], // Use first client as sender\n name: 
CLEANUP_RESULTS_MUTATION_NAME,\n args: [\n {\n type: 'bulk',\n clientGroupID: this.id,\n clientIDs,\n },\n ],\n timestamp: Date.now(),\n },\n ],\n pushVersion: 1,\n timestamp: Date.now(),\n requestID: `cleanup-bulk-${this.id}-${Date.now()}`,\n };\n\n try {\n await fetchFromAPIServer(\n pushResponseSchema,\n 'push',\n this.#lc,\n url,\n false,\n this.#pushURLPatterns,\n {appID: this.#config.app.id, shardNum: this.#config.shard.num},\n {apiKey: this.#pushConfig.apiKey},\n cleanupBody,\n );\n } catch (e) {\n this.#lc.warn?.('Failed to send bulk cleanup mutation', {\n error: getErrorMessage(e),\n });\n }\n }\n\n ref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n ++this.#refCount;\n }\n\n unref() {\n assert(!this.#isStopped, 'PusherService is already stopped');\n --this.#refCount;\n if (this.#refCount <= 0) {\n void this.stop();\n }\n }\n\n hasRefs(): boolean {\n return this.#refCount > 0;\n }\n\n run(): Promise<void> {\n this.#stopped = this.#pusher.run();\n return this.#stopped;\n }\n\n stop(): Promise<void> {\n if (this.#isStopped) {\n return must(this.#stopped, 'Stop was called before `run`');\n }\n this.#isStopped = true;\n this.#queue.enqueue('stop');\n return must(this.#stopped, 'Stop was called before `run`');\n }\n}\n\ntype PusherEntry = {\n push: PushBody;\n auth: string | undefined;\n httpCookie: string | undefined;\n origin: string | undefined;\n clientID: string;\n};\ntype PusherEntryOrStop = PusherEntry | 'stop';\n\n/**\n * Awaits items in the queue then drains and sends them all\n * to the user's API server.\n */\nclass PushWorker {\n readonly #pushURLs: string[];\n readonly #pushURLPatterns: URLPattern[];\n readonly #apiKey: string | undefined;\n readonly #allowedClientHeaders: readonly string[] | undefined;\n readonly #queue: Queue<PusherEntryOrStop>;\n readonly #lc: LogContext;\n readonly #config: Config;\n readonly #clients: Map<\n string,\n {\n wsID: string;\n downstream: Subscription<Downstream>;\n }\n >;\n #userPushURL?: 
string | undefined;\n #userPushHeaders?: Record<string, string> | undefined;\n\n readonly #customMutations = getOrCreateCounter(\n 'mutation',\n 'custom',\n 'Number of custom mutations processed',\n );\n readonly #pushes = getOrCreateCounter(\n 'mutation',\n 'pushes',\n 'Number of pushes processed by the pusher',\n );\n\n constructor(\n config: Config,\n lc: LogContext,\n pushURL: string[],\n apiKey: string | undefined,\n allowedClientHeaders: readonly string[] | undefined,\n queue: Queue<PusherEntryOrStop>,\n ) {\n this.#pushURLs = pushURL;\n this.#lc = lc.withContext('component', 'pusher');\n this.#pushURLPatterns = pushURL.map(compileUrlPattern);\n this.#apiKey = apiKey;\n this.#allowedClientHeaders = allowedClientHeaders;\n this.#queue = queue;\n this.#config = config;\n this.#clients = new Map();\n }\n\n get pushURL() {\n return this.#pushURLs;\n }\n\n /**\n * Returns a new downstream stream if the clientID,wsID pair has not been seen before.\n * If a clientID already exists with a different wsID, that client's downstream is cancelled.\n */\n initConnection(\n clientID: string,\n wsID: string,\n userPushURL: string | undefined,\n userPushHeaders: Record<string, string> | undefined,\n ) {\n const existing = this.#clients.get(clientID);\n if (existing && existing.wsID === wsID) {\n // already initialized for this socket\n throw new Error('Connection was already initialized');\n }\n\n // client is back on a new connection\n if (existing) {\n existing.downstream.cancel();\n }\n\n // Handle client group level URL parameters\n if (this.#userPushURL === undefined) {\n // First client in the group - store its URL and headers\n this.#userPushURL = userPushURL;\n this.#userPushHeaders = userPushHeaders;\n } else {\n // Validate that subsequent clients have compatible parameters\n if (this.#userPushURL !== userPushURL) {\n this.#lc.warn?.(\n 'Client provided different mutate parameters than client group',\n {\n clientID,\n clientURL: userPushURL,\n clientGroupURL: 
this.#userPushURL,\n },\n );\n }\n }\n\n const downstream = Subscription.create<Downstream>({\n cleanup: () => {\n this.#clients.delete(clientID);\n },\n });\n this.#clients.set(clientID, {wsID, downstream});\n return downstream;\n }\n\n async run() {\n for (;;) {\n const task = await this.#queue.dequeue();\n const rest = this.#queue.drain();\n const [pushes, terminate] = combinePushes([task, ...rest]);\n for (const push of pushes) {\n const response = await this.#processPush(push);\n await this.#fanOutResponses(response);\n }\n\n if (terminate) {\n break;\n }\n }\n }\n\n /**\n * 1. If the entire `push` fails, we send the error to relevant clients.\n * 2. If the push succeeds, we look for any mutation failure that should cause the connection to terminate\n * and terminate the connection for those clients.\n */\n #fanOutResponses(response: PushResponse) {\n const connectionTerminations: (() => void)[] = [];\n\n // if the entire push failed, send that to the client.\n if ('kind' in response || 'error' in response) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a push error.',\n response,\n );\n const groupedMutationIDs = groupBy(\n response.mutationIDs ?? [],\n m => m.clientID,\n );\n for (const [clientID, mutationIDs] of groupedMutationIDs) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n // We do not resolve mutations on the client if the push fails\n // as those mutations will be retried.\n if ('error' in response) {\n // This error code path will eventually be removed when we\n // no longer support the legacy push error format.\n const pushFailedBody: PushFailedBody =\n response.error === 'http'\n ? {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.HTTP,\n status: response.status,\n bodyPreview: response.details,\n mutationIDs,\n message: `Fetch from API server returned non-OK status ${response.status}`,\n }\n : response.error === 'unsupportedPushVersion'\n ? 
{\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.UnsupportedPushVersion,\n mutationIDs,\n message: `Unsupported push version`,\n }\n : {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.Internal,\n mutationIDs,\n message:\n response.error === 'zeroPusher'\n ? response.details\n : response.error === 'unsupportedSchemaVersion'\n ? 'Unsupported schema version'\n : 'An unknown error occurred while pushing to the API server',\n };\n\n this.#failDownstream(client.downstream, pushFailedBody);\n } else if ('kind' in response) {\n this.#failDownstream(client.downstream, response);\n } else {\n unreachable(response);\n }\n }\n } else {\n // Look for mutations results that should cause us to terminate the connection\n const groupedMutations = groupBy(response.mutations, m => m.id.clientID);\n for (const [clientID, mutations] of groupedMutations) {\n const client = this.#clients.get(clientID);\n if (!client) {\n continue;\n }\n\n let failure: PushFailedBody | undefined;\n let i = 0;\n for (; i < mutations.length; i++) {\n const m = mutations[i];\n if ('error' in m.result) {\n this.#lc.warn?.(\n 'The server behind ZERO_MUTATE_URL returned a mutation error.',\n m.result,\n );\n }\n // This error code path will eventually be removed,\n // keeping this for backwards compatibility, but the server\n // should now return a PushFailedBody with the mutationIDs\n if ('error' in m.result && m.result.error === 'oooMutation') {\n failure = {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.Server,\n reason: ErrorReason.OutOfOrderMutation,\n message: 'mutation was out of order',\n details: m.result.details,\n mutationIDs: mutations.map(m => ({\n clientID: m.id.clientID,\n id: m.id.id,\n })),\n };\n break;\n }\n }\n\n if (failure && i < mutations.length - 1) {\n this.#lc.warn?.(\n 'push-response contains mutations after a mutation which should fatal the connection',\n );\n }\n\n if (failure) {\n 
connectionTerminations.push(() =>\n this.#failDownstream(client.downstream, failure),\n );\n }\n }\n }\n\n connectionTerminations.forEach(cb => cb());\n }\n\n async #processPush(entry: PusherEntry): Promise<PushResponse> {\n this.#customMutations.add(entry.push.mutations.length, {\n clientGroupID: entry.push.clientGroupID,\n });\n this.#pushes.add(1, {\n clientGroupID: entry.push.clientGroupID,\n });\n\n // Record custom mutations for telemetry\n recordMutation('custom', entry.push.mutations.length);\n\n const url =\n this.#userPushURL ??\n must(this.#pushURLs[0], 'ZERO_MUTATE_URL is not set');\n\n this.#lc.debug?.(\n 'pushing to',\n url,\n 'with',\n entry.push.mutations.length,\n 'mutations',\n );\n\n let mutationIDs: MutationID[] = [];\n\n try {\n mutationIDs = entry.push.mutations.map(m => ({\n id: m.id,\n clientID: m.clientID,\n }));\n\n return await fetchFromAPIServer(\n pushResponseSchema,\n 'push',\n this.#lc,\n url,\n url === this.#userPushURL,\n this.#pushURLPatterns,\n {\n appID: this.#config.app.id,\n shardNum: this.#config.shard.num,\n },\n {\n apiKey: this.#apiKey,\n customHeaders: this.#userPushHeaders,\n allowedClientHeaders: this.#allowedClientHeaders,\n token: entry.auth,\n cookie: entry.httpCookie,\n origin: entry.origin,\n },\n entry.push,\n );\n } catch (e) {\n if (isProtocolError(e) && e.errorBody.kind === ErrorKind.PushFailed) {\n return {\n ...e.errorBody,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n\n return {\n kind: ErrorKind.PushFailed,\n origin: ErrorOrigin.ZeroCache,\n reason: ErrorReason.Internal,\n message: `Failed to push: ${getErrorMessage(e)}`,\n mutationIDs,\n } as const satisfies PushFailedBody;\n }\n }\n\n #failDownstream(\n downstream: Subscription<Downstream>,\n errorBody: PushFailedBody,\n ): void {\n const logLevel = errorBody.origin === ErrorOrigin.Server ? 
'warn' : 'error';\n downstream.fail(new ProtocolErrorWithLevel(errorBody, logLevel));\n }\n}\n\n/**\n * Pushes for different clientIDs could theoretically be interleaved.\n *\n * In order to do efficient batching to the user's API server,\n * we collect all pushes for the same clientID into a single push.\n */\nexport function combinePushes(\n entries: readonly (PusherEntryOrStop | undefined)[],\n): [PusherEntry[], boolean] {\n const pushesByClientID = new Map<string, PusherEntry[]>();\n\n function collect() {\n const ret: PusherEntry[] = [];\n for (const entries of pushesByClientID.values()) {\n const composite: PusherEntry = {\n ...entries[0],\n push: {\n ...entries[0].push,\n mutations: [],\n },\n };\n ret.push(composite);\n for (const entry of entries) {\n assertAreCompatiblePushes(composite, entry);\n composite.push.mutations.push(...entry.push.mutations);\n }\n }\n return ret;\n }\n\n for (const entry of entries) {\n if (entry === 'stop' || entry === undefined) {\n return [collect(), true];\n }\n\n const {clientID} = entry;\n const existing = pushesByClientID.get(clientID);\n if (existing) {\n existing.push(entry);\n } else {\n pushesByClientID.set(clientID, [entry]);\n }\n }\n\n return [collect(), false] as const;\n}\n\n// These invariants should always be true for a given clientID.\n// If they are not, we have a bug in the code somewhere.\nfunction assertAreCompatiblePushes(left: PusherEntry, right: PusherEntry) {\n assert(\n left.clientID === right.clientID,\n 'clientID must be the same for all pushes',\n );\n assert(\n left.auth === right.auth,\n 'auth must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.schemaVersion === right.push.schemaVersion,\n 'schemaVersion must be the same for all pushes with the same clientID',\n );\n assert(\n left.push.pushVersion === right.push.pushVersion,\n 'pushVersion must be the same for all pushes with the same clientID',\n );\n assert(\n left.httpCookie === right.httpCookie,\n 'httpCookie 
must be the same for all pushes with the same clientID',\n );\n assert(\n left.origin === right.origin,\n 'origin must be the same for all pushes with the same clientID',\n );\n}\n"],"names":["MutationType.Custom","ErrorKind.PushFailed","ErrorOrigin.ZeroCache","ErrorReason.HTTP","ErrorOrigin.Server","ErrorReason.UnsupportedPushVersion","ErrorReason.Internal","ErrorReason.OutOfOrderMutation","m","entries"],"mappings":";;;;;;;;;;;;;;;;;AAmEO,MAAM,cAAyC;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACT;AAAA,EACA,YAAY;AAAA,EACZ,aAAa;AAAA,EAEb,YACE,WACA,YACA,IACA,eACA;AACA,SAAK,UAAU;AACf,SAAK,MAAM,GAAG,YAAY,aAAa,eAAe;AACtD,SAAK,mBAAmB,WAAW,IAAI,IAAI,iBAAiB;AAC5D,SAAK,SAAS,IAAI,MAAA;AAClB,SAAK,UAAU,IAAI;AAAA,MACjB;AAAA,MACA;AAAA,MACA,WAAW;AAAA,MACX,WAAW;AAAA,MACX,WAAW;AAAA,MACX,KAAK;AAAA,IAAA;AAEP,SAAK,KAAK;AACV,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,IAAI,UAA8B;AAChC,WAAO,KAAK,QAAQ,QAAQ,CAAC;AAAA,EAC/B;AAAA,EAEA,eACE,UACA,MACA,aACA,iBACA;AACA,WAAO,KAAK,QAAQ;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAAA,EAEA,YACE,UACA,MACA,MACA,YACA,QACsC;AACtC,QAAI,CAAC,KAAK,YAAY,gBAAgB;AACpC,mBAAa;AAAA,IACf;AACA,SAAK,OAAO,QAAQ,EAAC,MAAM,MAAM,UAAU,YAAY,QAAO;AAE9D,WAAO;AAAA,MACL,MAAM;AAAA,IAAA;AAAA,EAEV;AAAA,EAEA,MAAM,qBAAqB,QAAoB;AAC7C,UAAM,MAAM,KAAK,YAAY,IAAI,CAAC;AAClC,QAAI,CAAC,KAAK;AAER;AAAA,IACF;AAEA,UAAM,cAAwB;AAAA,MAC5B,eAAe,KAAK;AAAA,MACpB,WAAW;AAAA,QACT;AAAA,UACE,MAAMA;AAAAA,UACN,IAAI;AAAA;AAAA,UACJ,UAAU,OAAO;AAAA,UACjB,MAAM;AAAA,UACN,MAAM;AAAA,YACJ;AAAA,cACE,MAAM;AAAA,cACN,eAAe,KAAK;AAAA,cACpB,UAAU,OAAO;AAAA,cACjB,gBAAgB,OAAO;AAAA,YAAA;AAAA,UACzB;AAAA,UAEF,WAAW,KAAK,IAAA;AAAA,QAAI;AAAA,MACtB;AAAA,MAEF,aAAa;AAAA,MACb,WAAW,KAAK,IAAA;AAAA,MAChB,WAAW,WAAW,KAAK,EAAE,IAAI,OAAO,QAAQ,IAAI,OAAO,EAAE;AAAA,IAAA;AAG/D,QAAI;AACF,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL,EAAC,OAAO,KAAK,QAAQ,IAAI,IAAI,UAAU,KAAK,QAAQ,MAAM,IAAA;AAAA,QAC1D,EAAC,QAAQ,KAAK,YAAY,OAAA;AAAA,QAC1B;AAAA,MAAA;AAAA,IAEJ,SAAS,GAAG;AACV,WA
AK,IAAI,OAAO,mCAAmC;AAAA,QACjD,OAAO,gBAAgB,CAAC;AAAA,MAAA,CACzB;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAM,sBAAsB,WAAqB;AAC/C,QAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,IACF;AACA,UAAM,MAAM,KAAK,YAAY,IAAI,CAAC;AAClC,QAAI,CAAC,KAAK;AAER;AAAA,IACF;AAEA,UAAM,cAAwB;AAAA,MAC5B,eAAe,KAAK;AAAA,MACpB,WAAW;AAAA,QACT;AAAA,UACE,MAAMA;AAAAA,UACN,IAAI;AAAA;AAAA,UACJ,UAAU,UAAU,CAAC;AAAA;AAAA,UACrB,MAAM;AAAA,UACN,MAAM;AAAA,YACJ;AAAA,cACE,MAAM;AAAA,cACN,eAAe,KAAK;AAAA,cACpB;AAAA,YAAA;AAAA,UACF;AAAA,UAEF,WAAW,KAAK,IAAA;AAAA,QAAI;AAAA,MACtB;AAAA,MAEF,aAAa;AAAA,MACb,WAAW,KAAK,IAAA;AAAA,MAChB,WAAW,gBAAgB,KAAK,EAAE,IAAI,KAAK,KAAK;AAAA,IAAA;AAGlD,QAAI;AACF,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL,EAAC,OAAO,KAAK,QAAQ,IAAI,IAAI,UAAU,KAAK,QAAQ,MAAM,IAAA;AAAA,QAC1D,EAAC,QAAQ,KAAK,YAAY,OAAA;AAAA,QAC1B;AAAA,MAAA;AAAA,IAEJ,SAAS,GAAG;AACV,WAAK,IAAI,OAAO,wCAAwC;AAAA,QACtD,OAAO,gBAAgB,CAAC;AAAA,MAAA,CACzB;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAM;AACJ,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AAAA,EACT;AAAA,EAEA,QAAQ;AACN,WAAO,CAAC,KAAK,YAAY,kCAAkC;AAC3D,MAAE,KAAK;AACP,QAAI,KAAK,aAAa,GAAG;AACvB,WAAK,KAAK,KAAA;AAAA,IACZ;AAAA,EACF;AAAA,EAEA,UAAmB;AACjB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,MAAqB;AACnB,SAAK,WAAW,KAAK,QAAQ,IAAA;AAC7B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,OAAsB;AACpB,QAAI,KAAK,YAAY;AACnB,aAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,IAC3D;AACA,SAAK,aAAa;AAClB,SAAK,OAAO,QAAQ,MAAM;AAC1B,WAAO,KAAK,KAAK,UAAU,8BAA8B;AAAA,EAC3D;AACF;AAeA,MAAM,WAAW;AAAA,EACN;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAOT;AAAA,EACA;AAAA,EAES,mBAAmB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAEO,UAAU;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAAA,EAGF,YACE,QACA,IACA,SACA,QACA,sBACA,OACA;AACA,SAAK,YAAY;AACjB,SAAK,MAAM,GAAG,YAAY,aAAa,QAAQ;AAC/C,SAAK,mBAAmB,QAAQ,IAAI,iBAAiB;AACrD,SAAK,UAAU;AACf,SAAK,wBAAwB;AAC7B,SAAK,SAAS;AACd,SAAK,UAAU;AACf,SAAK,+BAAe,IAAA;AAAA,EACtB;AAAA,EAEA,IAAI,UAAU;AACZ,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,eACE,UACA,MACA,aACA,iBACA;AACA,UAAM,WAAW,KAA
K,SAAS,IAAI,QAAQ;AAC3C,QAAI,YAAY,SAAS,SAAS,MAAM;AAEtC,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAGA,QAAI,UAAU;AACZ,eAAS,WAAW,OAAA;AAAA,IACtB;AAGA,QAAI,KAAK,iBAAiB,QAAW;AAEnC,WAAK,eAAe;AACpB,WAAK,mBAAmB;AAAA,IAC1B,OAAO;AAEL,UAAI,KAAK,iBAAiB,aAAa;AACrC,aAAK,IAAI;AAAA,UACP;AAAA,UACA;AAAA,YACE;AAAA,YACA,WAAW;AAAA,YACX,gBAAgB,KAAK;AAAA,UAAA;AAAA,QACvB;AAAA,MAEJ;AAAA,IACF;AAEA,UAAM,aAAa,aAAa,OAAmB;AAAA,MACjD,SAAS,MAAM;AACb,aAAK,SAAS,OAAO,QAAQ;AAAA,MAC/B;AAAA,IAAA,CACD;AACD,SAAK,SAAS,IAAI,UAAU,EAAC,MAAM,YAAW;AAC9C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,MAAM;AACV,eAAS;AACP,YAAM,OAAO,MAAM,KAAK,OAAO,QAAA;AAC/B,YAAM,OAAO,KAAK,OAAO,MAAA;AACzB,YAAM,CAAC,QAAQ,SAAS,IAAI,cAAc,CAAC,MAAM,GAAG,IAAI,CAAC;AACzD,iBAAW,QAAQ,QAAQ;AACzB,cAAM,WAAW,MAAM,KAAK,aAAa,IAAI;AAC7C,cAAM,KAAK,iBAAiB,QAAQ;AAAA,MACtC;AAEA,UAAI,WAAW;AACb;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,UAAwB;AACvC,UAAM,yBAAyC,CAAA;AAG/C,QAAI,UAAU,YAAY,WAAW,UAAU;AAC7C,WAAK,IAAI;AAAA,QACP;AAAA,QACA;AAAA,MAAA;AAEF,YAAM,qBAAqB;AAAA,QACzB,SAAS,eAAe,CAAA;AAAA,QACxB,OAAK,EAAE;AAAA,MAAA;AAET,iBAAW,CAAC,UAAU,WAAW,KAAK,oBAAoB;AACxD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAIA,YAAI,WAAW,UAAU;AAGvB,gBAAM,iBACJ,SAAS,UAAU,SACf;AAAA,YACE,MAAMC;AAAAA,YACN,QAAQC;AAAAA,YACR,QAAQC;AAAAA,YACR,QAAQ,SAAS;AAAA,YACjB,aAAa,SAAS;AAAA,YACtB;AAAA,YACA,SAAS,gDAAgD,SAAS,MAAM;AAAA,UAAA,IAE1E,SAAS,UAAU,2BACjB;AAAA,YACE,MAAMF;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQC;AAAAA,YACR;AAAA,YACA,SAAS;AAAA,UAAA,IAEX;AAAA,YACE,MAAMJ;AAAAA,YACN,QAAQG;AAAAA,YACR,QAAQE;AAAAA,YACR;AAAA,YACA,SACE,SAAS,UAAU,eACf,SAAS,UACT,SAAS,UAAU,6BACjB,+BACA;AAAA,UAAA;AAGlB,eAAK,gBAAgB,OAAO,YAAY,cAAc;AAAA,QACxD,WAAW,UAAU,UAAU;AAC7B,eAAK,gBAAgB,OAAO,YAAY,QAAQ;AAAA,QAClD,OAAO;AACL,sBAAoB;AAAA,QACtB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,mBAAmB,QAAQ,SAAS,WAAW,CAAA,MAAK,EAAE,GAAG,QAAQ;AACvE,iBAAW,CAAC,UAAU,SAAS,KAAK,kBAAkB;AACpD,cAAM,SAAS,KAAK,SAAS,IAAI,QAAQ;AACzC,YAAI,CAAC,QAAQ;AACX;AAAA,QACF;AAEA,YAAI;AACJ,YAAI,IAAI;AACR,eAAO,IAAI,UAAU,QAAQ,KAAK;AAChC,gBAAM,IAAI,UA
AU,CAAC;AACrB,cAAI,WAAW,EAAE,QAAQ;AACvB,iBAAK,IAAI;AAAA,cACP;AAAA,cACA,EAAE;AAAA,YAAA;AAAA,UAEN;AAIA,cAAI,WAAW,EAAE,UAAU,EAAE,OAAO,UAAU,eAAe;AAC3D,sBAAU;AAAA,cACR,MAAML;AAAAA,cACN,QAAQG;AAAAA,cACR,QAAQG;AAAAA,cACR,SAAS;AAAA,cACT,SAAS,EAAE,OAAO;AAAA,cAClB,aAAa,UAAU,IAAI,CAAAC,QAAM;AAAA,gBAC/B,UAAUA,GAAE,GAAG;AAAA,gBACf,IAAIA,GAAE,GAAG;AAAA,cAAA,EACT;AAAA,YAAA;AAEJ;AAAA,UACF;AAAA,QACF;AAEA,YAAI,WAAW,IAAI,UAAU,SAAS,GAAG;AACvC,eAAK,IAAI;AAAA,YACP;AAAA,UAAA;AAAA,QAEJ;AAEA,YAAI,SAAS;AACX,iCAAuB;AAAA,YAAK,MAC1B,KAAK,gBAAgB,OAAO,YAAY,OAAO;AAAA,UAAA;AAAA,QAEnD;AAAA,MACF;AAAA,IACF;AAEA,2BAAuB,QAAQ,CAAA,OAAM,GAAA,CAAI;AAAA,EAC3C;AAAA,EAEA,MAAM,aAAa,OAA2C;AAC5D,SAAK,iBAAiB,IAAI,MAAM,KAAK,UAAU,QAAQ;AAAA,MACrD,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AACD,SAAK,QAAQ,IAAI,GAAG;AAAA,MAClB,eAAe,MAAM,KAAK;AAAA,IAAA,CAC3B;AAGD,mBAAe,UAAU,MAAM,KAAK,UAAU,MAAM;AAEpD,UAAM,MACJ,KAAK,gBACL,KAAK,KAAK,UAAU,CAAC,GAAG,4BAA4B;AAEtD,SAAK,IAAI;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,MACrB;AAAA,IAAA;AAGF,QAAI,cAA4B,CAAA;AAEhC,QAAI;AACF,oBAAc,MAAM,KAAK,UAAU,IAAI,CAAA,OAAM;AAAA,QAC3C,IAAI,EAAE;AAAA,QACN,UAAU,EAAE;AAAA,MAAA,EACZ;AAEF,aAAO,MAAM;AAAA,QACX;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,QACA,QAAQ,KAAK;AAAA,QACb,KAAK;AAAA,QACL;AAAA,UACE,OAAO,KAAK,QAAQ,IAAI;AAAA,UACxB,UAAU,KAAK,QAAQ,MAAM;AAAA,QAAA;AAAA,QAE/B;AAAA,UACE,QAAQ,KAAK;AAAA,UACb,eAAe,KAAK;AAAA,UACpB,sBAAsB,KAAK;AAAA,UAC3B,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,UACd,QAAQ,MAAM;AAAA,QAAA;AAAA,QAEhB,MAAM;AAAA,MAAA;AAAA,IAEV,SAAS,GAAG;AACV,UAAI,gBAAgB,CAAC,KAAK,EAAE,UAAU,SAASP,YAAsB;AACnE,eAAO;AAAA,UACL,GAAG,EAAE;AAAA,UACL;AAAA,QAAA;AAAA,MAEJ;AAEA,aAAO;AAAA,QACL,MAAMA;AAAAA,QACN,QAAQC;AAAAA,QACR,QAAQI;AAAAA,QACR,SAAS,mBAAmB,gBAAgB,CAAC,CAAC;AAAA,QAC9C;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF;AAAA,EAEA,gBACE,YACA,WACM;AACN,UAAM,WAAW,UAAU,WAAWF,SAAqB,SAAS;AACpE,eAAW,KAAK,IAAI,uBAAuB,WAAW,QAAQ,CAAC;AAAA,EACjE;AACF;AAQO,SAAS,cACd,SAC0B;AAC1B,QAAM,uCAAuB,IAAA;AAE7B,WAAS,UAAU;AACjB,UAAM,MAAqB,CAAA;AAC3B,eAAWK,YAAW,iBAAiB,UAAU;AAC/C,YAAM,YAAyB;AAAA,QAC7B,G
AAGA,SAAQ,CAAC;AAAA,QACZ,MAAM;AAAA,UACJ,GAAGA,SAAQ,CAAC,EAAE;AAAA,UACd,WAAW,CAAA;AAAA,QAAC;AAAA,MACd;AAEF,UAAI,KAAK,SAAS;AAClB,iBAAW,SAASA,UAAS;AAC3B,kCAA0B,WAAW,KAAK;AAC1C,kBAAU,KAAK,UAAU,KAAK,GAAG,MAAM,KAAK,SAAS;AAAA,MACvD;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,aAAW,SAAS,SAAS;AAC3B,QAAI,UAAU,UAAU,UAAU,QAAW;AAC3C,aAAO,CAAC,QAAA,GAAW,IAAI;AAAA,IACzB;AAEA,UAAM,EAAC,aAAY;AACnB,UAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK;AAAA,IACrB,OAAO;AACL,uBAAiB,IAAI,UAAU,CAAC,KAAK,CAAC;AAAA,IACxC;AAAA,EACF;AAEA,SAAO,CAAC,QAAA,GAAW,KAAK;AAC1B;AAIA,SAAS,0BAA0B,MAAmB,OAAoB;AACxE;AAAA,IACE,KAAK,aAAa,MAAM;AAAA,IACxB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,SAAS,MAAM;AAAA,IACpB;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,kBAAkB,MAAM,KAAK;AAAA,IACvC;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,KAAK,gBAAgB,MAAM,KAAK;AAAA,IACrC;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,eAAe,MAAM;AAAA,IAC1B;AAAA,EAAA;AAEF;AAAA,IACE,KAAK,WAAW,MAAM;AAAA,IACtB;AAAA,EAAA;AAEJ;"}
@@ -1 +1 @@
1
- {"version":3,"file":"change-processor.d.ts","sourceRoot":"","sources":["../../../../../../zero-cache/src/services/replicator/change-processor.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAsBjD,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,wBAAwB,CAAC;AA8B5D,OAAO,KAAK,EAAC,gBAAgB,EAAC,MAAM,iDAAiD,CAAC;AACtF,OAAO,KAAK,EAAC,cAAc,EAAC,MAAM,iBAAiB,CAAC;AAYpD,MAAM,MAAM,mBAAmB,GAAG,cAAc,GAAG,cAAc,CAAC;AAElE,MAAM,MAAM,YAAY,GAAG;IACzB,SAAS,EAAE,MAAM,CAAC;IAClB,aAAa,EAAE,OAAO,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;GAUG;AACH,qBAAa,eAAe;;gBAexB,EAAE,EAAE,eAAe,EACnB,IAAI,EAAE,mBAAmB,EACzB,WAAW,EAAE,CAAC,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,OAAO,KAAK,IAAI;IAqBrD,KAAK,CAAC,EAAE,EAAE,UAAU;IAIpB,8CAA8C;IAC9C,cAAc,CACZ,EAAE,EAAE,UAAU,EACd,UAAU,EAAE,gBAAgB,GAC3B,YAAY,GAAG,IAAI;CA+IvB"}
1
+ {"version":3,"file":"change-processor.d.ts","sourceRoot":"","sources":["../../../../../../zero-cache/src/services/replicator/change-processor.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAsBjD,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,wBAAwB,CAAC;AA6B5D,OAAO,KAAK,EAAC,gBAAgB,EAAC,MAAM,iDAAiD,CAAC;AACtF,OAAO,KAAK,EAAC,cAAc,EAAC,MAAM,iBAAiB,CAAC;AAapD,MAAM,MAAM,mBAAmB,GAAG,cAAc,GAAG,cAAc,CAAC;AAElE,MAAM,MAAM,YAAY,GAAG;IACzB,SAAS,EAAE,MAAM,CAAC;IAClB,aAAa,EAAE,OAAO,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;GAUG;AACH,qBAAa,eAAe;;gBAexB,EAAE,EAAE,eAAe,EACnB,IAAI,EAAE,mBAAmB,EACzB,WAAW,EAAE,CAAC,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,OAAO,KAAK,IAAI;IAqBrD,KAAK,CAAC,EAAE,EAAE,UAAU;IAIpB,8CAA8C;IAC9C,cAAc,CACZ,EAAE,EAAE,UAAU,EACd,UAAU,EAAE,gBAAgB,GAC3B,YAAY,GAAG,IAAI;CAmJvB"}
@@ -9,8 +9,8 @@ import { mapPostgresToLite, mapPostgresToLiteColumn, mapPostgresToLiteIndex } fr
9
9
  import { JSON_PARSED, liteRow } from "../../types/lite.js";
10
10
  import { liteTableName } from "../../types/names.js";
11
11
  import { id } from "../../types/sql.js";
12
- import { ColumnMetadataStore } from "../change-source/column-metadata.js";
13
12
  import { logSetOp, logDeleteOp, logTruncateOp, logResetOp } from "./schema/change-log.js";
13
+ import { ColumnMetadataStore } from "./schema/column-metadata.js";
14
14
  import { updateReplicationWatermark } from "./schema/replication-state.js";
15
15
  import { ZERO_VERSION_COLUMN_NAME } from "./schema/constants.js";
16
16
  class ChangeProcessor {
@@ -101,7 +101,7 @@ class ChangeProcessor {
101
101
  }
102
102
  if (msg.tag === "commit") {
103
103
  this.#currentTx = null;
104
- assert(watermark);
104
+ assert(watermark, "watermark is required for commit messages");
105
105
  const schemaUpdated = tx.processCommit(msg, watermark);
106
106
  return { watermark, schemaUpdated };
107
107
  }
@@ -129,6 +129,9 @@ class ChangeProcessor {
129
129
  case "rename-table":
130
130
  tx.processRenameTable(msg);
131
131
  break;
132
+ case "update-table-metadata":
133
+ lc.info?.(`Received table metadata update`, msg);
134
+ break;
132
135
  case "add-column":
133
136
  tx.processAddColumn(msg);
134
137
  break;
@@ -161,6 +164,7 @@ class TransactionProcessor {
161
164
  #version;
162
165
  #tableSpecs;
163
166
  #jsonFormat;
167
+ #columnMetadata;
164
168
  #pos = 0;
165
169
  #schemaChanged = false;
166
170
  constructor(lc, db, mode, tableSpecs, commitVersion, jsonFormat) {
@@ -183,6 +187,7 @@ class TransactionProcessor {
183
187
  this.#version = commitVersion;
184
188
  this.#lc = lc.withContext("version", commitVersion);
185
189
  this.#tableSpecs = tableSpecs;
190
+ this.#columnMetadata = must(ColumnMetadataStore.getInstance(db.db));
186
191
  if (this.#tableSpecs.size === 0) {
187
192
  this.#reloadTableSpecs();
188
193
  }
@@ -206,7 +211,7 @@ class TransactionProcessor {
206
211
  return must(this.#tableSpecs.get(name), `Unknown table ${name}`);
207
212
  }
208
213
  #getKey({ row, numCols }, { relation }) {
209
- const keyColumns = relation.replicaIdentity !== "full" ? relation.keyColumns : this.#tableSpec(liteTableName(relation)).primaryKey;
214
+ const keyColumns = relation.rowKey.type !== "full" ? relation.rowKey.columns : this.#tableSpec(liteTableName(relation)).primaryKey;
210
215
  if (!keyColumns?.length) {
211
216
  throw new Error(
212
217
  `Cannot replicate table "${relation.name}" without a PRIMARY KEY or UNIQUE INDEX`
@@ -232,7 +237,7 @@ class TransactionProcessor {
232
237
  ...newRow.row,
233
238
  [ZERO_VERSION_COLUMN_NAME]: this.#version
234
239
  });
235
- if (insert.relation.keyColumns.length === 0) {
240
+ if (insert.relation.rowKey.columns.length === 0) {
236
241
  return;
237
242
  }
238
243
  const key = this.#getKey(newRow, insert);
@@ -323,11 +328,8 @@ class TransactionProcessor {
323
328
  processCreateTable(create) {
324
329
  const table = mapPostgresToLite(create.spec);
325
330
  this.#db.db.exec(createLiteTableStatement(table));
326
- const store = ColumnMetadataStore.getInstance(this.#db.db);
327
- if (store) {
328
- for (const [colName, colSpec] of Object.entries(create.spec.columns)) {
329
- store.insert(table.name, colName, colSpec);
330
- }
331
+ for (const [colName, colSpec] of Object.entries(create.spec.columns)) {
332
+ this.#columnMetadata.insert(table.name, colName, colSpec);
331
333
  }
332
334
  this.#logResetOp(table.name);
333
335
  this.#lc.info?.(create.tag, table.name);
@@ -336,10 +338,7 @@ class TransactionProcessor {
336
338
  const oldName = liteTableName(rename.old);
337
339
  const newName = liteTableName(rename.new);
338
340
  this.#db.db.exec(`ALTER TABLE ${id(oldName)} RENAME TO ${id(newName)}`);
339
- const store = ColumnMetadataStore.getInstance(this.#db.db);
340
- if (store) {
341
- store.renameTable(oldName, newName);
342
- }
341
+ this.#columnMetadata.renameTable(oldName, newName);
343
342
  this.#bumpVersions(newName);
344
343
  this.#logResetOp(oldName);
345
344
  this.#lc.info?.(rename.tag, oldName, newName);
@@ -351,10 +350,7 @@ class TransactionProcessor {
351
350
  this.#db.db.exec(
352
351
  `ALTER TABLE ${id(table)} ADD ${id(name)} ${liteColumnDef(spec)}`
353
352
  );
354
- const store = ColumnMetadataStore.getInstance(this.#db.db);
355
- if (store) {
356
- store.insert(table, name, msg.column.spec);
357
- }
353
+ this.#columnMetadata.insert(table, name, msg.column.spec);
358
354
  this.#bumpVersions(table);
359
355
  this.#lc.info?.(msg.tag, table, msg.column);
360
356
  }
@@ -392,10 +388,12 @@ class TransactionProcessor {
392
388
  `ALTER TABLE ${id(table)} RENAME ${id(oldName)} TO ${id(newName)}`
393
389
  );
394
390
  }
395
- const store = ColumnMetadataStore.getInstance(this.#db.db);
396
- if (store) {
397
- store.update(table, msg.old.name, msg.new.name, msg.new.spec);
398
- }
391
+ this.#columnMetadata.update(
392
+ table,
393
+ msg.old.name,
394
+ msg.new.name,
395
+ msg.new.spec
396
+ );
399
397
  this.#bumpVersions(table);
400
398
  this.#lc.info?.(msg.tag, table, msg.new);
401
399
  }
@@ -403,20 +401,14 @@ class TransactionProcessor {
403
401
  const table = liteTableName(msg.table);
404
402
  const { column } = msg;
405
403
  this.#db.db.exec(`ALTER TABLE ${id(table)} DROP ${id(column)}`);
406
- const store = ColumnMetadataStore.getInstance(this.#db.db);
407
- if (store) {
408
- store.deleteColumn(table, column);
409
- }
404
+ this.#columnMetadata.deleteColumn(table, column);
410
405
  this.#bumpVersions(table);
411
406
  this.#lc.info?.(msg.tag, table, column);
412
407
  }
413
408
  processDropTable(drop) {
414
409
  const name = liteTableName(drop.id);
415
410
  this.#db.db.exec(`DROP TABLE IF EXISTS ${id(name)}`);
416
- const store = ColumnMetadataStore.getInstance(this.#db.db);
417
- if (store) {
418
- store.deleteTable(name);
419
- }
411
+ this.#columnMetadata.deleteTable(name);
420
412
  this.#logResetOp(name);
421
413
  this.#lc.info?.(drop.tag, name);
422
414
  }
@@ -1 +1 @@
1
- {"version":3,"file":"change-processor.js","sources":["../../../../../../zero-cache/src/services/replicator/change-processor.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {SqliteError} from '@rocicorp/zero-sqlite3';\nimport {AbortError} from '../../../../shared/src/abort-error.ts';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport {stringify} from '../../../../shared/src/bigint-json.ts';\nimport {must} from '../../../../shared/src/must.ts';\nimport {\n createLiteIndexStatement,\n createLiteTableStatement,\n liteColumnDef,\n} from '../../db/create.ts';\nimport {\n computeZqlSpecs,\n listIndexes,\n listTables,\n} from '../../db/lite-tables.ts';\nimport {\n mapPostgresToLite,\n mapPostgresToLiteColumn,\n mapPostgresToLiteIndex,\n} from '../../db/pg-to-lite.ts';\nimport type {LiteTableSpec} from '../../db/specs.ts';\nimport type {StatementRunner} from '../../db/statements.ts';\nimport type {LexiVersion} from '../../types/lexi-version.ts';\nimport {\n JSON_PARSED,\n liteRow,\n type JSONFormat,\n type LiteRow,\n type LiteRowKey,\n type LiteValueType,\n} from '../../types/lite.ts';\nimport {liteTableName} from '../../types/names.ts';\nimport {id} from '../../types/sql.ts';\nimport {ColumnMetadataStore} from '../change-source/column-metadata.ts';\nimport type {\n Change,\n ColumnAdd,\n ColumnDrop,\n ColumnUpdate,\n IndexCreate,\n IndexDrop,\n MessageCommit,\n MessageDelete,\n MessageInsert,\n MessageRelation,\n MessageTruncate,\n MessageUpdate,\n TableCreate,\n TableDrop,\n TableRename,\n} from '../change-source/protocol/current/data.ts';\nimport type {ChangeStreamData} from '../change-source/protocol/current/downstream.ts';\nimport type {ReplicatorMode} from './replicator.ts';\nimport {\n logDeleteOp,\n logResetOp,\n logSetOp,\n logTruncateOp,\n} from './schema/change-log.ts';\nimport {\n ZERO_VERSION_COLUMN_NAME,\n updateReplicationWatermark,\n} from './schema/replication-state.ts';\n\nexport type 
ChangeProcessorMode = ReplicatorMode | 'initial-sync';\n\nexport type CommitResult = {\n watermark: string;\n schemaUpdated: boolean;\n};\n\n/**\n * The ChangeProcessor partitions the stream of messages into transactions\n * by creating a {@link TransactionProcessor} when a transaction begins, and dispatching\n * messages to it until the commit is received.\n *\n * From https://www.postgresql.org/docs/current/protocol-logical-replication.html#PROTOCOL-LOGICAL-MESSAGES-FLOW :\n *\n * \"The logical replication protocol sends individual transactions one by one.\n * This means that all messages between a pair of Begin and Commit messages\n * belong to the same transaction.\"\n */\nexport class ChangeProcessor {\n readonly #db: StatementRunner;\n readonly #mode: ChangeProcessorMode;\n readonly #failService: (lc: LogContext, err: unknown) => void;\n\n // The TransactionProcessor lazily loads table specs into this Map,\n // and reloads them after a schema change. It is cached here to avoid\n // reading them from the DB on every transaction.\n readonly #tableSpecs = new Map<string, LiteTableSpec>();\n\n #currentTx: TransactionProcessor | null = null;\n\n #failure: Error | undefined;\n\n constructor(\n db: StatementRunner,\n mode: ChangeProcessorMode,\n failService: (lc: LogContext, err: unknown) => void,\n ) {\n this.#db = db;\n this.#mode = mode;\n this.#failService = failService;\n }\n\n #fail(lc: LogContext, err: unknown) {\n if (!this.#failure) {\n this.#currentTx?.abort(lc); // roll back any pending transaction.\n\n this.#failure = ensureError(err);\n\n if (!(err instanceof AbortError)) {\n // Propagate the failure up to the service.\n lc.error?.('Message Processing failed:', this.#failure);\n this.#failService(lc, this.#failure);\n }\n }\n }\n\n abort(lc: LogContext) {\n this.#fail(lc, new AbortError());\n }\n\n /** @return If a transaction was committed. 
*/\n processMessage(\n lc: LogContext,\n downstream: ChangeStreamData,\n ): CommitResult | null {\n const [type, message] = downstream;\n if (this.#failure) {\n lc.debug?.(`Dropping ${message.tag}`);\n return null;\n }\n try {\n const watermark =\n type === 'begin'\n ? downstream[2].commitWatermark\n : type === 'commit'\n ? downstream[2].watermark\n : undefined;\n return this.#processMessage(lc, message, watermark);\n } catch (e) {\n this.#fail(lc, e);\n }\n return null;\n }\n\n #beginTransaction(\n lc: LogContext,\n commitVersion: string,\n jsonFormat: JSONFormat,\n ): TransactionProcessor {\n const start = Date.now();\n\n // litestream can technically hold the lock for an arbitrary amount of time\n // when checkpointing a large commit. Crashing on the busy-timeout in this\n // scenario will either produce a corrupt backup or otherwise prevent\n // replication from proceeding.\n //\n // Instead, retry the lock acquisition indefinitely. If this masks\n // an unknown deadlock situation, manual intervention will be necessary.\n for (let i = 0; ; i++) {\n try {\n return new TransactionProcessor(\n lc,\n this.#db,\n this.#mode,\n this.#tableSpecs,\n commitVersion,\n jsonFormat,\n );\n } catch (e) {\n if (e instanceof SqliteError && e.code === 'SQLITE_BUSY') {\n lc.warn?.(\n `SQLITE_BUSY for ${Date.now() - start} ms (attempt ${i + 1}). ` +\n `This is only expected if litestream is performing a large ` +\n `checkpoint.`,\n e,\n );\n continue;\n }\n throw e;\n }\n }\n }\n\n /** @return If a transaction was committed. */\n #processMessage(\n lc: LogContext,\n msg: Change,\n watermark: string | undefined,\n ): CommitResult | null {\n if (msg.tag === 'begin') {\n if (this.#currentTx) {\n throw new Error(`Already in a transaction ${stringify(msg)}`);\n }\n this.#currentTx = this.#beginTransaction(\n lc,\n must(watermark),\n msg.json ?? 
JSON_PARSED,\n );\n return null;\n }\n\n // For non-begin messages, there should be a #currentTx set.\n const tx = this.#currentTx;\n if (!tx) {\n throw new Error(\n `Received message outside of transaction: ${stringify(msg)}`,\n );\n }\n\n if (msg.tag === 'commit') {\n // Undef this.#currentTx to allow the assembly of the next transaction.\n this.#currentTx = null;\n\n assert(watermark);\n const schemaUpdated = tx.processCommit(msg, watermark);\n return {watermark, schemaUpdated};\n }\n\n if (msg.tag === 'rollback') {\n this.#currentTx?.abort(lc);\n this.#currentTx = null;\n return null;\n }\n\n switch (msg.tag) {\n case 'insert':\n tx.processInsert(msg);\n break;\n case 'update':\n tx.processUpdate(msg);\n break;\n case 'delete':\n tx.processDelete(msg);\n break;\n case 'truncate':\n tx.processTruncate(msg);\n break;\n case 'create-table':\n tx.processCreateTable(msg);\n break;\n case 'rename-table':\n tx.processRenameTable(msg);\n break;\n case 'add-column':\n tx.processAddColumn(msg);\n break;\n case 'update-column':\n tx.processUpdateColumn(msg);\n break;\n case 'drop-column':\n tx.processDropColumn(msg);\n break;\n case 'drop-table':\n tx.processDropTable(msg);\n break;\n case 'create-index':\n tx.processCreateIndex(msg);\n break;\n case 'drop-index':\n tx.processDropIndex(msg);\n break;\n default:\n unreachable(msg);\n }\n\n return null;\n }\n}\n\n/**\n * The {@link TransactionProcessor} handles the sequence of messages from\n * upstream, from `BEGIN` to `COMMIT` and executes the corresponding mutations\n * on the {@link postgres.TransactionSql} on the replica.\n *\n * When applying row contents to the replica, the `_0_version` column is added / updated,\n * and a corresponding entry in the `ChangeLog` is added. 
The version value is derived\n * from the watermark of the preceding transaction (stored as the `nextStateVersion` in the\n * `ReplicationState` table).\n *\n * Side note: For non-streaming Postgres transactions, the commitEndLsn (and thus\n * commit watermark) is available in the `begin` message, so it could theoretically\n * be used for the row version of changes within the transaction. However, the\n * commitEndLsn is not available in the streaming (in-progress) transaction\n * protocol, and may not be available for CDC streams of other upstream types.\n * Therefore, the zero replication protocol is designed to not require the commit\n * watermark when a transaction begins.\n *\n * Also of interest is the fact that all INSERT Messages are logically applied as\n * UPSERTs. See {@link processInsert} for the underlying motivation.\n */\nclass TransactionProcessor {\n readonly #lc: LogContext;\n readonly #startMs: number;\n readonly #db: StatementRunner;\n readonly #mode: ChangeProcessorMode;\n readonly #version: LexiVersion;\n readonly #tableSpecs: Map<string, LiteTableSpec>;\n readonly #jsonFormat: JSONFormat;\n\n #pos = 0;\n #schemaChanged = false;\n\n constructor(\n lc: LogContext,\n db: StatementRunner,\n mode: ChangeProcessorMode,\n tableSpecs: Map<string, LiteTableSpec>,\n commitVersion: LexiVersion,\n jsonFormat: JSONFormat,\n ) {\n this.#startMs = Date.now();\n this.#mode = mode;\n this.#jsonFormat = jsonFormat;\n\n switch (mode) {\n case 'serving':\n // Although the Replicator / Incremental Syncer is the only writer of the replica,\n // a `BEGIN CONCURRENT` transaction is used to allow View Syncers to simulate\n // (i.e. and `ROLLBACK`) changes on historic snapshots of the database for the\n // purpose of IVM).\n //\n // This TransactionProcessor is the only logic that will actually\n // `COMMIT` any transactions to the replica.\n db.beginConcurrent();\n break;\n case 'backup':\n // For the backup-replicator (i.e. 
replication-manager), there are no View Syncers\n // and thus BEGIN CONCURRENT is not necessary. In fact, BEGIN CONCURRENT can cause\n // deadlocks with forced wal-checkpoints (which `litestream replicate` performs),\n // so it is important to use vanilla transactions in this configuration.\n db.beginImmediate();\n break;\n case 'initial-sync':\n // When the ChangeProcessor is used for initial-sync, the calling code\n // handles the transaction boundaries.\n break;\n default:\n unreachable();\n }\n this.#db = db;\n this.#version = commitVersion;\n this.#lc = lc.withContext('version', commitVersion);\n this.#tableSpecs = tableSpecs;\n\n if (this.#tableSpecs.size === 0) {\n this.#reloadTableSpecs();\n }\n }\n\n #reloadTableSpecs() {\n this.#tableSpecs.clear();\n // zqlSpecs include the primary key derived from unique indexes\n const zqlSpecs = computeZqlSpecs(this.#lc, this.#db.db);\n for (let spec of listTables(this.#db.db)) {\n if (!spec.primaryKey) {\n spec = {\n ...spec,\n primaryKey: [\n ...(zqlSpecs.get(spec.name)?.tableSpec.primaryKey ?? []),\n ],\n };\n }\n this.#tableSpecs.set(spec.name, spec);\n }\n }\n\n #tableSpec(name: string) {\n return must(this.#tableSpecs.get(name), `Unknown table ${name}`);\n }\n\n #getKey(\n {row, numCols}: {row: LiteRow; numCols: number},\n {relation}: {relation: MessageRelation},\n ): LiteRowKey {\n const keyColumns =\n relation.replicaIdentity !== 'full'\n ? 
relation.keyColumns // already a suitable key\n : this.#tableSpec(liteTableName(relation)).primaryKey;\n if (!keyColumns?.length) {\n throw new Error(\n `Cannot replicate table \"${relation.name}\" without a PRIMARY KEY or UNIQUE INDEX`,\n );\n }\n // For the common case (replica identity default), the row is already the\n // key for deletes and updates, in which case a new object can be avoided.\n if (numCols === keyColumns.length) {\n return row;\n }\n const key: Record<string, LiteValueType> = {};\n for (const col of keyColumns) {\n key[col] = row[col];\n }\n return key;\n }\n\n processInsert(insert: MessageInsert) {\n const table = liteTableName(insert.relation);\n const newRow = liteRow(\n insert.new,\n this.#tableSpec(table),\n this.#jsonFormat,\n );\n\n this.#upsert(table, {\n ...newRow.row,\n [ZERO_VERSION_COLUMN_NAME]: this.#version,\n });\n\n if (insert.relation.keyColumns.length === 0) {\n // INSERTs can be replicated for rows without a PRIMARY KEY or a\n // UNIQUE INDEX. These are written to the replica but not recorded\n // in the changeLog, because these rows cannot participate in IVM.\n //\n // (Once the table schema has been corrected to include a key, the\n // associated schema change will reset pipelines and data can be\n // loaded via hydration.)\n return;\n }\n const key = this.#getKey(newRow, insert);\n this.#logSetOp(table, key);\n }\n\n #upsert(table: string, row: LiteRow) {\n const columns = Object.keys(row).map(c => id(c));\n this.#db.run(\n `\n INSERT OR REPLACE INTO ${id(table)} (${columns.join(',')})\n VALUES (${Array.from({length: columns.length}).fill('?').join(',')})\n `,\n Object.values(row),\n );\n }\n\n // Updates by default are applied as UPDATE commands to support partial\n // row specifications from the change source. 
In particular, this is needed\n // to handle updates for which unchanged TOASTed values are not sent:\n //\n // https://www.postgresql.org/docs/current/protocol-logicalrep-message-formats.html#PROTOCOL-LOGICALREP-MESSAGE-FORMATS-TUPLEDATA\n //\n // However, in certain cases an UPDATE may be received for a row that\n // was not initially synced, such as when:\n // (1) an existing table is added to the app's publication, or\n // (2) a new sharding key is added to a shard during resharding.\n //\n // In order to facilitate \"resumptive\" replication, the logic falls back to\n // an INSERT if the update did not change any rows.\n // TODO: Figure out a solution for resumptive replication of rows\n // with TOASTed values.\n processUpdate(update: MessageUpdate) {\n const table = liteTableName(update.relation);\n const newRow = liteRow(\n update.new,\n this.#tableSpec(table),\n this.#jsonFormat,\n );\n const row = {...newRow.row, [ZERO_VERSION_COLUMN_NAME]: this.#version};\n\n // update.key is set with the old values if the key has changed.\n const oldKey = update.key\n ? this.#getKey(\n liteRow(update.key, this.#tableSpec(table), this.#jsonFormat),\n update,\n )\n : null;\n const newKey = this.#getKey(newRow, update);\n\n if (oldKey) {\n this.#logDeleteOp(table, oldKey);\n }\n this.#logSetOp(table, newKey);\n\n const currKey = oldKey ?? 
newKey;\n const conds = Object.keys(currKey).map(col => `${id(col)}=?`);\n const setExprs = Object.keys(row).map(col => `${id(col)}=?`);\n\n const {changes} = this.#db.run(\n `\n UPDATE ${id(table)}\n SET ${setExprs.join(',')}\n WHERE ${conds.join(' AND ')}\n `,\n [...Object.values(row), ...Object.values(currKey)],\n );\n\n // If the UPDATE did not affect any rows, perform an UPSERT of the\n // new row for resumptive replication.\n if (changes === 0) {\n this.#upsert(table, row);\n }\n }\n\n processDelete(del: MessageDelete) {\n const table = liteTableName(del.relation);\n const rowKey = this.#getKey(\n liteRow(del.key, this.#tableSpec(table), this.#jsonFormat),\n del,\n );\n\n this.#delete(table, rowKey);\n\n if (this.#mode === 'serving') {\n this.#logDeleteOp(table, rowKey);\n }\n }\n\n #delete(table: string, rowKey: LiteRowKey) {\n const conds = Object.keys(rowKey).map(col => `${id(col)}=?`);\n this.#db.run(\n `DELETE FROM ${id(table)} WHERE ${conds.join(' AND ')}`,\n Object.values(rowKey),\n );\n }\n\n processTruncate(truncate: MessageTruncate) {\n for (const relation of truncate.relations) {\n const table = liteTableName(relation);\n // Update replica data.\n this.#db.run(`DELETE FROM ${id(table)}`);\n\n // Update change log.\n this.#logTruncateOp(table);\n }\n }\n processCreateTable(create: TableCreate) {\n const table = mapPostgresToLite(create.spec);\n this.#db.db.exec(createLiteTableStatement(table));\n\n // Write to metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n for (const [colName, colSpec] of Object.entries(create.spec.columns)) {\n store.insert(table.name, colName, colSpec);\n }\n }\n\n this.#logResetOp(table.name);\n this.#lc.info?.(create.tag, table.name);\n }\n\n processRenameTable(rename: TableRename) {\n const oldName = liteTableName(rename.old);\n const newName = liteTableName(rename.new);\n this.#db.db.exec(`ALTER TABLE ${id(oldName)} RENAME TO ${id(newName)}`);\n\n // Rename in metadata table\n 
const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.renameTable(oldName, newName);\n }\n\n this.#bumpVersions(newName);\n this.#logResetOp(oldName);\n this.#lc.info?.(rename.tag, oldName, newName);\n }\n\n processAddColumn(msg: ColumnAdd) {\n const table = liteTableName(msg.table);\n const {name} = msg.column;\n const spec = mapPostgresToLiteColumn(table, msg.column);\n this.#db.db.exec(\n `ALTER TABLE ${id(table)} ADD ${id(name)} ${liteColumnDef(spec)}`,\n );\n\n // Write to metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.insert(table, name, msg.column.spec);\n }\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, msg.column);\n }\n\n processUpdateColumn(msg: ColumnUpdate) {\n const table = liteTableName(msg.table);\n let oldName = msg.old.name;\n const newName = msg.new.name;\n\n // update-column can ignore defaults because it does not change the values\n // in existing rows.\n //\n // https://www.postgresql.org/docs/current/sql-altertable.html#SQL-ALTERTABLE-DESC-SET-DROP-DEFAULT\n //\n // \"The new default value will only apply in subsequent INSERT or UPDATE\n // commands; it does not cause rows already in the table to change.\"\n //\n // This allows support for _changing_ column defaults to any expression,\n // since it does not affect what the replica needs to do.\n const oldSpec = mapPostgresToLiteColumn(table, msg.old, 'ignore-default');\n const newSpec = mapPostgresToLiteColumn(table, msg.new, 'ignore-default');\n\n // The only updates that are relevant are the column name and the data type.\n if (oldName === newName && oldSpec.dataType === newSpec.dataType) {\n this.#lc.info?.(msg.tag, 'no thing to update', oldSpec, newSpec);\n return;\n }\n // If the data type changes, we have to make a new column with the new data type\n // and copy the values over.\n if (oldSpec.dataType !== newSpec.dataType) {\n // Remember (and drop) the indexes that reference the 
column.\n const indexes = listIndexes(this.#db.db).filter(\n idx => idx.tableName === table && oldName in idx.columns,\n );\n const stmts = indexes.map(idx => `DROP INDEX IF EXISTS ${id(idx.name)};`);\n const tmpName = `tmp.${newName}`;\n stmts.push(`\n ALTER TABLE ${id(table)} ADD ${id(tmpName)} ${liteColumnDef(newSpec)};\n UPDATE ${id(table)} SET ${id(tmpName)} = ${id(oldName)};\n ALTER TABLE ${id(table)} DROP ${id(oldName)};\n `);\n for (const idx of indexes) {\n // Re-create the indexes to reference the new column.\n idx.columns[tmpName] = idx.columns[oldName];\n delete idx.columns[oldName];\n stmts.push(createLiteIndexStatement(idx));\n }\n this.#db.db.exec(stmts.join(''));\n oldName = tmpName;\n }\n if (oldName !== newName) {\n this.#db.db.exec(\n `ALTER TABLE ${id(table)} RENAME ${id(oldName)} TO ${id(newName)}`,\n );\n }\n\n // Update metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.update(table, msg.old.name, msg.new.name, msg.new.spec);\n }\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, msg.new);\n }\n\n processDropColumn(msg: ColumnDrop) {\n const table = liteTableName(msg.table);\n const {column} = msg;\n this.#db.db.exec(`ALTER TABLE ${id(table)} DROP ${id(column)}`);\n\n // Delete from metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.deleteColumn(table, column);\n }\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, column);\n }\n\n processDropTable(drop: TableDrop) {\n const name = liteTableName(drop.id);\n this.#db.db.exec(`DROP TABLE IF EXISTS ${id(name)}`);\n\n // Delete from metadata table\n const store = ColumnMetadataStore.getInstance(this.#db.db);\n if (store) {\n store.deleteTable(name);\n }\n\n this.#logResetOp(name);\n this.#lc.info?.(drop.tag, name);\n }\n\n processCreateIndex(create: IndexCreate) {\n const index = mapPostgresToLiteIndex(create.spec);\n this.#db.db.exec(createLiteIndexStatement(index));\n\n 
// indexes affect tables visibility (e.g. sync-ability is gated on\n // having a unique index), so reset pipelines to refresh table schemas.\n this.#logResetOp(index.tableName);\n this.#lc.info?.(create.tag, index.name);\n }\n\n processDropIndex(drop: IndexDrop) {\n const name = liteTableName(drop.id);\n this.#db.db.exec(`DROP INDEX IF EXISTS ${id(name)}`);\n this.#lc.info?.(drop.tag, name);\n }\n\n #bumpVersions(table: string) {\n this.#db.run(\n `UPDATE ${id(table)} SET ${id(ZERO_VERSION_COLUMN_NAME)} = ?`,\n this.#version,\n );\n this.#logResetOp(table);\n }\n\n #logSetOp(table: string, key: LiteRowKey) {\n if (this.#mode === 'serving') {\n logSetOp(this.#db, this.#version, this.#pos++, table, key);\n }\n }\n\n #logDeleteOp(table: string, key: LiteRowKey) {\n if (this.#mode === 'serving') {\n logDeleteOp(this.#db, this.#version, this.#pos++, table, key);\n }\n }\n\n #logTruncateOp(table: string) {\n if (this.#mode === 'serving') {\n logTruncateOp(this.#db, this.#version, table);\n }\n }\n\n #logResetOp(table: string) {\n this.#schemaChanged = true;\n if (this.#mode === 'serving') {\n logResetOp(this.#db, this.#version, table);\n }\n this.#reloadTableSpecs();\n }\n\n /** @returns `true` if the schema was updated. 
*/\n processCommit(commit: MessageCommit, watermark: string): boolean {\n if (watermark !== this.#version) {\n throw new Error(\n `'commit' version ${watermark} does not match 'begin' version ${\n this.#version\n }: ${stringify(commit)}`,\n );\n }\n updateReplicationWatermark(this.#db, watermark);\n\n if (this.#schemaChanged) {\n const start = Date.now();\n this.#db.db.pragma('optimize');\n this.#lc.info?.(\n `PRAGMA optimized after schema change (${Date.now() - start} ms)`,\n );\n }\n\n if (this.#mode !== 'initial-sync') {\n this.#db.commit();\n }\n\n const elapsedMs = Date.now() - this.#startMs;\n this.#lc.debug?.(`Committed tx@${this.#version} (${elapsedMs} ms)`);\n\n return this.#schemaChanged;\n }\n\n abort(lc: LogContext) {\n lc.info?.(`aborting transaction ${this.#version}`);\n this.#db.rollback();\n }\n}\n\nfunction ensureError(err: unknown): Error {\n if (err instanceof Error) {\n return err;\n }\n const error = new Error();\n error.cause = err;\n return error;\n}\n"],"names":[],"mappings":";;;;;;;;;;;;;;;AAmFO,MAAM,gBAAgB;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA,EAKA,kCAAkB,IAAA;AAAA,EAE3B,aAA0C;AAAA,EAE1C;AAAA,EAEA,YACE,IACA,MACA,aACA;AACA,SAAK,MAAM;AACX,SAAK,QAAQ;AACb,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,IAAgB,KAAc;AAClC,QAAI,CAAC,KAAK,UAAU;AAClB,WAAK,YAAY,MAAM,EAAE;AAEzB,WAAK,WAAW,YAAY,GAAG;AAE/B,UAAI,EAAE,eAAe,aAAa;AAEhC,WAAG,QAAQ,8BAA8B,KAAK,QAAQ;AACtD,aAAK,aAAa,IAAI,KAAK,QAAQ;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,IAAgB;AACpB,SAAK,MAAM,IAAI,IAAI,WAAA,CAAY;AAAA,EACjC;AAAA;AAAA,EAGA,eACE,IACA,YACqB;AACrB,UAAM,CAAC,MAAM,OAAO,IAAI;AACxB,QAAI,KAAK,UAAU;AACjB,SAAG,QAAQ,YAAY,QAAQ,GAAG,EAAE;AACpC,aAAO;AAAA,IACT;AACA,QAAI;AACF,YAAM,YACJ,SAAS,UACL,WAAW,CAAC,EAAE,kBACd,SAAS,WACP,WAAW,CAAC,EAAE,YACd;AACR,aAAO,KAAK,gBAAgB,IAAI,SAAS,SAAS;AAAA,IACpD,SAAS,GAAG;AACV,WAAK,MAAM,IAAI,CAAC;AAAA,IAClB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,kBACE,IACA,eACA,YACsB;AACtB,UAAM,QAAQ,KAAK,IAAA;AASnB,aAAS,IAAI,KAAK,KAAK;AACrB,UAAI;AACF,eAAO,IAAI;AAAA,UACT;AAAA,UACA,KAAK;AA
AA,UACL,KAAK;AAAA,UACL,KAAK;AAAA,UACL;AAAA,UACA;AAAA,QAAA;AAAA,MAEJ,SAAS,GAAG;AACV,YAAI,aAAa,eAAe,EAAE,SAAS,eAAe;AACxD,aAAG;AAAA,YACD,mBAAmB,KAAK,IAAA,IAAQ,KAAK,gBAAgB,IAAI,CAAC;AAAA,YAG1D;AAAA,UAAA;AAEF;AAAA,QACF;AACA,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,gBACE,IACA,KACA,WACqB;AACrB,QAAI,IAAI,QAAQ,SAAS;AACvB,UAAI,KAAK,YAAY;AACnB,cAAM,IAAI,MAAM,4BAA4B,UAAU,GAAG,CAAC,EAAE;AAAA,MAC9D;AACA,WAAK,aAAa,KAAK;AAAA,QACrB;AAAA,QACA,KAAK,SAAS;AAAA,QACd,IAAI,QAAQ;AAAA,MAAA;AAEd,aAAO;AAAA,IACT;AAGA,UAAM,KAAK,KAAK;AAChB,QAAI,CAAC,IAAI;AACP,YAAM,IAAI;AAAA,QACR,4CAA4C,UAAU,GAAG,CAAC;AAAA,MAAA;AAAA,IAE9D;AAEA,QAAI,IAAI,QAAQ,UAAU;AAExB,WAAK,aAAa;AAElB,aAAO,SAAS;AAChB,YAAM,gBAAgB,GAAG,cAAc,KAAK,SAAS;AACrD,aAAO,EAAC,WAAW,cAAA;AAAA,IACrB;AAEA,QAAI,IAAI,QAAQ,YAAY;AAC1B,WAAK,YAAY,MAAM,EAAE;AACzB,WAAK,aAAa;AAClB,aAAO;AAAA,IACT;AAEA,YAAQ,IAAI,KAAA;AAAA,MACV,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,gBAAgB,GAAG;AACtB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF,KAAK;AACH,WAAG,oBAAoB,GAAG;AAC1B;AAAA,MACF,KAAK;AACH,WAAG,kBAAkB,GAAG;AACxB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF;AACE,oBAAe;AAAA,IAAA;AAGnB,WAAO;AAAA,EACT;AACF;AAuBA,MAAM,qBAAqB;AAAA,EAChB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,OAAO;AAAA,EACP,iBAAiB;AAAA,EAEjB,YACE,IACA,IACA,MACA,YACA,eACA,YACA;AACA,SAAK,WAAW,KAAK,IAAA;AACrB,SAAK,QAAQ;AACb,SAAK,cAAc;AAEnB,YAAQ,MAAA;AAAA,MACN,KAAK;AAQH,WAAG,gBAAA;AACH;AAAA,MACF,KAAK;AAKH,WAAG,eAAA;AACH;AAAA,MACF,KAAK;AAGH;AAAA,MACF;AACE,oBAAA;AAAA,IAAY;AAEhB,SAAK,MAAM;AACX,SAAK,WAAW;AAChB,SAAK,MAAM,GAAG,YAAY,WAAW,aAAa;AAClD,SAAK,cAAc;AAEnB,QAAI,KAAK,YAAY,SAAS,GAAG;AAC/B,WAAK,kBAAA;AAAA,IACP;AAAA,EACF;AAAA,EAEA,oBAAoB;AAClB,SAAK,YAAY,MAAA;AAEjB,UAAM,WAAW,gBAAgB,KAAK,KAAK,KAA
K,IAAI,EAAE;AACtD,aAAS,QAAQ,WAAW,KAAK,IAAI,EAAE,GAAG;AACxC,UAAI,CAAC,KAAK,YAAY;AACpB,eAAO;AAAA,UACL,GAAG;AAAA,UACH,YAAY;AAAA,YACV,GAAI,SAAS,IAAI,KAAK,IAAI,GAAG,UAAU,cAAc,CAAA;AAAA,UAAC;AAAA,QACxD;AAAA,MAEJ;AACA,WAAK,YAAY,IAAI,KAAK,MAAM,IAAI;AAAA,IACtC;AAAA,EACF;AAAA,EAEA,WAAW,MAAc;AACvB,WAAO,KAAK,KAAK,YAAY,IAAI,IAAI,GAAG,iBAAiB,IAAI,EAAE;AAAA,EACjE;AAAA,EAEA,QACE,EAAC,KAAK,WACN,EAAC,YACW;AACZ,UAAM,aACJ,SAAS,oBAAoB,SACzB,SAAS,aACT,KAAK,WAAW,cAAc,QAAQ,CAAC,EAAE;AAC/C,QAAI,CAAC,YAAY,QAAQ;AACvB,YAAM,IAAI;AAAA,QACR,2BAA2B,SAAS,IAAI;AAAA,MAAA;AAAA,IAE5C;AAGA,QAAI,YAAY,WAAW,QAAQ;AACjC,aAAO;AAAA,IACT;AACA,UAAM,MAAqC,CAAA;AAC3C,eAAW,OAAO,YAAY;AAC5B,UAAI,GAAG,IAAI,IAAI,GAAG;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,cAAc,QAAuB;AACnC,UAAM,QAAQ,cAAc,OAAO,QAAQ;AAC3C,UAAM,SAAS;AAAA,MACb,OAAO;AAAA,MACP,KAAK,WAAW,KAAK;AAAA,MACrB,KAAK;AAAA,IAAA;AAGP,SAAK,QAAQ,OAAO;AAAA,MAClB,GAAG,OAAO;AAAA,MACV,CAAC,wBAAwB,GAAG,KAAK;AAAA,IAAA,CAClC;AAED,QAAI,OAAO,SAAS,WAAW,WAAW,GAAG;AAQ3C;AAAA,IACF;AACA,UAAM,MAAM,KAAK,QAAQ,QAAQ,MAAM;AACvC,SAAK,UAAU,OAAO,GAAG;AAAA,EAC3B;AAAA,EAEA,QAAQ,OAAe,KAAc;AACnC,UAAM,UAAU,OAAO,KAAK,GAAG,EAAE,IAAI,CAAA,MAAK,GAAG,CAAC,CAAC;AAC/C,SAAK,IAAI;AAAA,MACP;AAAA,+BACyB,GAAG,KAAK,CAAC,KAAK,QAAQ,KAAK,GAAG,CAAC;AAAA,kBAC5C,MAAM,KAAK,EAAC,QAAQ,QAAQ,QAAO,EAAE,KAAK,GAAG,EAAE,KAAK,GAAG,CAAC;AAAA;AAAA,MAEpE,OAAO,OAAO,GAAG;AAAA,IAAA;AAAA,EAErB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,cAAc,QAAuB;AACnC,UAAM,QAAQ,cAAc,OAAO,QAAQ;AAC3C,UAAM,SAAS;AAAA,MACb,OAAO;AAAA,MACP,KAAK,WAAW,KAAK;AAAA,MACrB,KAAK;AAAA,IAAA;AAEP,UAAM,MAAM,EAAC,GAAG,OAAO,KAAK,CAAC,wBAAwB,GAAG,KAAK,SAAA;AAG7D,UAAM,SAAS,OAAO,MAClB,KAAK;AAAA,MACH,QAAQ,OAAO,KAAK,KAAK,WAAW,KAAK,GAAG,KAAK,WAAW;AAAA,MAC5D;AAAA,IAAA,IAEF;AACJ,UAAM,SAAS,KAAK,QAAQ,QAAQ,MAAM;AAE1C,QAAI,QAAQ;AACV,WAAK,aAAa,OAAO,MAAM;AAAA,IACjC;AACA,SAAK,UAAU,OAAO,MAAM;AAE5B,UAAM,UAAU,UAAU;AAC1B,UAAM,QAAQ,OAAO,KAAK,OAAO,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAC5D,UAAM,WAAW,OAAO,KAAK,GAAG,EAAE,IAAI,CAAA,QAAO,GAAG,GA
AG,GAAG,CAAC,IAAI;AAE3D,UAAM,EAAC,QAAA,IAAW,KAAK,IAAI;AAAA,MACzB;AAAA,eACS,GAAG,KAAK,CAAC;AAAA,cACV,SAAS,KAAK,GAAG,CAAC;AAAA,gBAChB,MAAM,KAAK,OAAO,CAAC;AAAA;AAAA,MAE7B,CAAC,GAAG,OAAO,OAAO,GAAG,GAAG,GAAG,OAAO,OAAO,OAAO,CAAC;AAAA,IAAA;AAKnD,QAAI,YAAY,GAAG;AACjB,WAAK,QAAQ,OAAO,GAAG;AAAA,IACzB;AAAA,EACF;AAAA,EAEA,cAAc,KAAoB;AAChC,UAAM,QAAQ,cAAc,IAAI,QAAQ;AACxC,UAAM,SAAS,KAAK;AAAA,MAClB,QAAQ,IAAI,KAAK,KAAK,WAAW,KAAK,GAAG,KAAK,WAAW;AAAA,MACzD;AAAA,IAAA;AAGF,SAAK,QAAQ,OAAO,MAAM;AAE1B,QAAI,KAAK,UAAU,WAAW;AAC5B,WAAK,aAAa,OAAO,MAAM;AAAA,IACjC;AAAA,EACF;AAAA,EAEA,QAAQ,OAAe,QAAoB;AACzC,UAAM,QAAQ,OAAO,KAAK,MAAM,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAC3D,SAAK,IAAI;AAAA,MACP,eAAe,GAAG,KAAK,CAAC,UAAU,MAAM,KAAK,OAAO,CAAC;AAAA,MACrD,OAAO,OAAO,MAAM;AAAA,IAAA;AAAA,EAExB;AAAA,EAEA,gBAAgB,UAA2B;AACzC,eAAW,YAAY,SAAS,WAAW;AACzC,YAAM,QAAQ,cAAc,QAAQ;AAEpC,WAAK,IAAI,IAAI,eAAe,GAAG,KAAK,CAAC,EAAE;AAGvC,WAAK,eAAe,KAAK;AAAA,IAC3B;AAAA,EACF;AAAA,EACA,mBAAmB,QAAqB;AACtC,UAAM,QAAQ,kBAAkB,OAAO,IAAI;AAC3C,SAAK,IAAI,GAAG,KAAK,yBAAyB,KAAK,CAAC;AAGhD,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,iBAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,OAAO,KAAK,OAAO,GAAG;AACpE,cAAM,OAAO,MAAM,MAAM,SAAS,OAAO;AAAA,MAC3C;AAAA,IACF;AAEA,SAAK,YAAY,MAAM,IAAI;AAC3B,SAAK,IAAI,OAAO,OAAO,KAAK,MAAM,IAAI;AAAA,EACxC;AAAA,EAEA,mBAAmB,QAAqB;AACtC,UAAM,UAAU,cAAc,OAAO,GAAG;AACxC,UAAM,UAAU,cAAc,OAAO,GAAG;AACxC,SAAK,IAAI,GAAG,KAAK,eAAe,GAAG,OAAO,CAAC,cAAc,GAAG,OAAO,CAAC,EAAE;AAGtE,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,YAAY,SAAS,OAAO;AAAA,IACpC;AAEA,SAAK,cAAc,OAAO;AAC1B,SAAK,YAAY,OAAO;AACxB,SAAK,IAAI,OAAO,OAAO,KAAK,SAAS,OAAO;AAAA,EAC9C;AAAA,EAEA,iBAAiB,KAAgB;AAC/B,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,UAAM,EAAC,SAAQ,IAAI;AACnB,UAAM,OAAO,wBAAwB,OAAO,IAAI,MAAM;AACtD,SAAK,IAAI,GAAG;AAAA,MACV,eAAe,GAAG,KAAK,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,cAAc,IAAI,CAAC;AAAA,IAAA;AAIjE,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,OAAO,OAAO,MAAM,IAAI,OAAO,IAAI;AAAA,IAC3C;AAEA,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,I
AAI,KAAK,OAAO,IAAI,MAAM;AAAA,EAC5C;AAAA,EAEA,oBAAoB,KAAmB;AACrC,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,QAAI,UAAU,IAAI,IAAI;AACtB,UAAM,UAAU,IAAI,IAAI;AAYxB,UAAM,UAAU,wBAAwB,OAAO,IAAI,KAAK,gBAAgB;AACxE,UAAM,UAAU,wBAAwB,OAAO,IAAI,KAAK,gBAAgB;AAGxE,QAAI,YAAY,WAAW,QAAQ,aAAa,QAAQ,UAAU;AAChE,WAAK,IAAI,OAAO,IAAI,KAAK,sBAAsB,SAAS,OAAO;AAC/D;AAAA,IACF;AAGA,QAAI,QAAQ,aAAa,QAAQ,UAAU;AAEzC,YAAM,UAAU,YAAY,KAAK,IAAI,EAAE,EAAE;AAAA,QACvC,CAAA,QAAO,IAAI,cAAc,SAAS,WAAW,IAAI;AAAA,MAAA;AAEnD,YAAM,QAAQ,QAAQ,IAAI,CAAA,QAAO,wBAAwB,GAAG,IAAI,IAAI,CAAC,GAAG;AACxE,YAAM,UAAU,OAAO,OAAO;AAC9B,YAAM,KAAK;AAAA,sBACK,GAAG,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAC,IAAI,cAAc,OAAO,CAAC;AAAA,iBAC3D,GAAG,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAC,MAAM,GAAG,OAAO,CAAC;AAAA,sBACxC,GAAG,KAAK,CAAC,SAAS,GAAG,OAAO,CAAC;AAAA,SAC1C;AACH,iBAAW,OAAO,SAAS;AAEzB,YAAI,QAAQ,OAAO,IAAI,IAAI,QAAQ,OAAO;AAC1C,eAAO,IAAI,QAAQ,OAAO;AAC1B,cAAM,KAAK,yBAAyB,GAAG,CAAC;AAAA,MAC1C;AACA,WAAK,IAAI,GAAG,KAAK,MAAM,KAAK,EAAE,CAAC;AAC/B,gBAAU;AAAA,IACZ;AACA,QAAI,YAAY,SAAS;AACvB,WAAK,IAAI,GAAG;AAAA,QACV,eAAe,GAAG,KAAK,CAAC,WAAW,GAAG,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC;AAAA,MAAA;AAAA,IAEpE;AAGA,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,OAAO,OAAO,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,IAAI;AAAA,IAC9D;AAEA,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,IAAI,GAAG;AAAA,EACzC;AAAA,EAEA,kBAAkB,KAAiB;AACjC,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,UAAM,EAAC,WAAU;AACjB,SAAK,IAAI,GAAG,KAAK,eAAe,GAAG,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,EAAE;AAG9D,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,aAAa,OAAO,MAAM;AAAA,IAClC;AAEA,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,MAAM;AAAA,EACxC;AAAA,EAEA,iBAAiB,MAAiB;AAChC,UAAM,OAAO,cAAc,KAAK,EAAE;AAClC,SAAK,IAAI,GAAG,KAAK,wBAAwB,GAAG,IAAI,CAAC,EAAE;AAGnD,UAAM,QAAQ,oBAAoB,YAAY,KAAK,IAAI,EAAE;AACzD,QAAI,OAAO;AACT,YAAM,YAAY,IAAI;AAAA,IACxB;AAEA,SAAK,YAAY,IAAI;AACrB,SAAK,IAAI,OAAO,KAAK,KAAK,IAAI;AAAA,EAChC;AAAA,EAEA,mBAAmB,QAAqB;AACtC,UAAM,QAAQ,uBAAuB,OAAO,IAAI;AAChD,SAAK,IAAI,GAAG,KAAK,yBAAyB,KAAK,CAAC;AAIhD,SAAK,YAAY,MA
AM,SAAS;AAChC,SAAK,IAAI,OAAO,OAAO,KAAK,MAAM,IAAI;AAAA,EACxC;AAAA,EAEA,iBAAiB,MAAiB;AAChC,UAAM,OAAO,cAAc,KAAK,EAAE;AAClC,SAAK,IAAI,GAAG,KAAK,wBAAwB,GAAG,IAAI,CAAC,EAAE;AACnD,SAAK,IAAI,OAAO,KAAK,KAAK,IAAI;AAAA,EAChC;AAAA,EAEA,cAAc,OAAe;AAC3B,SAAK,IAAI;AAAA,MACP,UAAU,GAAG,KAAK,CAAC,QAAQ,GAAG,wBAAwB,CAAC;AAAA,MACvD,KAAK;AAAA,IAAA;AAEP,SAAK,YAAY,KAAK;AAAA,EACxB;AAAA,EAEA,UAAU,OAAe,KAAiB;AACxC,QAAI,KAAK,UAAU,WAAW;AAC5B,eAAS,KAAK,KAAK,KAAK,UAAU,KAAK,QAAQ,OAAO,GAAG;AAAA,IAC3D;AAAA,EACF;AAAA,EAEA,aAAa,OAAe,KAAiB;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,kBAAY,KAAK,KAAK,KAAK,UAAU,KAAK,QAAQ,OAAO,GAAG;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,eAAe,OAAe;AAC5B,QAAI,KAAK,UAAU,WAAW;AAC5B,oBAAc,KAAK,KAAK,KAAK,UAAU,KAAK;AAAA,IAC9C;AAAA,EACF;AAAA,EAEA,YAAY,OAAe;AACzB,SAAK,iBAAiB;AACtB,QAAI,KAAK,UAAU,WAAW;AAC5B,iBAAW,KAAK,KAAK,KAAK,UAAU,KAAK;AAAA,IAC3C;AACA,SAAK,kBAAA;AAAA,EACP;AAAA;AAAA,EAGA,cAAc,QAAuB,WAA4B;AAC/D,QAAI,cAAc,KAAK,UAAU;AAC/B,YAAM,IAAI;AAAA,QACR,oBAAoB,SAAS,mCAC3B,KAAK,QACP,KAAK,UAAU,MAAM,CAAC;AAAA,MAAA;AAAA,IAE1B;AACA,+BAA2B,KAAK,KAAK,SAAS;AAE9C,QAAI,KAAK,gBAAgB;AACvB,YAAM,QAAQ,KAAK,IAAA;AACnB,WAAK,IAAI,GAAG,OAAO,UAAU;AAC7B,WAAK,IAAI;AAAA,QACP,yCAAyC,KAAK,IAAA,IAAQ,KAAK;AAAA,MAAA;AAAA,IAE/D;AAEA,QAAI,KAAK,UAAU,gBAAgB;AACjC,WAAK,IAAI,OAAA;AAAA,IACX;AAEA,UAAM,YAAY,KAAK,IAAA,IAAQ,KAAK;AACpC,SAAK,IAAI,QAAQ,gBAAgB,KAAK,QAAQ,KAAK,SAAS,MAAM;AAElE,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,IAAgB;AACpB,OAAG,OAAO,wBAAwB,KAAK,QAAQ,EAAE;AACjD,SAAK,IAAI,SAAA;AAAA,EACX;AACF;AAEA,SAAS,YAAY,KAAqB;AACxC,MAAI,eAAe,OAAO;AACxB,WAAO;AAAA,EACT;AACA,QAAM,QAAQ,IAAI,MAAA;AAClB,QAAM,QAAQ;AACd,SAAO;AACT;"}
1
+ {"version":3,"file":"change-processor.js","sources":["../../../../../../zero-cache/src/services/replicator/change-processor.ts"],"sourcesContent":["import type {LogContext} from '@rocicorp/logger';\nimport {SqliteError} from '@rocicorp/zero-sqlite3';\nimport {AbortError} from '../../../../shared/src/abort-error.ts';\nimport {assert, unreachable} from '../../../../shared/src/asserts.ts';\nimport {stringify} from '../../../../shared/src/bigint-json.ts';\nimport {must} from '../../../../shared/src/must.ts';\nimport {\n createLiteIndexStatement,\n createLiteTableStatement,\n liteColumnDef,\n} from '../../db/create.ts';\nimport {\n computeZqlSpecs,\n listIndexes,\n listTables,\n} from '../../db/lite-tables.ts';\nimport {\n mapPostgresToLite,\n mapPostgresToLiteColumn,\n mapPostgresToLiteIndex,\n} from '../../db/pg-to-lite.ts';\nimport type {LiteTableSpec} from '../../db/specs.ts';\nimport type {StatementRunner} from '../../db/statements.ts';\nimport type {LexiVersion} from '../../types/lexi-version.ts';\nimport {\n JSON_PARSED,\n liteRow,\n type JSONFormat,\n type LiteRow,\n type LiteRowKey,\n type LiteValueType,\n} from '../../types/lite.ts';\nimport {liteTableName} from '../../types/names.ts';\nimport {id} from '../../types/sql.ts';\nimport type {\n Change,\n ColumnAdd,\n ColumnDrop,\n ColumnUpdate,\n IndexCreate,\n IndexDrop,\n MessageCommit,\n MessageDelete,\n MessageInsert,\n MessageRelation,\n MessageTruncate,\n MessageUpdate,\n TableCreate,\n TableDrop,\n TableRename,\n} from '../change-source/protocol/current/data.ts';\nimport type {ChangeStreamData} from '../change-source/protocol/current/downstream.ts';\nimport type {ReplicatorMode} from './replicator.ts';\nimport {\n logDeleteOp,\n logResetOp,\n logSetOp,\n logTruncateOp,\n} from './schema/change-log.ts';\nimport {ColumnMetadataStore} from './schema/column-metadata.ts';\nimport {\n ZERO_VERSION_COLUMN_NAME,\n updateReplicationWatermark,\n} from './schema/replication-state.ts';\n\nexport type 
ChangeProcessorMode = ReplicatorMode | 'initial-sync';\n\nexport type CommitResult = {\n watermark: string;\n schemaUpdated: boolean;\n};\n\n/**\n * The ChangeProcessor partitions the stream of messages into transactions\n * by creating a {@link TransactionProcessor} when a transaction begins, and dispatching\n * messages to it until the commit is received.\n *\n * From https://www.postgresql.org/docs/current/protocol-logical-replication.html#PROTOCOL-LOGICAL-MESSAGES-FLOW :\n *\n * \"The logical replication protocol sends individual transactions one by one.\n * This means that all messages between a pair of Begin and Commit messages\n * belong to the same transaction.\"\n */\nexport class ChangeProcessor {\n readonly #db: StatementRunner;\n readonly #mode: ChangeProcessorMode;\n readonly #failService: (lc: LogContext, err: unknown) => void;\n\n // The TransactionProcessor lazily loads table specs into this Map,\n // and reloads them after a schema change. It is cached here to avoid\n // reading them from the DB on every transaction.\n readonly #tableSpecs = new Map<string, LiteTableSpec>();\n\n #currentTx: TransactionProcessor | null = null;\n\n #failure: Error | undefined;\n\n constructor(\n db: StatementRunner,\n mode: ChangeProcessorMode,\n failService: (lc: LogContext, err: unknown) => void,\n ) {\n this.#db = db;\n this.#mode = mode;\n this.#failService = failService;\n }\n\n #fail(lc: LogContext, err: unknown) {\n if (!this.#failure) {\n this.#currentTx?.abort(lc); // roll back any pending transaction.\n\n this.#failure = ensureError(err);\n\n if (!(err instanceof AbortError)) {\n // Propagate the failure up to the service.\n lc.error?.('Message Processing failed:', this.#failure);\n this.#failService(lc, this.#failure);\n }\n }\n }\n\n abort(lc: LogContext) {\n this.#fail(lc, new AbortError());\n }\n\n /** @return If a transaction was committed. 
*/\n processMessage(\n lc: LogContext,\n downstream: ChangeStreamData,\n ): CommitResult | null {\n const [type, message] = downstream;\n if (this.#failure) {\n lc.debug?.(`Dropping ${message.tag}`);\n return null;\n }\n try {\n const watermark =\n type === 'begin'\n ? downstream[2].commitWatermark\n : type === 'commit'\n ? downstream[2].watermark\n : undefined;\n return this.#processMessage(lc, message, watermark);\n } catch (e) {\n this.#fail(lc, e);\n }\n return null;\n }\n\n #beginTransaction(\n lc: LogContext,\n commitVersion: string,\n jsonFormat: JSONFormat,\n ): TransactionProcessor {\n const start = Date.now();\n\n // litestream can technically hold the lock for an arbitrary amount of time\n // when checkpointing a large commit. Crashing on the busy-timeout in this\n // scenario will either produce a corrupt backup or otherwise prevent\n // replication from proceeding.\n //\n // Instead, retry the lock acquisition indefinitely. If this masks\n // an unknown deadlock situation, manual intervention will be necessary.\n for (let i = 0; ; i++) {\n try {\n return new TransactionProcessor(\n lc,\n this.#db,\n this.#mode,\n this.#tableSpecs,\n commitVersion,\n jsonFormat,\n );\n } catch (e) {\n if (e instanceof SqliteError && e.code === 'SQLITE_BUSY') {\n lc.warn?.(\n `SQLITE_BUSY for ${Date.now() - start} ms (attempt ${i + 1}). ` +\n `This is only expected if litestream is performing a large ` +\n `checkpoint.`,\n e,\n );\n continue;\n }\n throw e;\n }\n }\n }\n\n /** @return If a transaction was committed. */\n #processMessage(\n lc: LogContext,\n msg: Change,\n watermark: string | undefined,\n ): CommitResult | null {\n if (msg.tag === 'begin') {\n if (this.#currentTx) {\n throw new Error(`Already in a transaction ${stringify(msg)}`);\n }\n this.#currentTx = this.#beginTransaction(\n lc,\n must(watermark),\n msg.json ?? 
JSON_PARSED,\n );\n return null;\n }\n\n // For non-begin messages, there should be a #currentTx set.\n const tx = this.#currentTx;\n if (!tx) {\n throw new Error(\n `Received message outside of transaction: ${stringify(msg)}`,\n );\n }\n\n if (msg.tag === 'commit') {\n // Undef this.#currentTx to allow the assembly of the next transaction.\n this.#currentTx = null;\n\n assert(watermark, 'watermark is required for commit messages');\n const schemaUpdated = tx.processCommit(msg, watermark);\n return {watermark, schemaUpdated};\n }\n\n if (msg.tag === 'rollback') {\n this.#currentTx?.abort(lc);\n this.#currentTx = null;\n return null;\n }\n\n switch (msg.tag) {\n case 'insert':\n tx.processInsert(msg);\n break;\n case 'update':\n tx.processUpdate(msg);\n break;\n case 'delete':\n tx.processDelete(msg);\n break;\n case 'truncate':\n tx.processTruncate(msg);\n break;\n case 'create-table':\n tx.processCreateTable(msg);\n break;\n case 'rename-table':\n tx.processRenameTable(msg);\n break;\n case 'update-table-metadata':\n // TODO: Process this appropriately when backfill logic is added\n lc.info?.(`Received table metadata update`, msg);\n break;\n case 'add-column':\n tx.processAddColumn(msg);\n break;\n case 'update-column':\n tx.processUpdateColumn(msg);\n break;\n case 'drop-column':\n tx.processDropColumn(msg);\n break;\n case 'drop-table':\n tx.processDropTable(msg);\n break;\n case 'create-index':\n tx.processCreateIndex(msg);\n break;\n case 'drop-index':\n tx.processDropIndex(msg);\n break;\n default:\n unreachable(msg);\n }\n\n return null;\n }\n}\n\n/**\n * The {@link TransactionProcessor} handles the sequence of messages from\n * upstream, from `BEGIN` to `COMMIT` and executes the corresponding mutations\n * on the {@link postgres.TransactionSql} on the replica.\n *\n * When applying row contents to the replica, the `_0_version` column is added / updated,\n * and a corresponding entry in the `ChangeLog` is added. 
The version value is derived\n * from the watermark of the preceding transaction (stored as the `nextStateVersion` in the\n * `ReplicationState` table).\n *\n * Side note: For non-streaming Postgres transactions, the commitEndLsn (and thus\n * commit watermark) is available in the `begin` message, so it could theoretically\n * be used for the row version of changes within the transaction. However, the\n * commitEndLsn is not available in the streaming (in-progress) transaction\n * protocol, and may not be available for CDC streams of other upstream types.\n * Therefore, the zero replication protocol is designed to not require the commit\n * watermark when a transaction begins.\n *\n * Also of interest is the fact that all INSERT Messages are logically applied as\n * UPSERTs. See {@link processInsert} for the underlying motivation.\n */\nclass TransactionProcessor {\n readonly #lc: LogContext;\n readonly #startMs: number;\n readonly #db: StatementRunner;\n readonly #mode: ChangeProcessorMode;\n readonly #version: LexiVersion;\n readonly #tableSpecs: Map<string, LiteTableSpec>;\n readonly #jsonFormat: JSONFormat;\n readonly #columnMetadata: ColumnMetadataStore;\n\n #pos = 0;\n #schemaChanged = false;\n\n constructor(\n lc: LogContext,\n db: StatementRunner,\n mode: ChangeProcessorMode,\n tableSpecs: Map<string, LiteTableSpec>,\n commitVersion: LexiVersion,\n jsonFormat: JSONFormat,\n ) {\n this.#startMs = Date.now();\n this.#mode = mode;\n this.#jsonFormat = jsonFormat;\n\n switch (mode) {\n case 'serving':\n // Although the Replicator / Incremental Syncer is the only writer of the replica,\n // a `BEGIN CONCURRENT` transaction is used to allow View Syncers to simulate\n // (i.e. 
and `ROLLBACK`) changes on historic snapshots of the database for the\n // purpose of IVM).\n //\n // This TransactionProcessor is the only logic that will actually\n // `COMMIT` any transactions to the replica.\n db.beginConcurrent();\n break;\n case 'backup':\n // For the backup-replicator (i.e. replication-manager), there are no View Syncers\n // and thus BEGIN CONCURRENT is not necessary. In fact, BEGIN CONCURRENT can cause\n // deadlocks with forced wal-checkpoints (which `litestream replicate` performs),\n // so it is important to use vanilla transactions in this configuration.\n db.beginImmediate();\n break;\n case 'initial-sync':\n // When the ChangeProcessor is used for initial-sync, the calling code\n // handles the transaction boundaries.\n break;\n default:\n unreachable();\n }\n this.#db = db;\n this.#version = commitVersion;\n this.#lc = lc.withContext('version', commitVersion);\n this.#tableSpecs = tableSpecs;\n // The column_metadata table is guaranteed to exist since the\n // replica-schema.ts migration to v8.\n this.#columnMetadata = must(ColumnMetadataStore.getInstance(db.db));\n\n if (this.#tableSpecs.size === 0) {\n this.#reloadTableSpecs();\n }\n }\n\n #reloadTableSpecs() {\n this.#tableSpecs.clear();\n // zqlSpecs include the primary key derived from unique indexes\n const zqlSpecs = computeZqlSpecs(this.#lc, this.#db.db);\n for (let spec of listTables(this.#db.db)) {\n if (!spec.primaryKey) {\n spec = {\n ...spec,\n primaryKey: [\n ...(zqlSpecs.get(spec.name)?.tableSpec.primaryKey ?? []),\n ],\n };\n }\n this.#tableSpecs.set(spec.name, spec);\n }\n }\n\n #tableSpec(name: string) {\n return must(this.#tableSpecs.get(name), `Unknown table ${name}`);\n }\n\n #getKey(\n {row, numCols}: {row: LiteRow; numCols: number},\n {relation}: {relation: MessageRelation},\n ): LiteRowKey {\n const keyColumns =\n relation.rowKey.type !== 'full'\n ? 
relation.rowKey.columns // already a suitable key\n : this.#tableSpec(liteTableName(relation)).primaryKey;\n if (!keyColumns?.length) {\n throw new Error(\n `Cannot replicate table \"${relation.name}\" without a PRIMARY KEY or UNIQUE INDEX`,\n );\n }\n // For the common case (replica identity default), the row is already the\n // key for deletes and updates, in which case a new object can be avoided.\n if (numCols === keyColumns.length) {\n return row;\n }\n const key: Record<string, LiteValueType> = {};\n for (const col of keyColumns) {\n key[col] = row[col];\n }\n return key;\n }\n\n processInsert(insert: MessageInsert) {\n const table = liteTableName(insert.relation);\n const newRow = liteRow(\n insert.new,\n this.#tableSpec(table),\n this.#jsonFormat,\n );\n\n this.#upsert(table, {\n ...newRow.row,\n [ZERO_VERSION_COLUMN_NAME]: this.#version,\n });\n\n if (insert.relation.rowKey.columns.length === 0) {\n // INSERTs can be replicated for rows without a PRIMARY KEY or a\n // UNIQUE INDEX. These are written to the replica but not recorded\n // in the changeLog, because these rows cannot participate in IVM.\n //\n // (Once the table schema has been corrected to include a key, the\n // associated schema change will reset pipelines and data can be\n // loaded via hydration.)\n return;\n }\n const key = this.#getKey(newRow, insert);\n this.#logSetOp(table, key);\n }\n\n #upsert(table: string, row: LiteRow) {\n const columns = Object.keys(row).map(c => id(c));\n this.#db.run(\n `\n INSERT OR REPLACE INTO ${id(table)} (${columns.join(',')})\n VALUES (${Array.from({length: columns.length}).fill('?').join(',')})\n `,\n Object.values(row),\n );\n }\n\n // Updates by default are applied as UPDATE commands to support partial\n // row specifications from the change source. 
In particular, this is needed\n // to handle updates for which unchanged TOASTed values are not sent:\n //\n // https://www.postgresql.org/docs/current/protocol-logicalrep-message-formats.html#PROTOCOL-LOGICALREP-MESSAGE-FORMATS-TUPLEDATA\n //\n // However, in certain cases an UPDATE may be received for a row that\n // was not initially synced, such as when:\n // (1) an existing table is added to the app's publication, or\n // (2) a new sharding key is added to a shard during resharding.\n //\n // In order to facilitate \"resumptive\" replication, the logic falls back to\n // an INSERT if the update did not change any rows.\n // TODO: Figure out a solution for resumptive replication of rows\n // with TOASTed values.\n processUpdate(update: MessageUpdate) {\n const table = liteTableName(update.relation);\n const newRow = liteRow(\n update.new,\n this.#tableSpec(table),\n this.#jsonFormat,\n );\n const row = {...newRow.row, [ZERO_VERSION_COLUMN_NAME]: this.#version};\n\n // update.key is set with the old values if the key has changed.\n const oldKey = update.key\n ? this.#getKey(\n liteRow(update.key, this.#tableSpec(table), this.#jsonFormat),\n update,\n )\n : null;\n const newKey = this.#getKey(newRow, update);\n\n if (oldKey) {\n this.#logDeleteOp(table, oldKey);\n }\n this.#logSetOp(table, newKey);\n\n const currKey = oldKey ?? 
newKey;\n const conds = Object.keys(currKey).map(col => `${id(col)}=?`);\n const setExprs = Object.keys(row).map(col => `${id(col)}=?`);\n\n const {changes} = this.#db.run(\n `\n UPDATE ${id(table)}\n SET ${setExprs.join(',')}\n WHERE ${conds.join(' AND ')}\n `,\n [...Object.values(row), ...Object.values(currKey)],\n );\n\n // If the UPDATE did not affect any rows, perform an UPSERT of the\n // new row for resumptive replication.\n if (changes === 0) {\n this.#upsert(table, row);\n }\n }\n\n processDelete(del: MessageDelete) {\n const table = liteTableName(del.relation);\n const rowKey = this.#getKey(\n liteRow(del.key, this.#tableSpec(table), this.#jsonFormat),\n del,\n );\n\n this.#delete(table, rowKey);\n\n if (this.#mode === 'serving') {\n this.#logDeleteOp(table, rowKey);\n }\n }\n\n #delete(table: string, rowKey: LiteRowKey) {\n const conds = Object.keys(rowKey).map(col => `${id(col)}=?`);\n this.#db.run(\n `DELETE FROM ${id(table)} WHERE ${conds.join(' AND ')}`,\n Object.values(rowKey),\n );\n }\n\n processTruncate(truncate: MessageTruncate) {\n for (const relation of truncate.relations) {\n const table = liteTableName(relation);\n // Update replica data.\n this.#db.run(`DELETE FROM ${id(table)}`);\n\n // Update change log.\n this.#logTruncateOp(table);\n }\n }\n processCreateTable(create: TableCreate) {\n const table = mapPostgresToLite(create.spec);\n this.#db.db.exec(createLiteTableStatement(table));\n\n // Write to metadata table\n for (const [colName, colSpec] of Object.entries(create.spec.columns)) {\n this.#columnMetadata.insert(table.name, colName, colSpec);\n }\n\n this.#logResetOp(table.name);\n this.#lc.info?.(create.tag, table.name);\n }\n\n processRenameTable(rename: TableRename) {\n const oldName = liteTableName(rename.old);\n const newName = liteTableName(rename.new);\n this.#db.db.exec(`ALTER TABLE ${id(oldName)} RENAME TO ${id(newName)}`);\n\n // Rename in metadata table\n this.#columnMetadata.renameTable(oldName, newName);\n\n 
this.#bumpVersions(newName);\n this.#logResetOp(oldName);\n this.#lc.info?.(rename.tag, oldName, newName);\n }\n\n processAddColumn(msg: ColumnAdd) {\n const table = liteTableName(msg.table);\n const {name} = msg.column;\n const spec = mapPostgresToLiteColumn(table, msg.column);\n this.#db.db.exec(\n `ALTER TABLE ${id(table)} ADD ${id(name)} ${liteColumnDef(spec)}`,\n );\n\n // Write to metadata table\n this.#columnMetadata.insert(table, name, msg.column.spec);\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, msg.column);\n }\n\n processUpdateColumn(msg: ColumnUpdate) {\n const table = liteTableName(msg.table);\n let oldName = msg.old.name;\n const newName = msg.new.name;\n\n // update-column can ignore defaults because it does not change the values\n // in existing rows.\n //\n // https://www.postgresql.org/docs/current/sql-altertable.html#SQL-ALTERTABLE-DESC-SET-DROP-DEFAULT\n //\n // \"The new default value will only apply in subsequent INSERT or UPDATE\n // commands; it does not cause rows already in the table to change.\"\n //\n // This allows support for _changing_ column defaults to any expression,\n // since it does not affect what the replica needs to do.\n const oldSpec = mapPostgresToLiteColumn(table, msg.old, 'ignore-default');\n const newSpec = mapPostgresToLiteColumn(table, msg.new, 'ignore-default');\n\n // The only updates that are relevant are the column name and the data type.\n if (oldName === newName && oldSpec.dataType === newSpec.dataType) {\n this.#lc.info?.(msg.tag, 'no thing to update', oldSpec, newSpec);\n return;\n }\n // If the data type changes, we have to make a new column with the new data type\n // and copy the values over.\n if (oldSpec.dataType !== newSpec.dataType) {\n // Remember (and drop) the indexes that reference the column.\n const indexes = listIndexes(this.#db.db).filter(\n idx => idx.tableName === table && oldName in idx.columns,\n );\n const stmts = indexes.map(idx => `DROP INDEX IF EXISTS 
${id(idx.name)};`);\n const tmpName = `tmp.${newName}`;\n stmts.push(`\n ALTER TABLE ${id(table)} ADD ${id(tmpName)} ${liteColumnDef(newSpec)};\n UPDATE ${id(table)} SET ${id(tmpName)} = ${id(oldName)};\n ALTER TABLE ${id(table)} DROP ${id(oldName)};\n `);\n for (const idx of indexes) {\n // Re-create the indexes to reference the new column.\n idx.columns[tmpName] = idx.columns[oldName];\n delete idx.columns[oldName];\n stmts.push(createLiteIndexStatement(idx));\n }\n this.#db.db.exec(stmts.join(''));\n oldName = tmpName;\n }\n if (oldName !== newName) {\n this.#db.db.exec(\n `ALTER TABLE ${id(table)} RENAME ${id(oldName)} TO ${id(newName)}`,\n );\n }\n\n // Update metadata table\n this.#columnMetadata.update(\n table,\n msg.old.name,\n msg.new.name,\n msg.new.spec,\n );\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, msg.new);\n }\n\n processDropColumn(msg: ColumnDrop) {\n const table = liteTableName(msg.table);\n const {column} = msg;\n this.#db.db.exec(`ALTER TABLE ${id(table)} DROP ${id(column)}`);\n\n // Delete from metadata table\n this.#columnMetadata.deleteColumn(table, column);\n\n this.#bumpVersions(table);\n this.#lc.info?.(msg.tag, table, column);\n }\n\n processDropTable(drop: TableDrop) {\n const name = liteTableName(drop.id);\n this.#db.db.exec(`DROP TABLE IF EXISTS ${id(name)}`);\n\n // Delete from metadata table\n this.#columnMetadata.deleteTable(name);\n\n this.#logResetOp(name);\n this.#lc.info?.(drop.tag, name);\n }\n\n processCreateIndex(create: IndexCreate) {\n const index = mapPostgresToLiteIndex(create.spec);\n this.#db.db.exec(createLiteIndexStatement(index));\n\n // indexes affect tables visibility (e.g. 
sync-ability is gated on\n // having a unique index), so reset pipelines to refresh table schemas.\n this.#logResetOp(index.tableName);\n this.#lc.info?.(create.tag, index.name);\n }\n\n processDropIndex(drop: IndexDrop) {\n const name = liteTableName(drop.id);\n this.#db.db.exec(`DROP INDEX IF EXISTS ${id(name)}`);\n this.#lc.info?.(drop.tag, name);\n }\n\n #bumpVersions(table: string) {\n this.#db.run(\n `UPDATE ${id(table)} SET ${id(ZERO_VERSION_COLUMN_NAME)} = ?`,\n this.#version,\n );\n this.#logResetOp(table);\n }\n\n #logSetOp(table: string, key: LiteRowKey) {\n if (this.#mode === 'serving') {\n logSetOp(this.#db, this.#version, this.#pos++, table, key);\n }\n }\n\n #logDeleteOp(table: string, key: LiteRowKey) {\n if (this.#mode === 'serving') {\n logDeleteOp(this.#db, this.#version, this.#pos++, table, key);\n }\n }\n\n #logTruncateOp(table: string) {\n if (this.#mode === 'serving') {\n logTruncateOp(this.#db, this.#version, table);\n }\n }\n\n #logResetOp(table: string) {\n this.#schemaChanged = true;\n if (this.#mode === 'serving') {\n logResetOp(this.#db, this.#version, table);\n }\n this.#reloadTableSpecs();\n }\n\n /** @returns `true` if the schema was updated. 
*/\n processCommit(commit: MessageCommit, watermark: string): boolean {\n if (watermark !== this.#version) {\n throw new Error(\n `'commit' version ${watermark} does not match 'begin' version ${\n this.#version\n }: ${stringify(commit)}`,\n );\n }\n updateReplicationWatermark(this.#db, watermark);\n\n if (this.#schemaChanged) {\n const start = Date.now();\n this.#db.db.pragma('optimize');\n this.#lc.info?.(\n `PRAGMA optimized after schema change (${Date.now() - start} ms)`,\n );\n }\n\n if (this.#mode !== 'initial-sync') {\n this.#db.commit();\n }\n\n const elapsedMs = Date.now() - this.#startMs;\n this.#lc.debug?.(`Committed tx@${this.#version} (${elapsedMs} ms)`);\n\n return this.#schemaChanged;\n }\n\n abort(lc: LogContext) {\n lc.info?.(`aborting transaction ${this.#version}`);\n this.#db.rollback();\n }\n}\n\nfunction ensureError(err: unknown): Error {\n if (err instanceof Error) {\n return err;\n }\n const error = new Error();\n error.cause = err;\n return error;\n}\n"],"names":[],"mappings":";;;;;;;;;;;;;;;AAmFO,MAAM,gBAAgB;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA,EAKA,kCAAkB,IAAA;AAAA,EAE3B,aAA0C;AAAA,EAE1C;AAAA,EAEA,YACE,IACA,MACA,aACA;AACA,SAAK,MAAM;AACX,SAAK,QAAQ;AACb,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,MAAM,IAAgB,KAAc;AAClC,QAAI,CAAC,KAAK,UAAU;AAClB,WAAK,YAAY,MAAM,EAAE;AAEzB,WAAK,WAAW,YAAY,GAAG;AAE/B,UAAI,EAAE,eAAe,aAAa;AAEhC,WAAG,QAAQ,8BAA8B,KAAK,QAAQ;AACtD,aAAK,aAAa,IAAI,KAAK,QAAQ;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,IAAgB;AACpB,SAAK,MAAM,IAAI,IAAI,WAAA,CAAY;AAAA,EACjC;AAAA;AAAA,EAGA,eACE,IACA,YACqB;AACrB,UAAM,CAAC,MAAM,OAAO,IAAI;AACxB,QAAI,KAAK,UAAU;AACjB,SAAG,QAAQ,YAAY,QAAQ,GAAG,EAAE;AACpC,aAAO;AAAA,IACT;AACA,QAAI;AACF,YAAM,YACJ,SAAS,UACL,WAAW,CAAC,EAAE,kBACd,SAAS,WACP,WAAW,CAAC,EAAE,YACd;AACR,aAAO,KAAK,gBAAgB,IAAI,SAAS,SAAS;AAAA,IACpD,SAAS,GAAG;AACV,WAAK,MAAM,IAAI,CAAC;AAAA,IAClB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,kBACE,IACA,eACA,YACsB;AACtB,UAAM,QAAQ,KAAK,IAAA;AASnB,aAAS,IAAI,KAAK,KAAK;AACrB,UAAI;AACF,eAAO,IAAI;AAAA,UACT;AAAA,UACA,KAAK;AA
AA,UACL,KAAK;AAAA,UACL,KAAK;AAAA,UACL;AAAA,UACA;AAAA,QAAA;AAAA,MAEJ,SAAS,GAAG;AACV,YAAI,aAAa,eAAe,EAAE,SAAS,eAAe;AACxD,aAAG;AAAA,YACD,mBAAmB,KAAK,IAAA,IAAQ,KAAK,gBAAgB,IAAI,CAAC;AAAA,YAG1D;AAAA,UAAA;AAEF;AAAA,QACF;AACA,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,gBACE,IACA,KACA,WACqB;AACrB,QAAI,IAAI,QAAQ,SAAS;AACvB,UAAI,KAAK,YAAY;AACnB,cAAM,IAAI,MAAM,4BAA4B,UAAU,GAAG,CAAC,EAAE;AAAA,MAC9D;AACA,WAAK,aAAa,KAAK;AAAA,QACrB;AAAA,QACA,KAAK,SAAS;AAAA,QACd,IAAI,QAAQ;AAAA,MAAA;AAEd,aAAO;AAAA,IACT;AAGA,UAAM,KAAK,KAAK;AAChB,QAAI,CAAC,IAAI;AACP,YAAM,IAAI;AAAA,QACR,4CAA4C,UAAU,GAAG,CAAC;AAAA,MAAA;AAAA,IAE9D;AAEA,QAAI,IAAI,QAAQ,UAAU;AAExB,WAAK,aAAa;AAElB,aAAO,WAAW,2CAA2C;AAC7D,YAAM,gBAAgB,GAAG,cAAc,KAAK,SAAS;AACrD,aAAO,EAAC,WAAW,cAAA;AAAA,IACrB;AAEA,QAAI,IAAI,QAAQ,YAAY;AAC1B,WAAK,YAAY,MAAM,EAAE;AACzB,WAAK,aAAa;AAClB,aAAO;AAAA,IACT;AAEA,YAAQ,IAAI,KAAA;AAAA,MACV,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,cAAc,GAAG;AACpB;AAAA,MACF,KAAK;AACH,WAAG,gBAAgB,GAAG;AACtB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AAEH,WAAG,OAAO,kCAAkC,GAAG;AAC/C;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF,KAAK;AACH,WAAG,oBAAoB,GAAG;AAC1B;AAAA,MACF,KAAK;AACH,WAAG,kBAAkB,GAAG;AACxB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF,KAAK;AACH,WAAG,mBAAmB,GAAG;AACzB;AAAA,MACF,KAAK;AACH,WAAG,iBAAiB,GAAG;AACvB;AAAA,MACF;AACE,oBAAe;AAAA,IAAA;AAGnB,WAAO;AAAA,EACT;AACF;AAuBA,MAAM,qBAAqB;AAAA,EAChB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,OAAO;AAAA,EACP,iBAAiB;AAAA,EAEjB,YACE,IACA,IACA,MACA,YACA,eACA,YACA;AACA,SAAK,WAAW,KAAK,IAAA;AACrB,SAAK,QAAQ;AACb,SAAK,cAAc;AAEnB,YAAQ,MAAA;AAAA,MACN,KAAK;AAQH,WAAG,gBAAA;AACH;AAAA,MACF,KAAK;AAKH,WAAG,eAAA;AACH;AAAA,MACF,KAAK;AAGH;AAAA,MACF;AACE,oBAAA;AAAA,IAAY;AAEhB,SAAK,MAAM;AACX,SAAK,WAAW;AAChB,SAAK,MAAM,GAAG,YAAY,WAAW,aAAa;AAClD,SAAK,cAAc;AAGnB,SAAK,kBAAkB,KAAK,oBAAoB,YAAY,GAAG,EAAE,CAAC;AAElE,QAAI,KAAK,YAAY,SAAS,G
AAG;AAC/B,WAAK,kBAAA;AAAA,IACP;AAAA,EACF;AAAA,EAEA,oBAAoB;AAClB,SAAK,YAAY,MAAA;AAEjB,UAAM,WAAW,gBAAgB,KAAK,KAAK,KAAK,IAAI,EAAE;AACtD,aAAS,QAAQ,WAAW,KAAK,IAAI,EAAE,GAAG;AACxC,UAAI,CAAC,KAAK,YAAY;AACpB,eAAO;AAAA,UACL,GAAG;AAAA,UACH,YAAY;AAAA,YACV,GAAI,SAAS,IAAI,KAAK,IAAI,GAAG,UAAU,cAAc,CAAA;AAAA,UAAC;AAAA,QACxD;AAAA,MAEJ;AACA,WAAK,YAAY,IAAI,KAAK,MAAM,IAAI;AAAA,IACtC;AAAA,EACF;AAAA,EAEA,WAAW,MAAc;AACvB,WAAO,KAAK,KAAK,YAAY,IAAI,IAAI,GAAG,iBAAiB,IAAI,EAAE;AAAA,EACjE;AAAA,EAEA,QACE,EAAC,KAAK,WACN,EAAC,YACW;AACZ,UAAM,aACJ,SAAS,OAAO,SAAS,SACrB,SAAS,OAAO,UAChB,KAAK,WAAW,cAAc,QAAQ,CAAC,EAAE;AAC/C,QAAI,CAAC,YAAY,QAAQ;AACvB,YAAM,IAAI;AAAA,QACR,2BAA2B,SAAS,IAAI;AAAA,MAAA;AAAA,IAE5C;AAGA,QAAI,YAAY,WAAW,QAAQ;AACjC,aAAO;AAAA,IACT;AACA,UAAM,MAAqC,CAAA;AAC3C,eAAW,OAAO,YAAY;AAC5B,UAAI,GAAG,IAAI,IAAI,GAAG;AAAA,IACpB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,cAAc,QAAuB;AACnC,UAAM,QAAQ,cAAc,OAAO,QAAQ;AAC3C,UAAM,SAAS;AAAA,MACb,OAAO;AAAA,MACP,KAAK,WAAW,KAAK;AAAA,MACrB,KAAK;AAAA,IAAA;AAGP,SAAK,QAAQ,OAAO;AAAA,MAClB,GAAG,OAAO;AAAA,MACV,CAAC,wBAAwB,GAAG,KAAK;AAAA,IAAA,CAClC;AAED,QAAI,OAAO,SAAS,OAAO,QAAQ,WAAW,GAAG;AAQ/C;AAAA,IACF;AACA,UAAM,MAAM,KAAK,QAAQ,QAAQ,MAAM;AACvC,SAAK,UAAU,OAAO,GAAG;AAAA,EAC3B;AAAA,EAEA,QAAQ,OAAe,KAAc;AACnC,UAAM,UAAU,OAAO,KAAK,GAAG,EAAE,IAAI,CAAA,MAAK,GAAG,CAAC,CAAC;AAC/C,SAAK,IAAI;AAAA,MACP;AAAA,+BACyB,GAAG,KAAK,CAAC,KAAK,QAAQ,KAAK,GAAG,CAAC;AAAA,kBAC5C,MAAM,KAAK,EAAC,QAAQ,QAAQ,QAAO,EAAE,KAAK,GAAG,EAAE,KAAK,GAAG,CAAC;AAAA;AAAA,MAEpE,OAAO,OAAO,GAAG;AAAA,IAAA;AAAA,EAErB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,cAAc,QAAuB;AACnC,UAAM,QAAQ,cAAc,OAAO,QAAQ;AAC3C,UAAM,SAAS;AAAA,MACb,OAAO;AAAA,MACP,KAAK,WAAW,KAAK;AAAA,MACrB,KAAK;AAAA,IAAA;AAEP,UAAM,MAAM,EAAC,GAAG,OAAO,KAAK,CAAC,wBAAwB,GAAG,KAAK,SAAA;AAG7D,UAAM,SAAS,OAAO,MAClB,KAAK;AAAA,MACH,QAAQ,OAAO,KAAK,KAAK,WAAW,KAAK,GAAG,KAAK,WAAW;AAAA,MAC5D;AAAA,IAAA,IAEF;AACJ,UAAM,SAAS,KAAK,QAAQ,QAAQ,MAAM;AAE1C,QAAI,QAAQ;AACV,WAAK,aAAa,OAAO,MAAM;AAAA,IACjC;AACA,SAAK,UAAU,OAAO,MAAM;AAE5B,UAAM,UAAU,UAAU;AAC1B
,UAAM,QAAQ,OAAO,KAAK,OAAO,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAC5D,UAAM,WAAW,OAAO,KAAK,GAAG,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAE3D,UAAM,EAAC,QAAA,IAAW,KAAK,IAAI;AAAA,MACzB;AAAA,eACS,GAAG,KAAK,CAAC;AAAA,cACV,SAAS,KAAK,GAAG,CAAC;AAAA,gBAChB,MAAM,KAAK,OAAO,CAAC;AAAA;AAAA,MAE7B,CAAC,GAAG,OAAO,OAAO,GAAG,GAAG,GAAG,OAAO,OAAO,OAAO,CAAC;AAAA,IAAA;AAKnD,QAAI,YAAY,GAAG;AACjB,WAAK,QAAQ,OAAO,GAAG;AAAA,IACzB;AAAA,EACF;AAAA,EAEA,cAAc,KAAoB;AAChC,UAAM,QAAQ,cAAc,IAAI,QAAQ;AACxC,UAAM,SAAS,KAAK;AAAA,MAClB,QAAQ,IAAI,KAAK,KAAK,WAAW,KAAK,GAAG,KAAK,WAAW;AAAA,MACzD;AAAA,IAAA;AAGF,SAAK,QAAQ,OAAO,MAAM;AAE1B,QAAI,KAAK,UAAU,WAAW;AAC5B,WAAK,aAAa,OAAO,MAAM;AAAA,IACjC;AAAA,EACF;AAAA,EAEA,QAAQ,OAAe,QAAoB;AACzC,UAAM,QAAQ,OAAO,KAAK,MAAM,EAAE,IAAI,CAAA,QAAO,GAAG,GAAG,GAAG,CAAC,IAAI;AAC3D,SAAK,IAAI;AAAA,MACP,eAAe,GAAG,KAAK,CAAC,UAAU,MAAM,KAAK,OAAO,CAAC;AAAA,MACrD,OAAO,OAAO,MAAM;AAAA,IAAA;AAAA,EAExB;AAAA,EAEA,gBAAgB,UAA2B;AACzC,eAAW,YAAY,SAAS,WAAW;AACzC,YAAM,QAAQ,cAAc,QAAQ;AAEpC,WAAK,IAAI,IAAI,eAAe,GAAG,KAAK,CAAC,EAAE;AAGvC,WAAK,eAAe,KAAK;AAAA,IAC3B;AAAA,EACF;AAAA,EACA,mBAAmB,QAAqB;AACtC,UAAM,QAAQ,kBAAkB,OAAO,IAAI;AAC3C,SAAK,IAAI,GAAG,KAAK,yBAAyB,KAAK,CAAC;AAGhD,eAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,OAAO,KAAK,OAAO,GAAG;AACpE,WAAK,gBAAgB,OAAO,MAAM,MAAM,SAAS,OAAO;AAAA,IAC1D;AAEA,SAAK,YAAY,MAAM,IAAI;AAC3B,SAAK,IAAI,OAAO,OAAO,KAAK,MAAM,IAAI;AAAA,EACxC;AAAA,EAEA,mBAAmB,QAAqB;AACtC,UAAM,UAAU,cAAc,OAAO,GAAG;AACxC,UAAM,UAAU,cAAc,OAAO,GAAG;AACxC,SAAK,IAAI,GAAG,KAAK,eAAe,GAAG,OAAO,CAAC,cAAc,GAAG,OAAO,CAAC,EAAE;AAGtE,SAAK,gBAAgB,YAAY,SAAS,OAAO;AAEjD,SAAK,cAAc,OAAO;AAC1B,SAAK,YAAY,OAAO;AACxB,SAAK,IAAI,OAAO,OAAO,KAAK,SAAS,OAAO;AAAA,EAC9C;AAAA,EAEA,iBAAiB,KAAgB;AAC/B,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,UAAM,EAAC,SAAQ,IAAI;AACnB,UAAM,OAAO,wBAAwB,OAAO,IAAI,MAAM;AACtD,SAAK,IAAI,GAAG;AAAA,MACV,eAAe,GAAG,KAAK,CAAC,QAAQ,GAAG,IAAI,CAAC,IAAI,cAAc,IAAI,CAAC;AAAA,IAAA;AAIjE,SAAK,gBAAgB,OAAO,OAAO,MAAM,IAAI,OAAO,IAAI;AAExD,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,IAAI,MAAM;AAAA,EAC5C;AAAA,EAEA,oBAAoB,KAA
mB;AACrC,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,QAAI,UAAU,IAAI,IAAI;AACtB,UAAM,UAAU,IAAI,IAAI;AAYxB,UAAM,UAAU,wBAAwB,OAAO,IAAI,KAAK,gBAAgB;AACxE,UAAM,UAAU,wBAAwB,OAAO,IAAI,KAAK,gBAAgB;AAGxE,QAAI,YAAY,WAAW,QAAQ,aAAa,QAAQ,UAAU;AAChE,WAAK,IAAI,OAAO,IAAI,KAAK,sBAAsB,SAAS,OAAO;AAC/D;AAAA,IACF;AAGA,QAAI,QAAQ,aAAa,QAAQ,UAAU;AAEzC,YAAM,UAAU,YAAY,KAAK,IAAI,EAAE,EAAE;AAAA,QACvC,CAAA,QAAO,IAAI,cAAc,SAAS,WAAW,IAAI;AAAA,MAAA;AAEnD,YAAM,QAAQ,QAAQ,IAAI,CAAA,QAAO,wBAAwB,GAAG,IAAI,IAAI,CAAC,GAAG;AACxE,YAAM,UAAU,OAAO,OAAO;AAC9B,YAAM,KAAK;AAAA,sBACK,GAAG,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAC,IAAI,cAAc,OAAO,CAAC;AAAA,iBAC3D,GAAG,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAC,MAAM,GAAG,OAAO,CAAC;AAAA,sBACxC,GAAG,KAAK,CAAC,SAAS,GAAG,OAAO,CAAC;AAAA,SAC1C;AACH,iBAAW,OAAO,SAAS;AAEzB,YAAI,QAAQ,OAAO,IAAI,IAAI,QAAQ,OAAO;AAC1C,eAAO,IAAI,QAAQ,OAAO;AAC1B,cAAM,KAAK,yBAAyB,GAAG,CAAC;AAAA,MAC1C;AACA,WAAK,IAAI,GAAG,KAAK,MAAM,KAAK,EAAE,CAAC;AAC/B,gBAAU;AAAA,IACZ;AACA,QAAI,YAAY,SAAS;AACvB,WAAK,IAAI,GAAG;AAAA,QACV,eAAe,GAAG,KAAK,CAAC,WAAW,GAAG,OAAO,CAAC,OAAO,GAAG,OAAO,CAAC;AAAA,MAAA;AAAA,IAEpE;AAGA,SAAK,gBAAgB;AAAA,MACnB;AAAA,MACA,IAAI,IAAI;AAAA,MACR,IAAI,IAAI;AAAA,MACR,IAAI,IAAI;AAAA,IAAA;AAGV,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,IAAI,GAAG;AAAA,EACzC;AAAA,EAEA,kBAAkB,KAAiB;AACjC,UAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,UAAM,EAAC,WAAU;AACjB,SAAK,IAAI,GAAG,KAAK,eAAe,GAAG,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,EAAE;AAG9D,SAAK,gBAAgB,aAAa,OAAO,MAAM;AAE/C,SAAK,cAAc,KAAK;AACxB,SAAK,IAAI,OAAO,IAAI,KAAK,OAAO,MAAM;AAAA,EACxC;AAAA,EAEA,iBAAiB,MAAiB;AAChC,UAAM,OAAO,cAAc,KAAK,EAAE;AAClC,SAAK,IAAI,GAAG,KAAK,wBAAwB,GAAG,IAAI,CAAC,EAAE;AAGnD,SAAK,gBAAgB,YAAY,IAAI;AAErC,SAAK,YAAY,IAAI;AACrB,SAAK,IAAI,OAAO,KAAK,KAAK,IAAI;AAAA,EAChC;AAAA,EAEA,mBAAmB,QAAqB;AACtC,UAAM,QAAQ,uBAAuB,OAAO,IAAI;AAChD,SAAK,IAAI,GAAG,KAAK,yBAAyB,KAAK,CAAC;AAIhD,SAAK,YAAY,MAAM,SAAS;AAChC,SAAK,IAAI,OAAO,OAAO,KAAK,MAAM,IAAI;AAAA,EACxC;AAAA,EAEA,iBAAiB,MAAiB;AAChC,UAAM,OAAO,cAAc,KAAK,EAAE;AAClC,SAAK,IAAI,GAAG,KAAK,wBAAwB,GAAG,IAAI,CAAC,EAAE;AACnD,SAAK,IAAI,OAAO,KAAK,KAAK,IAAI;AAAA,EAChC
;AAAA,EAEA,cAAc,OAAe;AAC3B,SAAK,IAAI;AAAA,MACP,UAAU,GAAG,KAAK,CAAC,QAAQ,GAAG,wBAAwB,CAAC;AAAA,MACvD,KAAK;AAAA,IAAA;AAEP,SAAK,YAAY,KAAK;AAAA,EACxB;AAAA,EAEA,UAAU,OAAe,KAAiB;AACxC,QAAI,KAAK,UAAU,WAAW;AAC5B,eAAS,KAAK,KAAK,KAAK,UAAU,KAAK,QAAQ,OAAO,GAAG;AAAA,IAC3D;AAAA,EACF;AAAA,EAEA,aAAa,OAAe,KAAiB;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,kBAAY,KAAK,KAAK,KAAK,UAAU,KAAK,QAAQ,OAAO,GAAG;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,eAAe,OAAe;AAC5B,QAAI,KAAK,UAAU,WAAW;AAC5B,oBAAc,KAAK,KAAK,KAAK,UAAU,KAAK;AAAA,IAC9C;AAAA,EACF;AAAA,EAEA,YAAY,OAAe;AACzB,SAAK,iBAAiB;AACtB,QAAI,KAAK,UAAU,WAAW;AAC5B,iBAAW,KAAK,KAAK,KAAK,UAAU,KAAK;AAAA,IAC3C;AACA,SAAK,kBAAA;AAAA,EACP;AAAA;AAAA,EAGA,cAAc,QAAuB,WAA4B;AAC/D,QAAI,cAAc,KAAK,UAAU;AAC/B,YAAM,IAAI;AAAA,QACR,oBAAoB,SAAS,mCAC3B,KAAK,QACP,KAAK,UAAU,MAAM,CAAC;AAAA,MAAA;AAAA,IAE1B;AACA,+BAA2B,KAAK,KAAK,SAAS;AAE9C,QAAI,KAAK,gBAAgB;AACvB,YAAM,QAAQ,KAAK,IAAA;AACnB,WAAK,IAAI,GAAG,OAAO,UAAU;AAC7B,WAAK,IAAI;AAAA,QACP,yCAAyC,KAAK,IAAA,IAAQ,KAAK;AAAA,MAAA;AAAA,IAE/D;AAEA,QAAI,KAAK,UAAU,gBAAgB;AACjC,WAAK,IAAI,OAAA;AAAA,IACX;AAEA,UAAM,YAAY,KAAK,IAAA,IAAQ,KAAK;AACpC,SAAK,IAAI,QAAQ,gBAAgB,KAAK,QAAQ,KAAK,SAAS,MAAM;AAElE,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,IAAgB;AACpB,OAAG,OAAO,wBAAwB,KAAK,QAAQ,EAAE;AACjD,SAAK,IAAI,SAAA;AAAA,EACX;AACF;AAEA,SAAS,YAAY,KAAqB;AACxC,MAAI,eAAe,OAAO;AACxB,WAAO;AAAA,EACT;AACA,QAAM,QAAQ,IAAI,MAAA;AAClB,QAAM,QAAQ;AACd,SAAO;AACT;"}
@@ -1,5 +1,4 @@
1
1
  import * as v from '../../../../../shared/src/valita.ts';
2
- import type { Database } from '../../../../../zqlite/src/db.ts';
3
2
  import type { StatementRunner } from '../../../db/statements.ts';
4
3
  import type { LexiVersion } from '../../../types/lexi-version.ts';
5
4
  import type { LiteRowKey } from '../../../types/lite.ts';
@@ -37,6 +36,7 @@ export declare const SET_OP = "s";
37
36
  export declare const DEL_OP = "d";
38
37
  export declare const TRUNCATE_OP = "t";
39
38
  export declare const RESET_OP = "r";
39
+ export declare const CREATE_CHANGELOG_SCHEMA = "\n CREATE TABLE \"_zero.changeLog2\" (\n \"stateVersion\" TEXT NOT NULL,\n \"pos\" INT NOT NULL,\n \"table\" TEXT NOT NULL,\n \"rowKey\" TEXT NOT NULL,\n \"op\" TEXT NOT NULL,\n PRIMARY KEY(\"stateVersion\", \"pos\"),\n UNIQUE(\"table\", \"rowKey\")\n );\n ";
40
40
  export declare const changeLogEntrySchema: v.Type<{
41
41
  rowKey: Readonly<Record<string, import("../../../../../shared/src/bigint-json.ts").JSONValue>> | null;
42
42
  stateVersion: string;
@@ -45,7 +45,6 @@ export declare const changeLogEntrySchema: v.Type<{
45
45
  op: "d" | "r" | "s" | "t";
46
46
  }>;
47
47
  export type ChangeLogEntry = v.Infer<typeof changeLogEntrySchema>;
48
- export declare function initChangeLog(db: Database): void;
49
48
  export declare function logSetOp(db: StatementRunner, version: LexiVersion, pos: number, table: string, row: LiteRowKey): string;
50
49
  export declare function logDeleteOp(db: StatementRunner, version: LexiVersion, pos: number, table: string, row: LiteRowKey): string;
51
50
  export declare function logTruncateOp(db: StatementRunner, version: LexiVersion, table: string): void;
@@ -1 +1 @@
1
- {"version":3,"file":"change-log.d.ts","sourceRoot":"","sources":["../../../../../../../zero-cache/src/services/replicator/schema/change-log.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,CAAC,MAAM,qCAAqC,CAAC;AACzD,OAAO,KAAK,EAAC,QAAQ,EAAC,MAAM,iCAAiC,CAAC;AAC9D,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,2BAA2B,CAAC;AAC/D,OAAO,KAAK,EAAC,WAAW,EAAC,MAAM,gCAAgC,CAAC;AAChE,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,wBAAwB,CAAC;AAGvD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AAEH,eAAO,MAAM,MAAM,MAAM,CAAC;AAC1B,eAAO,MAAM,MAAM,MAAM,CAAC;AAC1B,eAAO,MAAM,WAAW,MAAM,CAAC;AAC/B,eAAO,MAAM,QAAQ,MAAM,CAAC;AAgC5B,eAAO,MAAM,oBAAoB;;;;;;EAe5B,CAAC;AAEN,MAAM,MAAM,cAAc,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,oBAAoB,CAAC,CAAC;AAElE,wBAAgB,aAAa,CAAC,EAAE,EAAE,QAAQ,QAEzC;AAED,wBAAgB,QAAQ,CACtB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,GAAG,EAAE,MAAM,EACX,KAAK,EAAE,MAAM,EACb,GAAG,EAAE,UAAU,GACd,MAAM,CAER;AAED,wBAAgB,WAAW,CACzB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,GAAG,EAAE,MAAM,EACX,KAAK,EAAE,MAAM,EACb,GAAG,EAAE,UAAU,GACd,MAAM,CAER;AAqBD,wBAAgB,aAAa,CAC3B,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,KAAK,EAAE,MAAM,QAGd;AAED,wBAAgB,UAAU,CACxB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,KAAK,EAAE,MAAM,QAGd"}
1
+ {"version":3,"file":"change-log.d.ts","sourceRoot":"","sources":["../../../../../../../zero-cache/src/services/replicator/schema/change-log.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,CAAC,MAAM,qCAAqC,CAAC;AACzD,OAAO,KAAK,EAAC,eAAe,EAAC,MAAM,2BAA2B,CAAC;AAC/D,OAAO,KAAK,EAAC,WAAW,EAAC,MAAM,gCAAgC,CAAC;AAChE,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,wBAAwB,CAAC;AAGvD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AAEH,eAAO,MAAM,MAAM,MAAM,CAAC;AAC1B,eAAO,MAAM,MAAM,MAAM,CAAC;AAC1B,eAAO,MAAM,WAAW,MAAM,CAAC;AAC/B,eAAO,MAAM,QAAQ,MAAM,CAAC;AAG5B,eAAO,MAAM,uBAAuB,+TA4BjC,CAAC;AAEJ,eAAO,MAAM,oBAAoB;;;;;;EAe5B,CAAC;AAEN,MAAM,MAAM,cAAc,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,oBAAoB,CAAC,CAAC;AAElE,wBAAgB,QAAQ,CACtB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,GAAG,EAAE,MAAM,EACX,KAAK,EAAE,MAAM,EACb,GAAG,EAAE,UAAU,GACd,MAAM,CAER;AAED,wBAAgB,WAAW,CACzB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,GAAG,EAAE,MAAM,EACX,KAAK,EAAE,MAAM,EACb,GAAG,EAAE,UAAU,GACd,MAAM,CAER;AAqBD,wBAAgB,aAAa,CAC3B,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,KAAK,EAAE,MAAM,QAGd;AAED,wBAAgB,UAAU,CACxB,EAAE,EAAE,eAAe,EACnB,OAAO,EAAE,WAAW,EACpB,KAAK,EAAE,MAAM,QAGd"}
@@ -33,7 +33,7 @@ const CREATE_CHANGELOG_SCHEMA = (
33
33
  "op" TEXT NOT NULL,
34
34
  PRIMARY KEY("stateVersion", "pos"),
35
35
  UNIQUE("table", "rowKey")
36
- )
36
+ );
37
37
  `
38
38
  );
39
39
  const changeLogEntrySchema = object({
@@ -47,9 +47,6 @@ const changeLogEntrySchema = object({
47
47
  // Note: sets the rowKey to `null` for table-wide ops / resets
48
48
  rowKey: val.op === "t" || val.op === "r" ? null : parse(parse$1(val.rowKey), jsonObjectSchema)
49
49
  }));
50
- function initChangeLog(db) {
51
- db.exec(CREATE_CHANGELOG_SCHEMA);
52
- }
53
50
  function logSetOp(db, version, pos, table, row) {
54
51
  return logRowOp(db, version, pos, table, row, SET_OP);
55
52
  }
@@ -91,12 +88,12 @@ function logTableWideOp(db, version, table, op) {
91
88
  );
92
89
  }
93
90
  export {
91
+ CREATE_CHANGELOG_SCHEMA,
94
92
  DEL_OP,
95
93
  RESET_OP,
96
94
  SET_OP,
97
95
  TRUNCATE_OP,
98
96
  changeLogEntrySchema,
99
- initChangeLog,
100
97
  logDeleteOp,
101
98
  logResetOp,
102
99
  logSetOp,
@@ -1 +1 @@
1
- {"version":3,"file":"change-log.js","sources":["../../../../../../../zero-cache/src/services/replicator/schema/change-log.ts"],"sourcesContent":["import {\n jsonObjectSchema,\n parse,\n stringify,\n} from '../../../../../shared/src/bigint-json.ts';\nimport * as v from '../../../../../shared/src/valita.ts';\nimport type {Database} from '../../../../../zqlite/src/db.ts';\nimport type {StatementRunner} from '../../../db/statements.ts';\nimport type {LexiVersion} from '../../../types/lexi-version.ts';\nimport type {LiteRowKey} from '../../../types/lite.ts';\nimport {normalizedKeyOrder} from '../../../types/row-key.ts';\n\n/**\n * The Change Log tracks the last operation (set or delete) for each row in the\n * data base, ordered by state version; in other words, a cross-table\n * index of row changes ordered by version. This facilitates a minimal \"diff\"\n * of row changes needed to advance a pipeline from one state version to another.\n *\n * The Change Log stores identifiers only, i.e. it does not store contents.\n * A database snapshot at the previous version can be used to query a row's\n * old contents, if any, and the current snapshot can be used to query a row's\n * new contents. (In the common case, the new contents will have just been applied\n * and thus has a high likelihood of being in the SQLite cache.)\n *\n * There are two table-wide operations:\n * - `t` corresponds to the postgres `TRUNCATE` operation\n * - `r` represents any schema (i.e. column) change\n *\n * For both operations, the corresponding row changes are not explicitly included\n * in the change log. The consumer has the option of simulating them be reading\n * from pre- and post- snapshots, or resetting their state entirely with the current\n * snapshot.\n *\n * To achieve the desired ordering semantics when processing tables that have been\n * truncated, reset, and modified, the \"rowKey\" is set to `null` for resets and\n * the empty string `\"\"` for truncates. 
This means that resets will be encountered\n * before truncates, which will be processed before any subsequent row changes.\n *\n * This ordering is chosen because resets are currently the more \"destructive\" op\n * and result in aborting the processing (and starting from scratch); doing this\n * earlier reduces wasted work.\n */\n\nexport const SET_OP = 's';\nexport const DEL_OP = 'd';\nexport const TRUNCATE_OP = 't';\nexport const RESET_OP = 'r';\n\nconst CREATE_CHANGELOG_SCHEMA =\n // stateVersion : a.k.a. row version\n // pos : order in which to process the change (within the version)\n // table : The table associated with the change\n // rowKey : JSON row key for a row change. For table-wide changes RESET\n // and TRUNCATE, there is no associated row; instead, `pos` is\n // set to -1 and the rowKey is set to the stateVersion,\n // guaranteeing when attempting to process the transaction,\n // the pipeline is reset (and the change log traversal\n // aborted).\n // op : 's' for set (insert/update)\n // : 'd' for delete\n // : 'r' for table reset (schema change)\n // : 't' for table truncation (which also resets the pipeline)\n //\n // Naming note: To maintain compatibility between a new replication-manager\n // and old view-syncers, the previous _zero.changeLog table is preserved\n // and its replacement given a new name \"changeLog2\".\n `\n CREATE TABLE \"_zero.changeLog2\" (\n \"stateVersion\" TEXT NOT NULL,\n \"pos\" INT NOT NULL,\n \"table\" TEXT NOT NULL,\n \"rowKey\" TEXT NOT NULL,\n \"op\" TEXT NOT NULL,\n PRIMARY KEY(\"stateVersion\", \"pos\"),\n UNIQUE(\"table\", \"rowKey\")\n )\n `;\n\nexport const changeLogEntrySchema = v\n .object({\n stateVersion: v.string(),\n pos: v.number(),\n table: v.string(),\n rowKey: v.string(),\n op: v.literalUnion(SET_OP, DEL_OP, TRUNCATE_OP, RESET_OP),\n })\n .map(val => ({\n ...val,\n // Note: sets the rowKey to `null` for table-wide ops / resets\n rowKey:\n val.op === 't' || val.op === 'r'\n ? 
null\n : v.parse(parse(val.rowKey), jsonObjectSchema),\n }));\n\nexport type ChangeLogEntry = v.Infer<typeof changeLogEntrySchema>;\n\nexport function initChangeLog(db: Database) {\n db.exec(CREATE_CHANGELOG_SCHEMA);\n}\n\nexport function logSetOp(\n db: StatementRunner,\n version: LexiVersion,\n pos: number,\n table: string,\n row: LiteRowKey,\n): string {\n return logRowOp(db, version, pos, table, row, SET_OP);\n}\n\nexport function logDeleteOp(\n db: StatementRunner,\n version: LexiVersion,\n pos: number,\n table: string,\n row: LiteRowKey,\n): string {\n return logRowOp(db, version, pos, table, row, DEL_OP);\n}\n\nfunction logRowOp(\n db: StatementRunner,\n version: LexiVersion,\n pos: number,\n table: string,\n row: LiteRowKey,\n op: string,\n): string {\n const rowKey = stringify(normalizedKeyOrder(row));\n db.run(\n `\n INSERT OR REPLACE INTO \"_zero.changeLog2\" \n (stateVersion, pos, \"table\", rowKey, op)\n VALUES (@version, @pos, @table, JSON(@rowKey), @op)\n `,\n {version, pos, table, rowKey, op},\n );\n return rowKey;\n}\nexport function logTruncateOp(\n db: StatementRunner,\n version: LexiVersion,\n table: string,\n) {\n logTableWideOp(db, version, table, TRUNCATE_OP);\n}\n\nexport function logResetOp(\n db: StatementRunner,\n version: LexiVersion,\n table: string,\n) {\n logTableWideOp(db, version, table, RESET_OP);\n}\n\nfunction logTableWideOp(\n db: StatementRunner,\n version: LexiVersion,\n table: string,\n op: 't' | 'r',\n) {\n // Delete any existing changes for the table (in this version) since the\n // table wide op invalidates them.\n db.run(\n `\n DELETE FROM \"_zero.changeLog2\" WHERE stateVersion = ? 
AND \"table\" = ?\n `,\n version,\n table,\n );\n\n db.run(\n `\n INSERT OR REPLACE INTO \"_zero.changeLog2\" (stateVersion, pos, \"table\", rowKey, op) \n VALUES (@version, -1, @table, @version, @op)\n `,\n {version, table, op},\n );\n}\n"],"names":["v.object","v.string","v.number","v.literalUnion","v.parse","parse"],"mappings":";;;;AA2CO,MAAM,SAAS;AACf,MAAM,SAAS;AACf,MAAM,cAAc;AACpB,MAAM,WAAW;AAExB,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAYK,MAAM,uBAAuBA,OAC1B;AAAA,EACN,cAAcC,OAAE;AAAA,EAChB,KAAKC,OAAE;AAAA,EACP,OAAOD,OAAE;AAAA,EACT,QAAQA,OAAE;AAAA,EACV,IAAIE,aAAe,QAAQ,QAAQ,aAAa,QAAQ;AAC1D,CAAC,EACA,IAAI,CAAA,SAAQ;AAAA,EACX,GAAG;AAAA;AAAA,EAEH,QACE,IAAI,OAAO,OAAO,IAAI,OAAO,MACzB,OACAC,MAAQC,QAAM,IAAI,MAAM,GAAG,gBAAgB;AACnD,EAAE;AAIG,SAAS,cAAc,IAAc;AAC1C,KAAG,KAAK,uBAAuB;AACjC;AAEO,SAAS,SACd,IACA,SACA,KACA,OACA,KACQ;AACR,SAAO,SAAS,IAAI,SAAS,KAAK,OAAO,KAAK,MAAM;AACtD;AAEO,SAAS,YACd,IACA,SACA,KACA,OACA,KACQ;AACR,SAAO,SAAS,IAAI,SAAS,KAAK,OAAO,KAAK,MAAM;AACtD;AAEA,SAAS,SACP,IACA,SACA,KACA,OACA,KACA,IACQ;AACR,QAAM,SAAS,UAAU,mBAAmB,GAAG,CAAC;AAChD,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA,IAKA,EAAC,SAAS,KAAK,OAAO,QAAQ,GAAA;AAAA,EAAE;AAElC,SAAO;AACT;AACO,SAAS,cACd,IACA,SACA,OACA;AACA,iBAAe,IAAI,SAAS,OAAO,WAAW;AAChD;AAEO,SAAS,WACd,IACA,SACA,OACA;AACA,iBAAe,IAAI,SAAS,OAAO,QAAQ;AAC7C;AAEA,SAAS,eACP,IACA,SACA,OACA,IACA;AAGA,KAAG;AAAA,IACD;AAAA;AAAA;AAAA,IAGA;AAAA,IACA;AAAA,EAAA;AAGF,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA,IAIA,EAAC,SAAS,OAAO,GAAA;AAAA,EAAE;AAEvB;"}
1
+ {"version":3,"file":"change-log.js","sources":["../../../../../../../zero-cache/src/services/replicator/schema/change-log.ts"],"sourcesContent":["import {\n jsonObjectSchema,\n parse,\n stringify,\n} from '../../../../../shared/src/bigint-json.ts';\nimport * as v from '../../../../../shared/src/valita.ts';\nimport type {StatementRunner} from '../../../db/statements.ts';\nimport type {LexiVersion} from '../../../types/lexi-version.ts';\nimport type {LiteRowKey} from '../../../types/lite.ts';\nimport {normalizedKeyOrder} from '../../../types/row-key.ts';\n\n/**\n * The Change Log tracks the last operation (set or delete) for each row in the\n * data base, ordered by state version; in other words, a cross-table\n * index of row changes ordered by version. This facilitates a minimal \"diff\"\n * of row changes needed to advance a pipeline from one state version to another.\n *\n * The Change Log stores identifiers only, i.e. it does not store contents.\n * A database snapshot at the previous version can be used to query a row's\n * old contents, if any, and the current snapshot can be used to query a row's\n * new contents. (In the common case, the new contents will have just been applied\n * and thus has a high likelihood of being in the SQLite cache.)\n *\n * There are two table-wide operations:\n * - `t` corresponds to the postgres `TRUNCATE` operation\n * - `r` represents any schema (i.e. column) change\n *\n * For both operations, the corresponding row changes are not explicitly included\n * in the change log. The consumer has the option of simulating them be reading\n * from pre- and post- snapshots, or resetting their state entirely with the current\n * snapshot.\n *\n * To achieve the desired ordering semantics when processing tables that have been\n * truncated, reset, and modified, the \"rowKey\" is set to `null` for resets and\n * the empty string `\"\"` for truncates. 
This means that resets will be encountered\n * before truncates, which will be processed before any subsequent row changes.\n *\n * This ordering is chosen because resets are currently the more \"destructive\" op\n * and result in aborting the processing (and starting from scratch); doing this\n * earlier reduces wasted work.\n */\n\nexport const SET_OP = 's';\nexport const DEL_OP = 'd';\nexport const TRUNCATE_OP = 't';\nexport const RESET_OP = 'r';\n\n// Exported for testing (and migrations)\nexport const CREATE_CHANGELOG_SCHEMA =\n // stateVersion : a.k.a. row version\n // pos : order in which to process the change (within the version)\n // table : The table associated with the change\n // rowKey : JSON row key for a row change. For table-wide changes RESET\n // and TRUNCATE, there is no associated row; instead, `pos` is\n // set to -1 and the rowKey is set to the stateVersion,\n // guaranteeing when attempting to process the transaction,\n // the pipeline is reset (and the change log traversal\n // aborted).\n // op : 's' for set (insert/update)\n // : 'd' for delete\n // : 'r' for table reset (schema change)\n // : 't' for table truncation (which also resets the pipeline)\n //\n // Naming note: To maintain compatibility between a new replication-manager\n // and old view-syncers, the previous _zero.changeLog table is preserved\n // and its replacement given a new name \"changeLog2\".\n `\n CREATE TABLE \"_zero.changeLog2\" (\n \"stateVersion\" TEXT NOT NULL,\n \"pos\" INT NOT NULL,\n \"table\" TEXT NOT NULL,\n \"rowKey\" TEXT NOT NULL,\n \"op\" TEXT NOT NULL,\n PRIMARY KEY(\"stateVersion\", \"pos\"),\n UNIQUE(\"table\", \"rowKey\")\n );\n `;\n\nexport const changeLogEntrySchema = v\n .object({\n stateVersion: v.string(),\n pos: v.number(),\n table: v.string(),\n rowKey: v.string(),\n op: v.literalUnion(SET_OP, DEL_OP, TRUNCATE_OP, RESET_OP),\n })\n .map(val => ({\n ...val,\n // Note: sets the rowKey to `null` for table-wide ops / resets\n rowKey:\n val.op === 
't' || val.op === 'r'\n ? null\n : v.parse(parse(val.rowKey), jsonObjectSchema),\n }));\n\nexport type ChangeLogEntry = v.Infer<typeof changeLogEntrySchema>;\n\nexport function logSetOp(\n db: StatementRunner,\n version: LexiVersion,\n pos: number,\n table: string,\n row: LiteRowKey,\n): string {\n return logRowOp(db, version, pos, table, row, SET_OP);\n}\n\nexport function logDeleteOp(\n db: StatementRunner,\n version: LexiVersion,\n pos: number,\n table: string,\n row: LiteRowKey,\n): string {\n return logRowOp(db, version, pos, table, row, DEL_OP);\n}\n\nfunction logRowOp(\n db: StatementRunner,\n version: LexiVersion,\n pos: number,\n table: string,\n row: LiteRowKey,\n op: string,\n): string {\n const rowKey = stringify(normalizedKeyOrder(row));\n db.run(\n `\n INSERT OR REPLACE INTO \"_zero.changeLog2\" \n (stateVersion, pos, \"table\", rowKey, op)\n VALUES (@version, @pos, @table, JSON(@rowKey), @op)\n `,\n {version, pos, table, rowKey, op},\n );\n return rowKey;\n}\nexport function logTruncateOp(\n db: StatementRunner,\n version: LexiVersion,\n table: string,\n) {\n logTableWideOp(db, version, table, TRUNCATE_OP);\n}\n\nexport function logResetOp(\n db: StatementRunner,\n version: LexiVersion,\n table: string,\n) {\n logTableWideOp(db, version, table, RESET_OP);\n}\n\nfunction logTableWideOp(\n db: StatementRunner,\n version: LexiVersion,\n table: string,\n op: 't' | 'r',\n) {\n // Delete any existing changes for the table (in this version) since the\n // table wide op invalidates them.\n db.run(\n `\n DELETE FROM \"_zero.changeLog2\" WHERE stateVersion = ? 
AND \"table\" = ?\n `,\n version,\n table,\n );\n\n db.run(\n `\n INSERT OR REPLACE INTO \"_zero.changeLog2\" (stateVersion, pos, \"table\", rowKey, op) \n VALUES (@version, -1, @table, @version, @op)\n `,\n {version, table, op},\n );\n}\n"],"names":["v.object","v.string","v.number","v.literalUnion","v.parse","parse"],"mappings":";;;;AA0CO,MAAM,SAAS;AACf,MAAM,SAAS;AACf,MAAM,cAAc;AACpB,MAAM,WAAW;AAGjB,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAYK,MAAM,uBAAuBA,OAC1B;AAAA,EACN,cAAcC,OAAE;AAAA,EAChB,KAAKC,OAAE;AAAA,EACP,OAAOD,OAAE;AAAA,EACT,QAAQA,OAAE;AAAA,EACV,IAAIE,aAAe,QAAQ,QAAQ,aAAa,QAAQ;AAC1D,CAAC,EACA,IAAI,CAAA,SAAQ;AAAA,EACX,GAAG;AAAA;AAAA,EAEH,QACE,IAAI,OAAO,OAAO,IAAI,OAAO,MACzB,OACAC,MAAQC,QAAM,IAAI,MAAM,GAAG,gBAAgB;AACnD,EAAE;AAIG,SAAS,SACd,IACA,SACA,KACA,OACA,KACQ;AACR,SAAO,SAAS,IAAI,SAAS,KAAK,OAAO,KAAK,MAAM;AACtD;AAEO,SAAS,YACd,IACA,SACA,KACA,OACA,KACQ;AACR,SAAO,SAAS,IAAI,SAAS,KAAK,OAAO,KAAK,MAAM;AACtD;AAEA,SAAS,SACP,IACA,SACA,KACA,OACA,KACA,IACQ;AACR,QAAM,SAAS,UAAU,mBAAmB,GAAG,CAAC;AAChD,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA,IAKA,EAAC,SAAS,KAAK,OAAO,QAAQ,GAAA;AAAA,EAAE;AAElC,SAAO;AACT;AACO,SAAS,cACd,IACA,SACA,OACA;AACA,iBAAe,IAAI,SAAS,OAAO,WAAW;AAChD;AAEO,SAAS,WACd,IACA,SACA,OACA;AACA,iBAAe,IAAI,SAAS,OAAO,QAAQ;AAC7C;AAEA,SAAS,eACP,IACA,SACA,OACA,IACA;AAGA,KAAG;AAAA,IACD;AAAA;AAAA;AAAA,IAGA;AAAA,IACA;AAAA,EAAA;AAGF,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA,IAIA,EAAC,SAAS,OAAO,GAAA;AAAA,EAAE;AAEvB;"}
@@ -8,8 +8,8 @@
8
8
  * This table stores that metadata separately, allowing SQLite columns to use
9
9
  * plain type names while preserving all necessary upstream type information.
10
10
  */
11
- import type { Database } from '../../../../zqlite/src/db.ts';
12
- import type { ColumnSpec, LiteTableSpec } from '../../db/specs.ts';
11
+ import type { Database } from '../../../../../zqlite/src/db.ts';
12
+ import type { ColumnSpec, LiteTableSpec } from '../../../db/specs.ts';
13
13
  /**
14
14
  * Structured column metadata, replacing the old pipe-delimited string format.
15
15
  */
@@ -48,7 +48,7 @@ export declare class ColumnMetadataStore {
48
48
  hasTable(): boolean;
49
49
  /**
50
50
  * Populates metadata table from existing tables that use pipe notation.
51
- * This is used during migration v6 to backfill the metadata table.
51
+ * This is used during migration v8 to backfill the metadata table.
52
52
  */
53
53
  populateFromExistingTables(tables: LiteTableSpec[]): void;
54
54
  }
@@ -0,0 +1 @@
1
+ {"version":3,"file":"column-metadata.d.ts","sourceRoot":"","sources":["../../../../../../../zero-cache/src/services/replicator/schema/column-metadata.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,OAAO,KAAK,EAAC,QAAQ,EAAY,MAAM,iCAAiC,CAAC;AAEzE,OAAO,KAAK,EAAC,UAAU,EAAE,aAAa,EAAC,MAAM,sBAAsB,CAAC;AASpE;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,2EAA2E;IAC3E,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,EAAE,OAAO,CAAC;IACnB,MAAM,EAAE,OAAO,CAAC;IAChB,OAAO,EAAE,OAAO,CAAC;IACjB,sDAAsD;IACtD,kBAAkB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACpC;AAUD,eAAO,MAAM,4BAA4B,0UAWxC,CAAC;AAEF;;;;;;GAMG;AACH,qBAAa,mBAAmB;;IAY9B,OAAO;IAqDP;;;OAGG;IACH,MAAM,CAAC,WAAW,CAAC,EAAE,EAAE,QAAQ,GAAG,mBAAmB,GAAG,SAAS;IAoBjE,MAAM,CAAC,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,IAAI,EAAE,UAAU,GAAG,IAAI;IAqBrE,MAAM,CACJ,SAAS,EAAE,MAAM,EACjB,aAAa,EAAE,MAAM,EACrB,aAAa,EAAE,MAAM,EACrB,IAAI,EAAE,UAAU,GACf,IAAI;IAcP,YAAY,CAAC,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,GAAG,IAAI;IAIzD,WAAW,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI;IAIpC,WAAW,CAAC,YAAY,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,IAAI;IAI7D,SAAS,CAAC,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,GAAG,cAAc,GAAG,SAAS;IAkB5E,QAAQ,CAAC,SAAS,EAAE,MAAM,GAAG,GAAG,CAAC,MAAM,EAAE,cAAc,CAAC;IAmBxD,QAAQ,IAAI,OAAO;IAKnB;;;OAGG;IACH,0BAA0B,CAAC,MAAM,EAAE,aAAa,EAAE,GAAG,IAAI;CAW1D;AAED;;;GAGG;AACH,wBAAgB,wBAAwB,CACtC,cAAc,EAAE,MAAM,EACtB,kBAAkB,CAAC,EAAE,MAAM,GAAG,IAAI,GACjC,cAAc,CAiBhB;AAED;;;GAGG;AACH,wBAAgB,wBAAwB,CAAC,QAAQ,EAAE,cAAc,GAAG,MAAM,CAOzE;AAED;;;;;GAKG;AACH,wBAAgB,sBAAsB,CAAC,IAAI,EAAE,UAAU,GAAG,cAAc,CAQvE"}
@@ -1,5 +1,5 @@
1
- import { isArrayColumn, isEnumColumn } from "../../db/pg-to-lite.js";
2
- import { liteTypeString, upstreamDataType, isArray, isEnum, nullableUpstream } from "../../types/lite.js";
1
+ import { isArrayColumn, isEnumColumn } from "../../../db/pg-to-lite.js";
2
+ import { liteTypeString, upstreamDataType, isArray, isEnum, nullableUpstream } from "../../../types/lite.js";
3
3
  const CREATE_COLUMN_METADATA_TABLE = `
4
4
  CREATE TABLE "_zero.column_metadata" (
5
5
  table_name TEXT NOT NULL,
@@ -155,7 +155,7 @@ class ColumnMetadataStore {
155
155
  }
156
156
  /**
157
157
  * Populates metadata table from existing tables that use pipe notation.
158
- * This is used during migration v6 to backfill the metadata table.
158
+ * This is used during migration v8 to backfill the metadata table.
159
159
  */
160
160
  populateFromExistingTables(tables) {
161
161
  for (const table of tables) {