@syncular/client 0.0.1-60

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (176) hide show
  1. package/dist/blobs/index.d.ts +7 -0
  2. package/dist/blobs/index.d.ts.map +1 -0
  3. package/dist/blobs/index.js +7 -0
  4. package/dist/blobs/index.js.map +1 -0
  5. package/dist/blobs/manager.d.ts +345 -0
  6. package/dist/blobs/manager.d.ts.map +1 -0
  7. package/dist/blobs/manager.js +749 -0
  8. package/dist/blobs/manager.js.map +1 -0
  9. package/dist/blobs/migrate.d.ts +14 -0
  10. package/dist/blobs/migrate.d.ts.map +1 -0
  11. package/dist/blobs/migrate.js +59 -0
  12. package/dist/blobs/migrate.js.map +1 -0
  13. package/dist/blobs/types.d.ts +62 -0
  14. package/dist/blobs/types.d.ts.map +1 -0
  15. package/dist/blobs/types.js +5 -0
  16. package/dist/blobs/types.js.map +1 -0
  17. package/dist/client.d.ts +338 -0
  18. package/dist/client.d.ts.map +1 -0
  19. package/dist/client.js +834 -0
  20. package/dist/client.js.map +1 -0
  21. package/dist/conflicts.d.ts +31 -0
  22. package/dist/conflicts.d.ts.map +1 -0
  23. package/dist/conflicts.js +118 -0
  24. package/dist/conflicts.js.map +1 -0
  25. package/dist/create-client.d.ts +115 -0
  26. package/dist/create-client.d.ts.map +1 -0
  27. package/dist/create-client.js +162 -0
  28. package/dist/create-client.js.map +1 -0
  29. package/dist/engine/SyncEngine.d.ts +215 -0
  30. package/dist/engine/SyncEngine.d.ts.map +1 -0
  31. package/dist/engine/SyncEngine.js +1066 -0
  32. package/dist/engine/SyncEngine.js.map +1 -0
  33. package/dist/engine/index.d.ts +6 -0
  34. package/dist/engine/index.d.ts.map +1 -0
  35. package/dist/engine/index.js +6 -0
  36. package/dist/engine/index.js.map +1 -0
  37. package/dist/engine/types.d.ts +230 -0
  38. package/dist/engine/types.d.ts.map +1 -0
  39. package/dist/engine/types.js +7 -0
  40. package/dist/engine/types.js.map +1 -0
  41. package/dist/handlers/create-handler.d.ts +110 -0
  42. package/dist/handlers/create-handler.d.ts.map +1 -0
  43. package/dist/handlers/create-handler.js +140 -0
  44. package/dist/handlers/create-handler.js.map +1 -0
  45. package/dist/handlers/registry.d.ts +15 -0
  46. package/dist/handlers/registry.d.ts.map +1 -0
  47. package/dist/handlers/registry.js +29 -0
  48. package/dist/handlers/registry.js.map +1 -0
  49. package/dist/handlers/types.d.ts +83 -0
  50. package/dist/handlers/types.d.ts.map +1 -0
  51. package/dist/handlers/types.js +5 -0
  52. package/dist/handlers/types.js.map +1 -0
  53. package/dist/index.d.ts +24 -0
  54. package/dist/index.d.ts.map +1 -0
  55. package/dist/index.js +24 -0
  56. package/dist/index.js.map +1 -0
  57. package/dist/migrate.d.ts +19 -0
  58. package/dist/migrate.d.ts.map +1 -0
  59. package/dist/migrate.js +106 -0
  60. package/dist/migrate.js.map +1 -0
  61. package/dist/mutations.d.ts +138 -0
  62. package/dist/mutations.d.ts.map +1 -0
  63. package/dist/mutations.js +611 -0
  64. package/dist/mutations.js.map +1 -0
  65. package/dist/outbox.d.ts +112 -0
  66. package/dist/outbox.d.ts.map +1 -0
  67. package/dist/outbox.js +304 -0
  68. package/dist/outbox.js.map +1 -0
  69. package/dist/plugins/incrementing-version.d.ts +34 -0
  70. package/dist/plugins/incrementing-version.d.ts.map +1 -0
  71. package/dist/plugins/incrementing-version.js +83 -0
  72. package/dist/plugins/incrementing-version.js.map +1 -0
  73. package/dist/plugins/index.d.ts +3 -0
  74. package/dist/plugins/index.d.ts.map +1 -0
  75. package/dist/plugins/index.js +3 -0
  76. package/dist/plugins/index.js.map +1 -0
  77. package/dist/plugins/types.d.ts +49 -0
  78. package/dist/plugins/types.d.ts.map +1 -0
  79. package/dist/plugins/types.js +15 -0
  80. package/dist/plugins/types.js.map +1 -0
  81. package/dist/proxy/connection.d.ts +33 -0
  82. package/dist/proxy/connection.d.ts.map +1 -0
  83. package/dist/proxy/connection.js +153 -0
  84. package/dist/proxy/connection.js.map +1 -0
  85. package/dist/proxy/dialect.d.ts +46 -0
  86. package/dist/proxy/dialect.d.ts.map +1 -0
  87. package/dist/proxy/dialect.js +58 -0
  88. package/dist/proxy/dialect.js.map +1 -0
  89. package/dist/proxy/driver.d.ts +42 -0
  90. package/dist/proxy/driver.d.ts.map +1 -0
  91. package/dist/proxy/driver.js +78 -0
  92. package/dist/proxy/driver.js.map +1 -0
  93. package/dist/proxy/index.d.ts +10 -0
  94. package/dist/proxy/index.d.ts.map +1 -0
  95. package/dist/proxy/index.js +10 -0
  96. package/dist/proxy/index.js.map +1 -0
  97. package/dist/proxy/mutations.d.ts +9 -0
  98. package/dist/proxy/mutations.d.ts.map +1 -0
  99. package/dist/proxy/mutations.js +11 -0
  100. package/dist/proxy/mutations.js.map +1 -0
  101. package/dist/pull-engine.d.ts +45 -0
  102. package/dist/pull-engine.d.ts.map +1 -0
  103. package/dist/pull-engine.js +391 -0
  104. package/dist/pull-engine.js.map +1 -0
  105. package/dist/push-engine.d.ts +18 -0
  106. package/dist/push-engine.d.ts.map +1 -0
  107. package/dist/push-engine.js +155 -0
  108. package/dist/push-engine.js.map +1 -0
  109. package/dist/query/FingerprintCollector.d.ts +18 -0
  110. package/dist/query/FingerprintCollector.d.ts.map +1 -0
  111. package/dist/query/FingerprintCollector.js +28 -0
  112. package/dist/query/FingerprintCollector.js.map +1 -0
  113. package/dist/query/QueryContext.d.ts +33 -0
  114. package/dist/query/QueryContext.d.ts.map +1 -0
  115. package/dist/query/QueryContext.js +16 -0
  116. package/dist/query/QueryContext.js.map +1 -0
  117. package/dist/query/fingerprint.d.ts +61 -0
  118. package/dist/query/fingerprint.d.ts.map +1 -0
  119. package/dist/query/fingerprint.js +91 -0
  120. package/dist/query/fingerprint.js.map +1 -0
  121. package/dist/query/index.d.ts +7 -0
  122. package/dist/query/index.d.ts.map +1 -0
  123. package/dist/query/index.js +7 -0
  124. package/dist/query/index.js.map +1 -0
  125. package/dist/query/tracked-select.d.ts +18 -0
  126. package/dist/query/tracked-select.d.ts.map +1 -0
  127. package/dist/query/tracked-select.js +90 -0
  128. package/dist/query/tracked-select.js.map +1 -0
  129. package/dist/schema.d.ts +83 -0
  130. package/dist/schema.d.ts.map +1 -0
  131. package/dist/schema.js +7 -0
  132. package/dist/schema.js.map +1 -0
  133. package/dist/sync-loop.d.ts +32 -0
  134. package/dist/sync-loop.d.ts.map +1 -0
  135. package/dist/sync-loop.js +249 -0
  136. package/dist/sync-loop.js.map +1 -0
  137. package/dist/utils/id.d.ts +8 -0
  138. package/dist/utils/id.d.ts.map +1 -0
  139. package/dist/utils/id.js +19 -0
  140. package/dist/utils/id.js.map +1 -0
  141. package/package.json +58 -0
  142. package/src/blobs/index.ts +7 -0
  143. package/src/blobs/manager.ts +1027 -0
  144. package/src/blobs/migrate.ts +67 -0
  145. package/src/blobs/types.ts +84 -0
  146. package/src/client.ts +1222 -0
  147. package/src/conflicts.ts +180 -0
  148. package/src/create-client.ts +297 -0
  149. package/src/engine/SyncEngine.ts +1337 -0
  150. package/src/engine/index.ts +6 -0
  151. package/src/engine/types.ts +268 -0
  152. package/src/handlers/create-handler.ts +287 -0
  153. package/src/handlers/registry.ts +36 -0
  154. package/src/handlers/types.ts +102 -0
  155. package/src/index.ts +25 -0
  156. package/src/migrate.ts +122 -0
  157. package/src/mutations.ts +926 -0
  158. package/src/outbox.ts +397 -0
  159. package/src/plugins/incrementing-version.ts +133 -0
  160. package/src/plugins/index.ts +2 -0
  161. package/src/plugins/types.ts +63 -0
  162. package/src/proxy/connection.ts +191 -0
  163. package/src/proxy/dialect.ts +76 -0
  164. package/src/proxy/driver.ts +126 -0
  165. package/src/proxy/index.ts +10 -0
  166. package/src/proxy/mutations.ts +18 -0
  167. package/src/pull-engine.ts +518 -0
  168. package/src/push-engine.ts +201 -0
  169. package/src/query/FingerprintCollector.ts +29 -0
  170. package/src/query/QueryContext.ts +54 -0
  171. package/src/query/fingerprint.ts +109 -0
  172. package/src/query/index.ts +10 -0
  173. package/src/query/tracked-select.ts +139 -0
  174. package/src/schema.ts +94 -0
  175. package/src/sync-loop.ts +368 -0
  176. package/src/utils/id.ts +20 -0
@@ -0,0 +1,518 @@
1
+ /**
2
+ * @syncular/client - Sync pull engine
3
+ */
4
+
5
+ import type {
6
+ SyncBootstrapState,
7
+ SyncPullRequest,
8
+ SyncPullResponse,
9
+ SyncSubscriptionRequest,
10
+ SyncTransport,
11
+ } from '@syncular/core';
12
+ import { type Kysely, sql } from 'kysely';
13
+ import type { ClientTableRegistry } from './handlers/registry';
14
+ import type {
15
+ SyncClientPlugin,
16
+ SyncClientPluginContext,
17
+ } from './plugins/types';
18
+ import type { SyncClientDb, SyncSubscriptionStateTable } from './schema';
19
+
20
+ // Simple JSON serialization cache to avoid repeated stringification
21
+ // of the same objects during pull operations
22
+ const jsonCache = new WeakMap<object, string>();
23
+ const jsonCacheStats = { hits: 0, misses: 0 };
24
+ const SNAPSHOT_CHUNK_CONCURRENCY = 8;
25
+
26
+ function serializeJsonCached(obj: object): string {
27
+ if (obj === null || typeof obj !== 'object') {
28
+ return JSON.stringify(obj);
29
+ }
30
+ const cached = jsonCache.get(obj);
31
+ if (cached !== undefined) {
32
+ jsonCacheStats.hits++;
33
+ return cached;
34
+ }
35
+ jsonCacheStats.misses++;
36
+ const serialized = JSON.stringify(obj);
37
+ // Only cache objects that are likely to be reused (not one-off empty objects)
38
+ if (Object.keys(obj).length > 0) {
39
+ jsonCache.set(obj, serialized);
40
+ }
41
+ return serialized;
42
+ }
43
+
44
+ function isGzipBytes(bytes: Uint8Array): boolean {
45
+ return bytes.length >= 2 && bytes[0] === 0x1f && bytes[1] === 0x8b;
46
+ }
47
+
48
+ async function maybeGunzip(bytes: Uint8Array): Promise<Uint8Array> {
49
+ if (!isGzipBytes(bytes)) return bytes;
50
+
51
+ // Prefer Web Streams decompression when available (browser/modern runtimes).
52
+ if (typeof DecompressionStream !== 'undefined') {
53
+ const stream = new ReadableStream({
54
+ start(controller) {
55
+ controller.enqueue(new Uint8Array(bytes));
56
+ controller.close();
57
+ },
58
+ });
59
+
60
+ const decompressed = stream.pipeThrough(
61
+ new DecompressionStream('gzip')
62
+ ) as ReadableStream<Uint8Array>;
63
+ const reader = decompressed.getReader();
64
+
65
+ try {
66
+ const chunks: Uint8Array[] = [];
67
+ let total = 0;
68
+ while (true) {
69
+ const { value, done } = await reader.read();
70
+ if (done) break;
71
+ if (!value) continue;
72
+ chunks.push(value);
73
+ total += value.length;
74
+ }
75
+
76
+ const out = new Uint8Array(total);
77
+ let offset = 0;
78
+ for (const chunk of chunks) {
79
+ out.set(chunk, offset);
80
+ offset += chunk.length;
81
+ }
82
+ return out;
83
+ } finally {
84
+ reader.releaseLock();
85
+ }
86
+ }
87
+
88
+ // If the runtime didn't auto-decompress `Content-Encoding: gzip`, and doesn't
89
+ // support DecompressionStream, we can't safely decode the chunk.
90
+ throw new Error(
91
+ 'Snapshot chunk appears gzip-compressed but gzip decompression is not available in this runtime'
92
+ );
93
+ }
94
+
95
+ function parseNdjsonRows(text: string): unknown[] {
96
+ const rows: unknown[] = [];
97
+ for (const line of text.split('\n')) {
98
+ if (!line) continue;
99
+ rows.push(JSON.parse(line));
100
+ }
101
+ return rows;
102
+ }
103
+
104
+ async function computeSha256Hex(bytes: Uint8Array): Promise<string> {
105
+ // Use crypto.subtle if available (browsers, modern Node/Bun)
106
+ if (typeof crypto !== 'undefined' && crypto.subtle) {
107
+ // Create a fresh ArrayBuffer to satisfy crypto.subtle's type requirements
108
+ const buffer = new ArrayBuffer(bytes.length);
109
+ new Uint8Array(buffer).set(bytes);
110
+ const hashBuffer = await crypto.subtle.digest('SHA-256', buffer);
111
+ const hashArray = new Uint8Array(hashBuffer);
112
+ return Array.from(hashArray, (b) => b.toString(16).padStart(2, '0')).join(
113
+ ''
114
+ );
115
+ }
116
+
117
+ // Fallback for Node.js/Bun without crypto.subtle
118
+ if (typeof globalThis.require === 'function') {
119
+ // eslint-disable-next-line @typescript-eslint/no-require-imports
120
+ const { createHash } = await import('node:crypto');
121
+ return createHash('sha256').update(Buffer.from(bytes)).digest('hex');
122
+ }
123
+
124
+ throw new Error(
125
+ 'No crypto implementation available for SHA-256. ' +
126
+ 'Ensure crypto.subtle is available or running in Node.js/Bun.'
127
+ );
128
+ }
129
+
130
+ async function mapWithConcurrency<T, U>(
131
+ items: readonly T[],
132
+ concurrency: number,
133
+ mapper: (item: T, index: number) => Promise<U>
134
+ ): Promise<U[]> {
135
+ if (items.length === 0) return [];
136
+
137
+ const workerCount = Math.max(1, Math.min(concurrency, items.length));
138
+ const results = new Array<U>(items.length);
139
+ let nextIndex = 0;
140
+
141
+ async function worker(): Promise<void> {
142
+ while (nextIndex < items.length) {
143
+ const index = nextIndex;
144
+ nextIndex += 1;
145
+ const item = items[index];
146
+ if (item === undefined) continue;
147
+ results[index] = await mapper(item, index);
148
+ }
149
+ }
150
+
151
+ await Promise.all(Array.from({ length: workerCount }, () => worker()));
152
+ return results;
153
+ }
154
+
155
/**
 * Resolve chunk-referenced bootstrap snapshots into inline rows.
 *
 * Bootstrap subscriptions may reference snapshot data by chunk id rather
 * than carrying rows inline. For each referenced chunk this fetches the
 * bytes via the transport (deduplicated across snapshots through a
 * promise cache), gunzips when needed, verifies the sha256 when the
 * server supplied one, parses the NDJSON payload, and returns a response
 * whose snapshots carry `rows` with `chunks` cleared.
 */
async function materializeChunkedSnapshots(
  transport: SyncTransport,
  response: SyncPullResponse
): Promise<SyncPullResponse> {
  // Cache fetch *promises* (not bytes) keyed by chunk id so concurrent
  // snapshots referencing the same chunk share a single request.
  const chunkCache = new Map<string, Promise<Uint8Array>>();
  const decoder = new TextDecoder();

  const subscriptions = await Promise.all(
    response.subscriptions.map(async (sub) => {
      // Only bootstrap subscriptions carry snapshots to materialize.
      if (!sub.bootstrap) return sub;
      if (!sub.snapshots || sub.snapshots.length === 0) return sub;

      const snapshots = await mapWithConcurrency(
        sub.snapshots,
        SNAPSHOT_CHUNK_CONCURRENCY,
        async (snapshot) => {
          const chunks = snapshot.chunks ?? [];
          if (chunks.length === 0) {
            // Snapshot already carries inline rows (or none) — pass through.
            return snapshot;
          }

          const parsedRowsByChunk = await mapWithConcurrency(
            chunks,
            SNAPSHOT_CHUNK_CONCURRENCY,
            async (chunk) => {
              // No await occurs between the cache get and set, so this
              // check-then-set cannot interleave on the single JS thread.
              const promise =
                chunkCache.get(chunk.id) ??
                transport.fetchSnapshotChunk({ chunkId: chunk.id });
              chunkCache.set(chunk.id, promise);

              const raw = await promise;
              const bytes = await maybeGunzip(raw);

              // Verify chunk integrity using sha256 hash
              if (chunk.sha256) {
                const actualHash = await computeSha256Hex(bytes);
                if (actualHash !== chunk.sha256) {
                  throw new Error(
                    `Snapshot chunk integrity check failed: expected sha256 ${chunk.sha256}, got ${actualHash}`
                  );
                }
              }

              const text = decoder.decode(bytes);
              return parseNdjsonRows(text);
            }
          );

          // Flatten per-chunk row arrays, preserving chunk order.
          const rows: unknown[] = [];
          for (const parsedRows of parsedRowsByChunk) {
            rows.push(...parsedRows);
          }

          // Rows are now inline; drop the chunk references.
          return { ...snapshot, rows, chunks: undefined };
        }
      );

      return { ...sub, snapshots };
    })
  );

  // Clear chunk cache after processing to prevent memory accumulation
  chunkCache.clear();

  return { ...response, subscriptions };
}
221
+
222
+ function parseBootstrapState(
223
+ value: string | object | null | undefined
224
+ ): SyncBootstrapState | null {
225
+ if (!value) return null;
226
+ try {
227
+ // Handle both string (raw JSON) and object (already deserialized by SerializePlugin)
228
+ const parsed: SyncBootstrapState =
229
+ typeof value === 'string'
230
+ ? (JSON.parse(value) as SyncBootstrapState)
231
+ : (value as SyncBootstrapState);
232
+ if (!parsed || typeof parsed !== 'object') return null;
233
+ if (typeof parsed.asOfCommitSeq !== 'number') return null;
234
+ if (!Array.isArray(parsed.tables)) return null;
235
+ if (typeof parsed.tableIndex !== 'number') return null;
236
+ if (parsed.rowCursor !== null && typeof parsed.rowCursor !== 'string')
237
+ return null;
238
+ return parsed;
239
+ } catch {
240
+ return null;
241
+ }
242
+ }
243
+
244
/** Options for a single pull cycle (also reused by the combined sync path). */
export interface SyncPullOnceOptions {
  /** Stable identifier of this client instance; sent with every request. */
  clientId: string;
  /** Actor on whose behalf the sync runs; plugins see 'unknown' when omitted. */
  actorId?: string;
  /** Client plugins; their `afterPull` hooks may transform each pull response. */
  plugins?: SyncClientPlugin[];
  /**
   * Desired subscriptions (client-chosen ids).
   * Cursors are persisted in `sync_subscription_state`.
   */
  subscriptions: Array<Omit<SyncSubscriptionRequest, 'cursor'>>;
  /** Max commits per pull; defaults to 50. */
  limitCommits?: number;
  /** Max snapshot rows per pull; defaults to 1000. */
  limitSnapshotRows?: number;
  /** Upper bound on snapshot pages per pull; passed through to the server. */
  maxSnapshotPages?: number;
  /** Ask the server to deduplicate rows; passed through as-is. */
  dedupeRows?: boolean;
  /** Namespace for persisted subscription state; defaults to 'default'. */
  stateId?: string;
}
259
+
260
/**
 * Build a pull request from subscription state. Exported for use
 * by the combined sync path in sync-loop.ts.
 *
 * Loads all persisted rows for `stateId` from `sync_subscription_state`,
 * then merges each desired subscription with its stored cursor (clamped
 * to >= -1, where -1 means "no cursor yet") and any persisted bootstrap
 * state.
 *
 * @returns the request plus the loaded state rows (and their id index),
 *          which `applyPullResponse` later needs for cleanup decisions.
 */
export async function buildPullRequest<DB extends SyncClientDb>(
  db: Kysely<DB>,
  options: SyncPullOnceOptions
): Promise<{
  request: SyncPullRequest;
  existing: SyncSubscriptionStateTable[];
  existingById: Map<string, SyncSubscriptionStateTable>;
  stateId: string;
}> {
  const stateId = options.stateId ?? 'default';

  // Raw SQL via sql.ref/sql.table/sql.val keeps identifiers and values
  // properly quoted/parameterized across client DB dialects.
  const existingResult = await sql<SyncSubscriptionStateTable>`
    select
      ${sql.ref('state_id')},
      ${sql.ref('subscription_id')},
      ${sql.ref('shape')},
      ${sql.ref('scopes_json')},
      ${sql.ref('params_json')},
      ${sql.ref('cursor')},
      ${sql.ref('bootstrap_state_json')},
      ${sql.ref('status')},
      ${sql.ref('created_at')},
      ${sql.ref('updated_at')}
    from ${sql.table('sync_subscription_state')}
    where ${sql.ref('state_id')} = ${sql.val(stateId)}
  `.execute(db);
  const existing = existingResult.rows;

  // Index persisted rows by subscription id for O(1) cursor lookups below.
  const existingById = new Map<string, SyncSubscriptionStateTable>();
  for (const row of existing) existingById.set(row.subscription_id, row);

  const request: SyncPullRequest = {
    clientId: options.clientId,
    limitCommits: options.limitCommits ?? 50,
    limitSnapshotRows: options.limitSnapshotRows ?? 1000,
    maxSnapshotPages: options.maxSnapshotPages,
    dedupeRows: options.dedupeRows,
    subscriptions: (options.subscriptions ?? []).map((sub) => ({
      ...sub,
      // Clamp guards against a corrupted/negative persisted cursor.
      cursor: Math.max(-1, existingById.get(sub.id)?.cursor ?? -1),
      // Invalid persisted bootstrap state degrades to null (fresh bootstrap).
      bootstrapState: parseBootstrapState(
        existingById.get(sub.id)?.bootstrap_state_json
      ),
    })),
  };

  return { request, existing, existingById, stateId };
}
312
+
313
/**
 * Apply a pull response (run plugins + write to local DB).
 * Exported for use by the combined sync path in sync-loop.ts.
 *
 * Pipeline: materialize chunked snapshots → run plugin `afterPull` hooks
 * → in ONE transaction: drop local data for no-longer-desired or revoked
 * subscriptions, apply snapshots (bootstrap) or incremental commits, and
 * upsert each subscription's cursor/metadata row.
 *
 * @returns the (possibly plugin-transformed) response that was applied.
 */
export async function applyPullResponse<DB extends SyncClientDb>(
  db: Kysely<DB>,
  transport: SyncTransport,
  shapes: ClientTableRegistry<DB>,
  options: SyncPullOnceOptions,
  pullState: {
    request: SyncPullRequest;
    existing: SyncSubscriptionStateTable[];
    existingById: Map<string, SyncSubscriptionStateTable>;
    stateId: string;
  },
  rawResponse: SyncPullResponse
): Promise<SyncPullResponse> {
  const { request, existing, existingById, stateId } = pullState;

  // Fetch + inline any chunk-referenced snapshot rows before applying.
  const hydrated = await materializeChunkedSnapshots(transport, rawResponse);

  const ctx: SyncClientPluginContext = {
    actorId: options.actorId ?? 'unknown',
    clientId: options.clientId,
  };
  const plugins = options.plugins ?? [];

  // Each plugin's afterPull may replace the response; hooks chain in order.
  let responseToApply = hydrated;
  for (const plugin of plugins) {
    if (!plugin.afterPull) continue;
    responseToApply = await plugin.afterPull(ctx, {
      request,
      response: responseToApply,
    });
  }

  // All local writes share one transaction so data mutations and
  // subscription-state bookkeeping commit or roll back together.
  await db.transaction().execute(async (trx) => {
    const desiredIds = new Set((options.subscriptions ?? []).map((s) => s.id));

    // Remove local data for subscriptions that are no longer desired.
    for (const row of existing) {
      if (desiredIds.has(row.subscription_id)) continue;

      // Clear data for this shape matching the subscription's scopes
      if (row.shape) {
        try {
          // scopes_json may arrive raw (string) or pre-deserialized.
          const scopes = row.scopes_json
            ? typeof row.scopes_json === 'string'
              ? JSON.parse(row.scopes_json)
              : row.scopes_json
            : {};
          await shapes.getOrThrow(row.shape).clearAll({ trx, scopes });
        } catch {
          // ignore missing shape handler
        }
      }

      await sql`
        delete from ${sql.table('sync_subscription_state')}
        where ${sql.ref('state_id')} = ${sql.val(stateId)}
          and ${sql.ref('subscription_id')} = ${sql.val(row.subscription_id)}
      `.execute(trx);
    }

    // Index the desired subscription definitions by id for metadata lookup.
    const subsById = new Map<string, (typeof options.subscriptions)[number]>();
    for (const s of options.subscriptions ?? []) subsById.set(s.id, s);

    for (const sub of responseToApply.subscriptions) {
      const def = subsById.get(sub.id);
      const prev = existingById.get(sub.id);

      // Revoked: clear data and drop the subscription row.
      if (sub.status === 'revoked') {
        if (prev?.shape) {
          try {
            const scopes = prev.scopes_json
              ? typeof prev.scopes_json === 'string'
                ? JSON.parse(prev.scopes_json)
                : prev.scopes_json
              : {};
            await shapes.getOrThrow(prev.shape).clearAll({ trx, scopes });
          } catch {
            // ignore missing handler
          }
        }

        await sql`
          delete from ${sql.table('sync_subscription_state')}
          where ${sql.ref('state_id')} = ${sql.val(stateId)}
            and ${sql.ref('subscription_id')} = ${sql.val(sub.id)}
        `.execute(trx);
        continue;
      }

      // Apply snapshots (bootstrap mode)
      if (sub.bootstrap) {
        for (const snapshot of sub.snapshots ?? []) {
          const handler = shapes.getOrThrow(snapshot.table);

          // Call onSnapshotStart hook when starting a new snapshot
          if (snapshot.isFirstPage && handler.onSnapshotStart) {
            await handler.onSnapshotStart({
              trx,
              table: snapshot.table,
              scopes: sub.scopes,
            });
          }

          await handler.applySnapshot({ trx }, snapshot);

          // Call onSnapshotEnd hook when snapshot is complete
          if (snapshot.isLastPage && handler.onSnapshotEnd) {
            await handler.onSnapshotEnd({
              trx,
              table: snapshot.table,
              scopes: sub.scopes,
            });
          }
        }
      } else {
        // Apply incremental changes
        for (const commit of sub.commits) {
          for (const change of commit.changes) {
            const handler = shapes.getOrThrow(change.table);
            await handler.applyChange({ trx }, change);
          }
        }
      }

      // Persist subscription cursor + metadata.
      // Use cached JSON serialization to avoid repeated stringification
      const now = Date.now();
      const paramsJson = serializeJsonCached(def?.params ?? {});
      const scopesJson = serializeJsonCached(def?.scopes ?? {});
      // bootstrap_state_json is only meaningful mid-bootstrap; null otherwise.
      const bootstrapStateJson = sub.bootstrap
        ? sub.bootstrapState
          ? serializeJsonCached(sub.bootstrapState)
          : null
        : null;

      const shape = def?.shape ?? 'unknown';
      // Upsert keyed on (state_id, subscription_id).
      await sql`
        insert into ${sql.table('sync_subscription_state')} (
          ${sql.join([
            sql.ref('state_id'),
            sql.ref('subscription_id'),
            sql.ref('shape'),
            sql.ref('scopes_json'),
            sql.ref('params_json'),
            sql.ref('cursor'),
            sql.ref('bootstrap_state_json'),
            sql.ref('status'),
            sql.ref('created_at'),
            sql.ref('updated_at'),
          ])}
        ) values (
          ${sql.join([
            sql.val(stateId),
            sql.val(sub.id),
            sql.val(shape),
            sql.val(scopesJson),
            sql.val(paramsJson),
            sql.val(sub.nextCursor),
            sql.val(bootstrapStateJson),
            sql.val('active'),
            sql.val(now),
            sql.val(now),
          ])}
        )
        on conflict (${sql.join([sql.ref('state_id'), sql.ref('subscription_id')])})
        do update set
          ${sql.ref('shape')} = ${sql.val(shape)},
          ${sql.ref('scopes_json')} = ${sql.val(scopesJson)},
          ${sql.ref('params_json')} = ${sql.val(paramsJson)},
          ${sql.ref('cursor')} = ${sql.val(sub.nextCursor)},
          ${sql.ref('bootstrap_state_json')} = ${sql.val(bootstrapStateJson)},
          ${sql.ref('status')} = ${sql.val('active')},
          ${sql.ref('updated_at')} = ${sql.val(now)}
      `.execute(trx);
    }
  });

  return responseToApply;
}
497
+
498
+ export async function syncPullOnce<DB extends SyncClientDb>(
499
+ db: Kysely<DB>,
500
+ transport: SyncTransport,
501
+ shapes: ClientTableRegistry<DB>,
502
+ options: SyncPullOnceOptions
503
+ ): Promise<SyncPullResponse> {
504
+ const pullState = await buildPullRequest(db, options);
505
+ const { clientId, ...pullBody } = pullState.request;
506
+ const combined = await transport.sync({ clientId, pull: pullBody });
507
+ if (!combined.pull) {
508
+ return { ok: true, subscriptions: [] };
509
+ }
510
+ return applyPullResponse(
511
+ db,
512
+ transport,
513
+ shapes,
514
+ options,
515
+ pullState,
516
+ combined.pull
517
+ );
518
+ }
@@ -0,0 +1,201 @@
1
+ /**
2
+ * @syncular/client - Sync push engine (commit-based)
3
+ */
4
+
5
+ import type {
6
+ SyncPushRequest,
7
+ SyncPushResponse,
8
+ SyncTransport,
9
+ } from '@syncular/core';
10
+ import type { Kysely } from 'kysely';
11
+ import { upsertConflictsForRejectedCommit } from './conflicts';
12
+ import {
13
+ getNextSendableOutboxCommit,
14
+ markOutboxCommitAcked,
15
+ markOutboxCommitFailed,
16
+ markOutboxCommitPending,
17
+ } from './outbox';
18
+ import type {
19
+ SyncClientPlugin,
20
+ SyncClientPluginContext,
21
+ } from './plugins/types';
22
+ import type { SyncClientDb } from './schema';
23
+
24
/** Options for a single push attempt. */
export interface SyncPushOnceOptions {
  /** Stable identifier of this client instance; sent with the push. */
  clientId: string;
  /** Actor on whose behalf the push runs; plugins see 'unknown' when omitted. */
  actorId?: string;
  /** Client plugins; `beforePush`/`afterPush` hooks run around the send. */
  plugins?: SyncClientPlugin[];
}

/** Outcome of a single push attempt. */
export interface SyncPushOnceResult {
  /** False when the outbox had no sendable commit; true otherwise. */
  pushed: boolean;
  /** The server's (possibly plugin-transformed) response, when one was sent. */
  response?: SyncPushResponse;
}

/** A transport that additionally supports pushing over a WebSocket. */
interface TransportWithWsPush extends SyncTransport {
  /** Push via WS; resolves null when WS is unavailable (caller falls back to HTTP). */
  pushViaWs(request: SyncPushRequest): Promise<SyncPushResponse | null>;
}
38
+
39
+ function hasPushViaWs(
40
+ transport: SyncTransport
41
+ ): transport is TransportWithWsPush {
42
+ return 'pushViaWs' in transport && typeof transport.pushViaWs === 'function';
43
+ }
44
+
45
+ function clonePushRequest(request: SyncPushRequest): SyncPushRequest {
46
+ if (typeof structuredClone === 'function') return structuredClone(request);
47
+ return JSON.parse(JSON.stringify(request)) as SyncPushRequest;
48
+ }
49
+
50
/**
 * Push at most one outbox commit to the server.
 *
 * Flow: claim the next sendable commit → run `beforePush` plugins on a
 * deep copy → send (WebSocket push when supported, else the HTTP combined
 * endpoint) → run `afterPush` plugins → record the outcome on the outbox
 * row: acked on applied/cached, re-queued as pending for transport errors
 * or fully-retriable server errors, failed (+ conflict rows) on terminal
 * rejection.
 *
 * @returns `{ pushed: false }` when the outbox had nothing to send.
 * @throws rethrows plugin/transport errors after persisting outbox state.
 */
export async function syncPushOnce<DB extends SyncClientDb>(
  db: Kysely<DB>,
  transport: SyncTransport,
  options: SyncPushOnceOptions
): Promise<SyncPushOnceResult> {
  // getNextSendableOutboxCommit now atomically claims the commit
  // (marks it as 'sending' and returns it in one operation)
  const next = await getNextSendableOutboxCommit(db);
  if (!next) return { pushed: false };

  const request: SyncPushRequest = {
    clientId: options.clientId,
    clientCommitId: next.client_commit_id,
    operations: next.operations,
    schemaVersion: next.schema_version,
  };
  const ctx: SyncClientPluginContext = {
    actorId: options.actorId ?? 'unknown',
    clientId: options.clientId,
  };
  const plugins = options.plugins ?? [];

  let requestToSend = request;
  if (plugins.length > 0) {
    try {
      // Clone first so plugin mutations can't corrupt the outbox-derived
      // request if a later hook throws.
      requestToSend = clonePushRequest(request);
      for (const plugin of plugins) {
        if (!plugin.beforePush) continue;
        requestToSend = await plugin.beforePush(ctx, requestToSend);
      }
    } catch (err) {
      const message = err instanceof Error ? err.message : 'Unknown error';
      // Plugin failed before any network I/O: release the claim back to
      // 'pending' so the commit is retried later.
      await markOutboxCommitPending(db, { id: next.id, error: message });
      throw err;
    }
  }

  let res: SyncPushResponse;
  try {
    // Try WS push first if the transport supports it
    let wsResponse: SyncPushResponse | null = null;
    if (hasPushViaWs(transport)) {
      wsResponse = await transport.pushViaWs(requestToSend);
    }

    if (wsResponse) {
      res = wsResponse;
    } else {
      // Fall back to HTTP
      const combined = await transport.sync({
        clientId: requestToSend.clientId,
        push: {
          clientCommitId: requestToSend.clientCommitId,
          operations: requestToSend.operations,
          schemaVersion: requestToSend.schemaVersion,
        },
      });
      if (!combined.push) {
        throw new Error('Server returned no push response');
      }
      res = combined.push;
    }
  } catch (err) {
    const message = err instanceof Error ? err.message : 'Unknown error';
    // Treat transport exceptions as retryable. The sync loop already applies backoff,
    // and failed commits are reserved for terminal server rejections (e.g. conflicts).
    await markOutboxCommitPending(db, { id: next.id, error: message });
    throw err;
  }

  let responseToUse = res;
  if (plugins.length > 0) {
    try {
      for (const plugin of plugins) {
        if (!plugin.afterPush) continue;
        responseToUse = await plugin.afterPush(ctx, {
          request: requestToSend,
          response: responseToUse,
        });
      }
    } catch (err) {
      // The server already received and processed this commit. Persist the raw response
      // so we don't end up retrying a commit that was already applied.
      const responseJson = JSON.stringify(res);

      if (res.status === 'applied' || res.status === 'cached') {
        await markOutboxCommitAcked(db, {
          id: next.id,
          commitSeq: res.commitSeq ?? null,
          responseJson,
        });
      } else {
        // Server rejected the commit: record conflicts, then mark failed.
        await upsertConflictsForRejectedCommit(db, {
          outboxCommitId: next.id,
          clientCommitId: next.client_commit_id,
          response: res,
        });
        await markOutboxCommitFailed(db, {
          id: next.id,
          error: 'REJECTED',
          responseJson,
        });
      }

      throw err;
    }
  }

  const responseJson = JSON.stringify(responseToUse);

  if (responseToUse.status === 'applied' || responseToUse.status === 'cached') {
    await markOutboxCommitAcked(db, {
      id: next.id,
      commitSeq: responseToUse.commitSeq ?? null,
      responseJson,
    });
    return { pushed: true, response: responseToUse };
  }

  // Check if all errors are retriable - if so, keep pending for retry
  const errorResults = responseToUse.results.filter(
    (r) => r.status === 'error'
  );
  const allRetriable =
    errorResults.length > 0 && errorResults.every((r) => r.retriable === true);

  if (allRetriable) {
    // All errors are retriable - keep commit pending for retry
    const errorMessages = errorResults
      .map((r) => r.error ?? 'Unknown error')
      .join('; ');
    await markOutboxCommitPending(db, {
      id: next.id,
      error: `Retriable: ${errorMessages}`,
      responseJson,
    });
    return { pushed: true, response: responseToUse };
  }

  // Terminal rejection - mark as failed and record conflicts
  await upsertConflictsForRejectedCommit(db, {
    outboxCommitId: next.id,
    clientCommitId: next.client_commit_id,
    response: responseToUse,
  });
  await markOutboxCommitFailed(db, {
    id: next.id,
    error: 'REJECTED',
    responseJson,
  });
  return { pushed: true, response: responseToUse };
}