@syncular/client 0.0.1 → 0.0.2-126

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. package/README.md +23 -0
  2. package/dist/blobs/index.js +3 -3
  3. package/dist/client.d.ts +10 -5
  4. package/dist/client.d.ts.map +1 -1
  5. package/dist/client.js +70 -21
  6. package/dist/client.js.map +1 -1
  7. package/dist/conflicts.d.ts.map +1 -1
  8. package/dist/conflicts.js +1 -7
  9. package/dist/conflicts.js.map +1 -1
  10. package/dist/create-client.d.ts +5 -1
  11. package/dist/create-client.d.ts.map +1 -1
  12. package/dist/create-client.js +22 -10
  13. package/dist/create-client.js.map +1 -1
  14. package/dist/engine/SyncEngine.d.ts +24 -2
  15. package/dist/engine/SyncEngine.d.ts.map +1 -1
  16. package/dist/engine/SyncEngine.js +290 -43
  17. package/dist/engine/SyncEngine.js.map +1 -1
  18. package/dist/engine/index.js +2 -2
  19. package/dist/engine/types.d.ts +16 -4
  20. package/dist/engine/types.d.ts.map +1 -1
  21. package/dist/handlers/create-handler.d.ts +15 -5
  22. package/dist/handlers/create-handler.d.ts.map +1 -1
  23. package/dist/handlers/create-handler.js +35 -24
  24. package/dist/handlers/create-handler.js.map +1 -1
  25. package/dist/handlers/types.d.ts +5 -5
  26. package/dist/handlers/types.d.ts.map +1 -1
  27. package/dist/index.js +19 -19
  28. package/dist/migrate.d.ts +1 -1
  29. package/dist/migrate.d.ts.map +1 -1
  30. package/dist/migrate.js +148 -28
  31. package/dist/migrate.js.map +1 -1
  32. package/dist/mutations.d.ts +3 -1
  33. package/dist/mutations.d.ts.map +1 -1
  34. package/dist/mutations.js +93 -18
  35. package/dist/mutations.js.map +1 -1
  36. package/dist/outbox.d.ts.map +1 -1
  37. package/dist/outbox.js +1 -11
  38. package/dist/outbox.js.map +1 -1
  39. package/dist/plugins/incrementing-version.d.ts +1 -1
  40. package/dist/plugins/incrementing-version.js +2 -2
  41. package/dist/plugins/index.js +2 -2
  42. package/dist/proxy/dialect.js +1 -1
  43. package/dist/proxy/driver.js +1 -1
  44. package/dist/proxy/index.js +4 -4
  45. package/dist/proxy/mutations.js +1 -1
  46. package/dist/pull-engine.d.ts +29 -3
  47. package/dist/pull-engine.d.ts.map +1 -1
  48. package/dist/pull-engine.js +314 -78
  49. package/dist/pull-engine.js.map +1 -1
  50. package/dist/push-engine.d.ts.map +1 -1
  51. package/dist/push-engine.js +28 -3
  52. package/dist/push-engine.js.map +1 -1
  53. package/dist/query/QueryContext.js +1 -1
  54. package/dist/query/index.js +3 -3
  55. package/dist/query/tracked-select.d.ts +2 -1
  56. package/dist/query/tracked-select.d.ts.map +1 -1
  57. package/dist/query/tracked-select.js +1 -1
  58. package/dist/schema.d.ts +2 -2
  59. package/dist/schema.d.ts.map +1 -1
  60. package/dist/sync-loop.d.ts +5 -1
  61. package/dist/sync-loop.d.ts.map +1 -1
  62. package/dist/sync-loop.js +167 -18
  63. package/dist/sync-loop.js.map +1 -1
  64. package/package.json +30 -6
  65. package/src/client.test.ts +369 -0
  66. package/src/client.ts +101 -22
  67. package/src/conflicts.ts +1 -10
  68. package/src/create-client.ts +33 -5
  69. package/src/engine/SyncEngine.test.ts +157 -0
  70. package/src/engine/SyncEngine.ts +359 -40
  71. package/src/engine/types.ts +22 -4
  72. package/src/handlers/create-handler.ts +86 -37
  73. package/src/handlers/types.ts +10 -4
  74. package/src/migrate.ts +215 -33
  75. package/src/mutations.ts +143 -21
  76. package/src/outbox.ts +1 -15
  77. package/src/plugins/incrementing-version.ts +2 -2
  78. package/src/pull-engine.test.ts +147 -0
  79. package/src/pull-engine.ts +392 -77
  80. package/src/push-engine.ts +33 -1
  81. package/src/query/tracked-select.ts +1 -1
  82. package/src/schema.ts +2 -2
  83. package/src/sync-loop.ts +215 -19
@@ -6,11 +6,14 @@ import type {
6
6
  SyncBootstrapState,
7
7
  SyncPullRequest,
8
8
  SyncPullResponse,
9
+ SyncSnapshot,
9
10
  SyncSubscriptionRequest,
10
11
  SyncTransport,
11
12
  } from '@syncular/core';
12
- import { type Kysely, sql } from 'kysely';
13
+ import { decodeSnapshotRows } from '@syncular/core';
14
+ import { type Kysely, sql, type Transaction } from 'kysely';
13
15
  import type { ClientTableRegistry } from './handlers/registry';
16
+ import type { ClientTableHandler } from './handlers/types';
14
17
  import type {
15
18
  SyncClientPlugin,
16
19
  SyncClientPluginContext,
@@ -22,6 +25,9 @@ import type { SyncClientDb, SyncSubscriptionStateTable } from './schema';
22
25
  const jsonCache = new WeakMap<object, string>();
23
26
  const jsonCacheStats = { hits: 0, misses: 0 };
24
27
  const SNAPSHOT_CHUNK_CONCURRENCY = 8;
28
+ const SNAPSHOT_APPLY_BATCH_ROWS = 500;
29
+ const SNAPSHOT_ROW_FRAME_MAGIC = new Uint8Array([0x53, 0x52, 0x46, 0x31]); // "SRF1"
30
+ const FRAME_LENGTH_BYTES = 4;
25
31
 
26
32
  function serializeJsonCached(obj: object): string {
27
33
  if (obj === null || typeof obj !== 'object') {
@@ -45,63 +51,238 @@ function isGzipBytes(bytes: Uint8Array): boolean {
45
51
  return bytes.length >= 2 && bytes[0] === 0x1f && bytes[1] === 0x8b;
46
52
  }
47
53
 
48
- async function maybeGunzip(bytes: Uint8Array): Promise<Uint8Array> {
49
- if (!isGzipBytes(bytes)) return bytes;
54
+ function bytesToReadableStream(bytes: Uint8Array): ReadableStream<Uint8Array> {
55
+ return new ReadableStream<Uint8Array>({
56
+ start(controller) {
57
+ controller.enqueue(bytes);
58
+ controller.close();
59
+ },
60
+ });
61
+ }
50
62
 
51
- // Prefer Web Streams decompression when available (browser/modern runtimes).
52
- if (typeof DecompressionStream !== 'undefined') {
53
- const stream = new ReadableStream({
54
- start(controller) {
55
- controller.enqueue(new Uint8Array(bytes));
56
- controller.close();
57
- },
58
- });
63
+ function concatBytes(chunks: readonly Uint8Array[]): Uint8Array {
64
+ if (chunks.length === 1) {
65
+ return chunks[0] ?? new Uint8Array();
66
+ }
67
+ let total = 0;
68
+ for (const chunk of chunks) total += chunk.length;
69
+ const out = new Uint8Array(total);
70
+ let offset = 0;
71
+ for (const chunk of chunks) {
72
+ out.set(chunk, offset);
73
+ offset += chunk.length;
74
+ }
75
+ return out;
76
+ }
59
77
 
60
- const decompressed = stream.pipeThrough(
61
- new DecompressionStream('gzip')
62
- ) as ReadableStream<Uint8Array>;
63
- const reader = decompressed.getReader();
64
-
65
- try {
66
- const chunks: Uint8Array[] = [];
67
- let total = 0;
68
- while (true) {
69
- const { value, done } = await reader.read();
70
- if (done) break;
71
- if (!value) continue;
72
- chunks.push(value);
73
- total += value.length;
74
- }
78
+ function appendBytes(base: Uint8Array, next: Uint8Array): Uint8Array {
79
+ if (base.length === 0) return next;
80
+ if (next.length === 0) return base;
81
+ const out = new Uint8Array(base.length + next.length);
82
+ out.set(base, 0);
83
+ out.set(next, base.length);
84
+ return out;
85
+ }
86
+
87
+ function toOwnedUint8Array(chunk: Uint8Array): Uint8Array<ArrayBuffer> {
88
+ const out = new ArrayBuffer(chunk.byteLength);
89
+ const bytes = new Uint8Array(out);
90
+ bytes.set(chunk);
91
+ return bytes;
92
+ }
75
93
 
76
- const out = new Uint8Array(total);
77
- let offset = 0;
78
- for (const chunk of chunks) {
79
- out.set(chunk, offset);
80
- offset += chunk.length;
94
+ async function streamToBytes(
95
+ stream: ReadableStream<Uint8Array>
96
+ ): Promise<Uint8Array> {
97
+ const reader = stream.getReader();
98
+ try {
99
+ const chunks: Uint8Array[] = [];
100
+ while (true) {
101
+ const { done, value } = await reader.read();
102
+ if (done) break;
103
+ if (!value || value.length === 0) continue;
104
+ chunks.push(value);
105
+ }
106
+ return concatBytes(chunks);
107
+ } finally {
108
+ reader.releaseLock();
109
+ }
110
+ }
111
+
112
/**
 * Sniff a byte stream for the gzip magic prefix and, when present, pipe
 * it through gzip decompression; otherwise replay the stream unchanged.
 *
 * Reads from the source until at least 2 bytes are buffered (enough for
 * isGzipBytes to check the 0x1f 0x8b magic), then builds a replay stream
 * that first re-emits the prefetched bytes and afterwards forwards the
 * remainder of the source.
 *
 * @throws when the payload looks gzip-compressed but the runtime lacks
 *   DecompressionStream.
 */
async function maybeGunzipStream(
  stream: ReadableStream<Uint8Array>
): Promise<ReadableStream<Uint8Array>> {
  const reader = stream.getReader();
  const prefetched: Uint8Array[] = [];
  let prefetchedBytes = 0;

  // Buffer source chunks until we have the 2 bytes needed to detect gzip
  // (a short/empty stream simply exits with whatever was read).
  while (prefetchedBytes < 2) {
    const { done, value } = await reader.read();
    if (done) break;
    if (!value || value.length === 0) continue;
    prefetched.push(value);
    prefetchedBytes += value.length;
  }

  const prefetchedCombined = concatBytes(prefetched);
  const gzip = isGzipBytes(prefetchedCombined);

  // Replays the prefetched bytes, then forwards the rest of the source.
  // Chunks are copied via toOwnedUint8Array — presumably to decouple
  // consumers from buffers the source may reuse; TODO confirm.
  const replayStream = new ReadableStream<Uint8Array<ArrayBuffer>>({
    start(controller) {
      if (prefetchedCombined.length > 0) {
        controller.enqueue(toOwnedUint8Array(prefetchedCombined));
      }
    },
    async pull(controller) {
      const { done, value } = await reader.read();
      if (done) {
        controller.close();
        reader.releaseLock();
        return;
      }
      // Returning without enqueuing is fine: the stream calls pull again.
      if (!value || value.length === 0) return;
      controller.enqueue(toOwnedUint8Array(value));
    },
    async cancel(reason) {
      // Propagate cancellation upstream before releasing our lock.
      await reader.cancel(reason);
      reader.releaseLock();
    },
  });

  if (!gzip) return replayStream;

  if (typeof DecompressionStream !== 'undefined') {
    return replayStream.pipeThrough(new DecompressionStream('gzip'));
  }

  throw new Error(
    'Snapshot chunk appears gzip-compressed but gzip decompression is not available in this runtime'
  );
}
94
162
 
95
- function parseNdjsonRows(text: string): unknown[] {
96
- const rows: unknown[] = [];
97
- for (const line of text.split('\n')) {
98
- if (!line) continue;
99
- rows.push(JSON.parse(line));
163
+ async function maybeGunzip(bytes: Uint8Array): Promise<Uint8Array> {
164
+ if (!isGzipBytes(bytes)) return bytes;
165
+ const decompressedStream = await maybeGunzipStream(
166
+ bytesToReadableStream(bytes)
167
+ );
168
+ return streamToBytes(decompressedStream);
169
+ }
170
+
171
+ async function* decodeSnapshotRowStreamBatches(
172
+ stream: ReadableStream<Uint8Array>,
173
+ batchSize: number
174
+ ): AsyncGenerator<unknown[]> {
175
+ const reader = stream.getReader();
176
+ const decoder = new TextDecoder();
177
+ let pending: Uint8Array = new Uint8Array(0);
178
+ let headerValidated = false;
179
+ let rows: unknown[] = [];
180
+
181
+ try {
182
+ while (true) {
183
+ const { done, value } = await reader.read();
184
+ if (done) break;
185
+ if (!value || value.length === 0) continue;
186
+ pending = appendBytes(pending, value);
187
+
188
+ if (!headerValidated) {
189
+ if (pending.length < SNAPSHOT_ROW_FRAME_MAGIC.length) {
190
+ continue;
191
+ }
192
+ for (let index = 0; index < SNAPSHOT_ROW_FRAME_MAGIC.length; index++) {
193
+ if (pending[index] !== SNAPSHOT_ROW_FRAME_MAGIC[index]) {
194
+ throw new Error('Unexpected snapshot chunk format');
195
+ }
196
+ }
197
+ pending = pending.subarray(SNAPSHOT_ROW_FRAME_MAGIC.length);
198
+ headerValidated = true;
199
+ }
200
+
201
+ while (pending.length >= FRAME_LENGTH_BYTES) {
202
+ const view = new DataView(
203
+ pending.buffer,
204
+ pending.byteOffset,
205
+ pending.byteLength
206
+ );
207
+ const payloadLength = view.getUint32(0, false);
208
+ if (pending.length < FRAME_LENGTH_BYTES + payloadLength) {
209
+ break;
210
+ }
211
+
212
+ const payload = pending.subarray(
213
+ FRAME_LENGTH_BYTES,
214
+ FRAME_LENGTH_BYTES + payloadLength
215
+ );
216
+ rows.push(JSON.parse(decoder.decode(payload)));
217
+ pending = pending.subarray(FRAME_LENGTH_BYTES + payloadLength);
218
+
219
+ if (rows.length >= batchSize) {
220
+ yield rows;
221
+ rows = [];
222
+ }
223
+ }
224
+ }
225
+
226
+ if (!headerValidated) {
227
+ throw new Error('Snapshot chunk payload is too small');
228
+ }
229
+
230
+ if (pending.length > 0) {
231
+ if (pending.length < FRAME_LENGTH_BYTES) {
232
+ throw new Error('Snapshot chunk payload ended mid-frame header');
233
+ }
234
+ const view = new DataView(
235
+ pending.buffer,
236
+ pending.byteOffset,
237
+ pending.byteLength
238
+ );
239
+ const payloadLength = view.getUint32(0, false);
240
+ if (pending.length < FRAME_LENGTH_BYTES + payloadLength) {
241
+ throw new Error('Snapshot chunk payload ended mid-frame body');
242
+ }
243
+ while (pending.length >= FRAME_LENGTH_BYTES) {
244
+ const nextView = new DataView(
245
+ pending.buffer,
246
+ pending.byteOffset,
247
+ pending.byteLength
248
+ );
249
+ const nextLength = nextView.getUint32(0, false);
250
+ if (pending.length < FRAME_LENGTH_BYTES + nextLength) {
251
+ break;
252
+ }
253
+ const payload = pending.subarray(
254
+ FRAME_LENGTH_BYTES,
255
+ FRAME_LENGTH_BYTES + nextLength
256
+ );
257
+ rows.push(JSON.parse(decoder.decode(payload)));
258
+ pending = pending.subarray(FRAME_LENGTH_BYTES + nextLength);
259
+ if (rows.length >= batchSize) {
260
+ yield rows;
261
+ rows = [];
262
+ }
263
+ }
264
+ if (pending.length > 0) {
265
+ throw new Error('Snapshot chunk payload ended mid-frame body');
266
+ }
267
+ }
268
+
269
+ if (rows.length > 0) {
270
+ yield rows;
271
+ }
272
+ } finally {
273
+ reader.releaseLock();
100
274
  }
101
- return rows;
102
275
  }
103
276
 
104
- async function computeSha256Hex(bytes: Uint8Array): Promise<string> {
277
+ async function computeSha256Hex(
278
+ bytes: Uint8Array,
279
+ sha256Override?: (bytes: Uint8Array) => Promise<string>
280
+ ): Promise<string> {
281
+ // Use injected implementation if provided (e.g. expo-crypto on React Native)
282
+ if (sha256Override) {
283
+ return sha256Override(bytes);
284
+ }
285
+
105
286
  // Use crypto.subtle if available (browsers, modern Node/Bun)
106
287
  if (typeof crypto !== 'undefined' && crypto.subtle) {
107
288
  // Create a fresh ArrayBuffer to satisfy crypto.subtle's type requirements
@@ -114,19 +295,23 @@ async function computeSha256Hex(bytes: Uint8Array): Promise<string> {
114
295
  );
115
296
  }
116
297
 
117
- // Fallback for Node.js/Bun without crypto.subtle
118
- if (typeof globalThis.require === 'function') {
119
- // eslint-disable-next-line @typescript-eslint/no-require-imports
120
- const { createHash } = await import('node:crypto');
121
- return createHash('sha256').update(Buffer.from(bytes)).digest('hex');
122
- }
123
-
124
298
  throw new Error(
125
299
  'No crypto implementation available for SHA-256. ' +
126
- 'Ensure crypto.subtle is available or running in Node.js/Bun.'
300
+ 'Provide a sha256 function via options or ensure crypto.subtle is available.'
127
301
  );
128
302
  }
129
303
 
304
+ async function fetchSnapshotChunkStream(
305
+ transport: SyncTransport,
306
+ chunkId: string
307
+ ): Promise<ReadableStream<Uint8Array>> {
308
+ if (transport.fetchSnapshotChunkStream) {
309
+ return transport.fetchSnapshotChunkStream({ chunkId });
310
+ }
311
+ const bytes = await transport.fetchSnapshotChunk({ chunkId });
312
+ return bytesToReadableStream(bytes);
313
+ }
314
+
130
315
  async function mapWithConcurrency<T, U>(
131
316
  items: readonly T[],
132
317
  concurrency: number,
@@ -154,10 +339,10 @@ async function mapWithConcurrency<T, U>(
154
339
 
155
340
  async function materializeChunkedSnapshots(
156
341
  transport: SyncTransport,
157
- response: SyncPullResponse
342
+ response: SyncPullResponse,
343
+ sha256Override?: (bytes: Uint8Array) => Promise<string>
158
344
  ): Promise<SyncPullResponse> {
159
345
  const chunkCache = new Map<string, Promise<Uint8Array>>();
160
- const decoder = new TextDecoder();
161
346
 
162
347
  const subscriptions = await Promise.all(
163
348
  response.subscriptions.map(async (sub) => {
@@ -187,7 +372,10 @@ async function materializeChunkedSnapshots(
187
372
 
188
373
  // Verify chunk integrity using sha256 hash
189
374
  if (chunk.sha256) {
190
- const actualHash = await computeSha256Hex(bytes);
375
+ const actualHash = await computeSha256Hex(
376
+ bytes,
377
+ sha256Override
378
+ );
191
379
  if (actualHash !== chunk.sha256) {
192
380
  throw new Error(
193
381
  `Snapshot chunk integrity check failed: expected sha256 ${chunk.sha256}, got ${actualHash}`
@@ -195,8 +383,7 @@ async function materializeChunkedSnapshots(
195
383
  }
196
384
  }
197
385
 
198
- const text = decoder.decode(bytes);
199
- return parseNdjsonRows(text);
386
+ return decodeSnapshotRows(bytes);
200
387
  }
201
388
  );
202
389
 
@@ -219,6 +406,69 @@ async function materializeChunkedSnapshots(
219
406
  return { ...response, subscriptions };
220
407
  }
221
408
 
409
/**
 * Apply a snapshot whose rows may be delivered via external chunk
 * downloads instead of inline `rows`.
 *
 * With no chunk refs the snapshot is applied directly. Otherwise each
 * chunk is fetched (streaming when the transport supports it), gunzipped
 * if needed, decoded into row batches, and applied through
 * handler.applySnapshot in SNAPSHOT_APPLY_BATCH_ROWS-sized pages.
 *
 * One decoded batch is always held back in `pendingBatch` so that the
 * final batch of the final chunk can carry the snapshot's original
 * isLastPage flag; every earlier page is applied with isLastPage: false,
 * and only the very first applied page keeps snapshot.isFirstPage.
 *
 * NOTE(review): if the last chunk decodes to zero rows, no page is ever
 * applied with isLastPage = true — confirm handlers tolerate a missing
 * final page.
 */
async function applyChunkedSnapshot<DB extends SyncClientDb>(
  transport: SyncTransport,
  handler: Pick<ClientTableHandler<DB>, 'applySnapshot'>,
  trx: Transaction<DB>,
  snapshot: SyncSnapshot
): Promise<void> {
  const chunks = snapshot.chunks ?? [];
  if (chunks.length === 0) {
    // Inline snapshot: nothing to download, apply as-is.
    await handler.applySnapshot({ trx }, snapshot);
    return;
  }

  // Carries snapshot.isFirstPage into the first applied page only.
  let nextIsFirstPage = snapshot.isFirstPage;

  for (let chunkIndex = 0; chunkIndex < chunks.length; chunkIndex += 1) {
    const chunk = chunks[chunkIndex];
    if (!chunk) continue;

    const rawStream = await fetchSnapshotChunkStream(transport, chunk.id);
    const decodedStream = await maybeGunzipStream(rawStream);
    const rowBatchIterator = decodeSnapshotRowStreamBatches(
      decodedStream,
      SNAPSHOT_APPLY_BATCH_ROWS
    );

    // One-batch lookahead: apply a batch only once we know another
    // follows it, so the truly last batch can be flagged correctly.
    let pendingBatch: unknown[] | null = null;
    // eslint-disable-next-line no-await-in-loop
    for await (const batch of rowBatchIterator) {
      if (pendingBatch) {
        // eslint-disable-next-line no-await-in-loop
        await handler.applySnapshot(
          { trx },
          {
            ...snapshot,
            rows: pendingBatch,
            chunks: undefined,
            isFirstPage: nextIsFirstPage,
            isLastPage: false,
          }
        );
        nextIsFirstPage = false;
      }
      pendingBatch = batch;
    }

    // Flush the held-back batch; only the last batch of the last chunk
    // may carry the snapshot's original isLastPage flag.
    if (pendingBatch) {
      const isLastChunk = chunkIndex === chunks.length - 1;
      // eslint-disable-next-line no-await-in-loop
      await handler.applySnapshot(
        { trx },
        {
          ...snapshot,
          rows: pendingBatch,
          chunks: undefined,
          isFirstPage: nextIsFirstPage,
          isLastPage: isLastChunk ? snapshot.isLastPage : false,
        }
      );
      nextIsFirstPage = false;
    }
  }
}
471
+
222
472
  function parseBootstrapState(
223
473
  value: string | object | null | undefined
224
474
  ): SyncBootstrapState | null {
@@ -255,21 +505,34 @@ export interface SyncPullOnceOptions {
255
505
  maxSnapshotPages?: number;
256
506
  dedupeRows?: boolean;
257
507
  stateId?: string;
508
+ /**
509
+ * Custom SHA-256 hash function for snapshot chunk integrity verification.
510
+ * Provide this on platforms where `crypto.subtle` is unavailable (e.g. React Native).
511
+ * Must return the hex-encoded hash string.
512
+ */
513
+ sha256?: (bytes: Uint8Array) => Promise<string>;
258
514
  }
259
515
 
260
- export async function syncPullOnce<DB extends SyncClientDb>(
516
+ /**
517
+ * Build a pull request from subscription state. Exported for use
518
+ * by the combined sync path in sync-loop.ts.
519
+ */
520
+ export async function buildPullRequest<DB extends SyncClientDb>(
261
521
  db: Kysely<DB>,
262
- transport: SyncTransport,
263
- shapes: ClientTableRegistry<DB>,
264
522
  options: SyncPullOnceOptions
265
- ): Promise<SyncPullResponse> {
523
+ ): Promise<{
524
+ request: SyncPullRequest;
525
+ existing: SyncSubscriptionStateTable[];
526
+ existingById: Map<string, SyncSubscriptionStateTable>;
527
+ stateId: string;
528
+ }> {
266
529
  const stateId = options.stateId ?? 'default';
267
530
 
268
531
  const existingResult = await sql<SyncSubscriptionStateTable>`
269
532
  select
270
533
  ${sql.ref('state_id')},
271
534
  ${sql.ref('subscription_id')},
272
- ${sql.ref('shape')},
535
+ ${sql.ref('table')},
273
536
  ${sql.ref('scopes_json')},
274
537
  ${sql.ref('params_json')},
275
538
  ${sql.ref('cursor')},
@@ -300,16 +563,40 @@ export async function syncPullOnce<DB extends SyncClientDb>(
300
563
  })),
301
564
  };
302
565
 
303
- const res = await transport.pull(request);
304
- const hydrated = await materializeChunkedSnapshots(transport, res);
566
+ return { request, existing, existingById, stateId };
567
+ }
568
+
569
+ /**
570
+ * Apply a pull response (run plugins + write to local DB).
571
+ * Exported for use by the combined sync path in sync-loop.ts.
572
+ */
573
+ export async function applyPullResponse<DB extends SyncClientDb>(
574
+ db: Kysely<DB>,
575
+ transport: SyncTransport,
576
+ handlers: ClientTableRegistry<DB>,
577
+ options: SyncPullOnceOptions,
578
+ pullState: {
579
+ request: SyncPullRequest;
580
+ existing: SyncSubscriptionStateTable[];
581
+ existingById: Map<string, SyncSubscriptionStateTable>;
582
+ stateId: string;
583
+ },
584
+ rawResponse: SyncPullResponse
585
+ ): Promise<SyncPullResponse> {
586
+ const { request, existing, existingById, stateId } = pullState;
305
587
 
306
588
  const ctx: SyncClientPluginContext = {
307
589
  actorId: options.actorId ?? 'unknown',
308
590
  clientId: options.clientId,
309
591
  };
310
592
  const plugins = options.plugins ?? [];
593
+ const requiresMaterializedSnapshots = plugins.some(
594
+ (plugin) => !!plugin.afterPull
595
+ );
311
596
 
312
- let responseToApply = hydrated;
597
+ let responseToApply = requiresMaterializedSnapshots
598
+ ? await materializeChunkedSnapshots(transport, rawResponse, options.sha256)
599
+ : rawResponse;
313
600
  for (const plugin of plugins) {
314
601
  if (!plugin.afterPull) continue;
315
602
  responseToApply = await plugin.afterPull(ctx, {
@@ -325,17 +612,17 @@ export async function syncPullOnce<DB extends SyncClientDb>(
325
612
  for (const row of existing) {
326
613
  if (desiredIds.has(row.subscription_id)) continue;
327
614
 
328
- // Clear data for this shape matching the subscription's scopes
329
- if (row.shape) {
615
+ // Clear data for this table matching the subscription's scopes
616
+ if (row.table) {
330
617
  try {
331
618
  const scopes = row.scopes_json
332
619
  ? typeof row.scopes_json === 'string'
333
620
  ? JSON.parse(row.scopes_json)
334
621
  : row.scopes_json
335
622
  : {};
336
- await shapes.getOrThrow(row.shape).clearAll({ trx, scopes });
623
+ await handlers.getOrThrow(row.table).clearAll({ trx, scopes });
337
624
  } catch {
338
- // ignore missing shape handler
625
+ // ignore missing table handler
339
626
  }
340
627
  }
341
628
 
@@ -355,14 +642,14 @@ export async function syncPullOnce<DB extends SyncClientDb>(
355
642
 
356
643
  // Revoked: clear data and drop the subscription row.
357
644
  if (sub.status === 'revoked') {
358
- if (prev?.shape) {
645
+ if (prev?.table) {
359
646
  try {
360
647
  const scopes = prev.scopes_json
361
648
  ? typeof prev.scopes_json === 'string'
362
649
  ? JSON.parse(prev.scopes_json)
363
650
  : prev.scopes_json
364
651
  : {};
365
- await shapes.getOrThrow(prev.shape).clearAll({ trx, scopes });
652
+ await handlers.getOrThrow(prev.table).clearAll({ trx, scopes });
366
653
  } catch {
367
654
  // ignore missing handler
368
655
  }
@@ -379,7 +666,9 @@ export async function syncPullOnce<DB extends SyncClientDb>(
379
666
  // Apply snapshots (bootstrap mode)
380
667
  if (sub.bootstrap) {
381
668
  for (const snapshot of sub.snapshots ?? []) {
382
- const handler = shapes.getOrThrow(snapshot.table);
669
+ const handler = handlers.getOrThrow(snapshot.table);
670
+ const hasChunkRefs =
671
+ Array.isArray(snapshot.chunks) && snapshot.chunks.length > 0;
383
672
 
384
673
  // Call onSnapshotStart hook when starting a new snapshot
385
674
  if (snapshot.isFirstPage && handler.onSnapshotStart) {
@@ -390,7 +679,11 @@ export async function syncPullOnce<DB extends SyncClientDb>(
390
679
  });
391
680
  }
392
681
 
393
- await handler.applySnapshot({ trx }, snapshot);
682
+ if (hasChunkRefs) {
683
+ await applyChunkedSnapshot(transport, handler, trx, snapshot);
684
+ } else {
685
+ await handler.applySnapshot({ trx }, snapshot);
686
+ }
394
687
 
395
688
  // Call onSnapshotEnd hook when snapshot is complete
396
689
  if (snapshot.isLastPage && handler.onSnapshotEnd) {
@@ -405,7 +698,7 @@ export async function syncPullOnce<DB extends SyncClientDb>(
405
698
  // Apply incremental changes
406
699
  for (const commit of sub.commits) {
407
700
  for (const change of commit.changes) {
408
- const handler = shapes.getOrThrow(change.table);
701
+ const handler = handlers.getOrThrow(change.table);
409
702
  await handler.applyChange({ trx }, change);
410
703
  }
411
704
  }
@@ -422,13 +715,13 @@ export async function syncPullOnce<DB extends SyncClientDb>(
422
715
  : null
423
716
  : null;
424
717
 
425
- const shape = def?.shape ?? 'unknown';
718
+ const table = def?.table ?? 'unknown';
426
719
  await sql`
427
720
  insert into ${sql.table('sync_subscription_state')} (
428
721
  ${sql.join([
429
722
  sql.ref('state_id'),
430
723
  sql.ref('subscription_id'),
431
- sql.ref('shape'),
724
+ sql.ref('table'),
432
725
  sql.ref('scopes_json'),
433
726
  sql.ref('params_json'),
434
727
  sql.ref('cursor'),
@@ -441,7 +734,7 @@ export async function syncPullOnce<DB extends SyncClientDb>(
441
734
  ${sql.join([
442
735
  sql.val(stateId),
443
736
  sql.val(sub.id),
444
- sql.val(shape),
737
+ sql.val(table),
445
738
  sql.val(scopesJson),
446
739
  sql.val(paramsJson),
447
740
  sql.val(sub.nextCursor),
@@ -453,7 +746,7 @@ export async function syncPullOnce<DB extends SyncClientDb>(
453
746
  )
454
747
  on conflict (${sql.join([sql.ref('state_id'), sql.ref('subscription_id')])})
455
748
  do update set
456
- ${sql.ref('shape')} = ${sql.val(shape)},
749
+ ${sql.ref('table')} = ${sql.val(table)},
457
750
  ${sql.ref('scopes_json')} = ${sql.val(scopesJson)},
458
751
  ${sql.ref('params_json')} = ${sql.val(paramsJson)},
459
752
  ${sql.ref('cursor')} = ${sql.val(sub.nextCursor)},
@@ -466,3 +759,25 @@ export async function syncPullOnce<DB extends SyncClientDb>(
466
759
 
467
760
  return responseToApply;
468
761
  }
762
+
763
+ export async function syncPullOnce<DB extends SyncClientDb>(
764
+ db: Kysely<DB>,
765
+ transport: SyncTransport,
766
+ handlers: ClientTableRegistry<DB>,
767
+ options: SyncPullOnceOptions
768
+ ): Promise<SyncPullResponse> {
769
+ const pullState = await buildPullRequest(db, options);
770
+ const { clientId, ...pullBody } = pullState.request;
771
+ const combined = await transport.sync({ clientId, pull: pullBody });
772
+ if (!combined.pull) {
773
+ return { ok: true, subscriptions: [] };
774
+ }
775
+ return applyPullResponse(
776
+ db,
777
+ transport,
778
+ handlers,
779
+ options,
780
+ pullState,
781
+ combined.pull
782
+ );
783
+ }
@@ -32,6 +32,16 @@ export interface SyncPushOnceResult {
32
32
  response?: SyncPushResponse;
33
33
  }
34
34
 
35
+ interface TransportWithWsPush extends SyncTransport {
36
+ pushViaWs(request: SyncPushRequest): Promise<SyncPushResponse | null>;
37
+ }
38
+
39
+ function hasPushViaWs(
40
+ transport: SyncTransport
41
+ ): transport is TransportWithWsPush {
42
+ return 'pushViaWs' in transport && typeof transport.pushViaWs === 'function';
43
+ }
44
+
35
45
  function clonePushRequest(request: SyncPushRequest): SyncPushRequest {
36
46
  if (typeof structuredClone === 'function') return structuredClone(request);
37
47
  return JSON.parse(JSON.stringify(request)) as SyncPushRequest;
@@ -76,7 +86,29 @@ export async function syncPushOnce<DB extends SyncClientDb>(
76
86
 
77
87
  let res: SyncPushResponse;
78
88
  try {
79
- res = await transport.push(requestToSend);
89
+ // Try WS push first if the transport supports it
90
+ let wsResponse: SyncPushResponse | null = null;
91
+ if (hasPushViaWs(transport)) {
92
+ wsResponse = await transport.pushViaWs(requestToSend);
93
+ }
94
+
95
+ if (wsResponse) {
96
+ res = wsResponse;
97
+ } else {
98
+ // Fall back to HTTP
99
+ const combined = await transport.sync({
100
+ clientId: requestToSend.clientId,
101
+ push: {
102
+ clientCommitId: requestToSend.clientCommitId,
103
+ operations: requestToSend.operations,
104
+ schemaVersion: requestToSend.schemaVersion,
105
+ },
106
+ });
107
+ if (!combined.push) {
108
+ throw new Error('Server returned no push response');
109
+ }
110
+ res = combined.push;
111
+ }
80
112
  } catch (err) {
81
113
  const message = err instanceof Error ? err.message : 'Unknown error';
82
114
  // Treat transport exceptions as retryable. The sync loop already applies backoff,