@syncular/client 0.0.6-213 → 0.0.6-221

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,7 @@
1
1
  /**
2
2
  * @syncular/client - Sync pull engine
3
3
  */
4
+ import { bytesToReadableStream, decodeSnapshotRows, gunzipBytes, readAllBytesFromStream as readAllBytesFromCoreStream, } from '@syncular/core';
4
5
  import { sql } from 'kysely';
5
6
  import { getClientHandlerOrThrow, } from './handlers/collection.js';
6
7
  // Simple JSON serialization cache to avoid repeated stringification
@@ -30,14 +31,6 @@ function serializeJsonCached(obj) {
30
31
  function isGzipBytes(bytes) {
31
32
  return bytes.length >= 2 && bytes[0] === 0x1f && bytes[1] === 0x8b;
32
33
  }
33
- function bytesToReadableStream(bytes) {
34
- return new ReadableStream({
35
- start(controller) {
36
- controller.enqueue(bytes);
37
- controller.close();
38
- },
39
- });
40
- }
41
34
  function concatBytes(chunks) {
42
35
  if (chunks.length === 1) {
43
36
  return chunks[0] ?? new Uint8Array();
@@ -111,7 +104,8 @@ async function maybeGunzipStream(stream) {
111
104
  if (typeof DecompressionStream !== 'undefined') {
112
105
  return replayStream.pipeThrough(new DecompressionStream('gzip'));
113
106
  }
114
- throw new Error('Snapshot chunk appears gzip-compressed but gzip decompression is not available in this runtime');
107
+ const compressedBytes = await readAllBytesFromCoreStream(replayStream);
108
+ return bytesToReadableStream(await gunzipBytes(compressedBytes));
115
109
  }
116
110
  async function* decodeSnapshotRowStreamBatches(stream, batchSize) {
117
111
  const reader = stream.getReader();
@@ -249,45 +243,119 @@ async function readAllBytesFromStream(stream) {
249
243
  }
250
244
  return bytes;
251
245
  }
252
- async function materializeSnapshotChunkRows(transport, request, expectedHash, sha256Override) {
253
- const rawStream = await fetchSnapshotChunkStream(transport, request);
254
- const decodedStream = await maybeGunzipStream(rawStream);
255
- let streamForDecode = decodedStream;
256
- let chunkHashPromise = null;
257
- if (expectedHash) {
258
- const [hashStream, decodeStream] = decodedStream.tee();
259
- streamForDecode = decodeStream;
260
- chunkHashPromise = readAllBytesFromStream(hashStream).then((bytes) => computeSha256Hex(bytes, sha256Override));
261
- }
262
- const rows = [];
263
- let materializeError = null;
264
- try {
265
- for await (const batch of decodeSnapshotRowStreamBatches(streamForDecode, SNAPSHOT_APPLY_BATCH_ROWS)) {
266
- rows.push(...batch);
246
+ async function materializeSnapshotChunkRows(transport, request, expectedHash, sha256Override, trace) {
247
+ emitTrace(trace?.onTrace, {
248
+ stage: 'apply:chunk-materialize:start',
249
+ stateId: trace?.stateId,
250
+ subscriptionId: trace?.subscriptionId,
251
+ table: trace?.table,
252
+ chunkId: request.chunkId,
253
+ chunkIndex: trace?.chunkIndex,
254
+ });
255
+ const startedAt = Date.now();
256
+ if (transport.capabilities?.snapshotChunkReadMode === 'bytes' &&
257
+ transport.fetchSnapshotChunk) {
258
+ try {
259
+ let bytes = await transport.fetchSnapshotChunk(request);
260
+ if (isGzipBytes(bytes)) {
261
+ bytes = await gunzipBytes(bytes);
262
+ }
263
+ if (expectedHash) {
264
+ const actualHash = await computeSha256Hex(bytes, sha256Override);
265
+ if (actualHash !== expectedHash) {
266
+ throw new Error(`Snapshot chunk integrity check failed: expected sha256 ${expectedHash}, got ${actualHash}`);
267
+ }
268
+ }
269
+ const rows = decodeSnapshotRows(bytes);
270
+ emitTrace(trace?.onTrace, {
271
+ stage: 'apply:chunk-materialize:complete',
272
+ stateId: trace?.stateId,
273
+ subscriptionId: trace?.subscriptionId,
274
+ table: trace?.table,
275
+ chunkId: request.chunkId,
276
+ chunkIndex: trace?.chunkIndex,
277
+ rowCount: rows.length,
278
+ durationMs: Math.max(0, Date.now() - startedAt),
279
+ });
280
+ return rows;
281
+ }
282
+ catch (error) {
283
+ emitTrace(trace?.onTrace, {
284
+ stage: 'apply:chunk-materialize:error',
285
+ stateId: trace?.stateId,
286
+ subscriptionId: trace?.subscriptionId,
287
+ table: trace?.table,
288
+ chunkId: request.chunkId,
289
+ chunkIndex: trace?.chunkIndex,
290
+ durationMs: Math.max(0, Date.now() - startedAt),
291
+ errorMessage: error instanceof Error ? error.message : String(error),
292
+ });
293
+ throw error;
267
294
  }
268
295
  }
269
- catch (error) {
270
- materializeError = error;
271
- }
272
- if (chunkHashPromise) {
296
+ try {
297
+ const rawStream = await fetchSnapshotChunkStream(transport, request);
298
+ const decodedStream = await maybeGunzipStream(rawStream);
299
+ let streamForDecode = decodedStream;
300
+ let chunkHashPromise = null;
301
+ if (expectedHash) {
302
+ const [hashStream, decodeStream] = decodedStream.tee();
303
+ streamForDecode = decodeStream;
304
+ chunkHashPromise = readAllBytesFromStream(hashStream).then((bytes) => computeSha256Hex(bytes, sha256Override));
305
+ }
306
+ const rows = [];
307
+ let materializeError = null;
273
308
  try {
274
- const actualHash = await chunkHashPromise;
275
- if (!materializeError && actualHash !== expectedHash) {
276
- materializeError = new Error(`Snapshot chunk integrity check failed: expected sha256 ${expectedHash}, got ${actualHash}`);
309
+ for await (const batch of decodeSnapshotRowStreamBatches(streamForDecode, SNAPSHOT_APPLY_BATCH_ROWS)) {
310
+ rows.push(...batch);
277
311
  }
278
312
  }
279
- catch (hashError) {
280
- if (!materializeError) {
281
- materializeError = hashError;
313
+ catch (error) {
314
+ materializeError = error;
315
+ }
316
+ if (chunkHashPromise) {
317
+ try {
318
+ const actualHash = await chunkHashPromise;
319
+ if (!materializeError && actualHash !== expectedHash) {
320
+ materializeError = new Error(`Snapshot chunk integrity check failed: expected sha256 ${expectedHash}, got ${actualHash}`);
321
+ }
282
322
  }
323
+ catch (hashError) {
324
+ if (!materializeError) {
325
+ materializeError = hashError;
326
+ }
327
+ }
328
+ }
329
+ if (materializeError) {
330
+ throw materializeError;
283
331
  }
332
+ emitTrace(trace?.onTrace, {
333
+ stage: 'apply:chunk-materialize:complete',
334
+ stateId: trace?.stateId,
335
+ subscriptionId: trace?.subscriptionId,
336
+ table: trace?.table,
337
+ chunkId: request.chunkId,
338
+ chunkIndex: trace?.chunkIndex,
339
+ rowCount: rows.length,
340
+ durationMs: Math.max(0, Date.now() - startedAt),
341
+ });
342
+ return rows;
284
343
  }
285
- if (materializeError) {
286
- throw materializeError;
344
+ catch (error) {
345
+ emitTrace(trace?.onTrace, {
346
+ stage: 'apply:chunk-materialize:error',
347
+ stateId: trace?.stateId,
348
+ subscriptionId: trace?.subscriptionId,
349
+ table: trace?.table,
350
+ chunkId: request.chunkId,
351
+ chunkIndex: trace?.chunkIndex,
352
+ durationMs: Math.max(0, Date.now() - startedAt),
353
+ errorMessage: error instanceof Error ? error.message : String(error),
354
+ });
355
+ throw error;
287
356
  }
288
- return rows;
289
357
  }
290
- async function materializeChunkedSnapshots(transport, response, sha256Override) {
358
+ async function materializeChunkedSnapshots(transport, response, sha256Override, trace) {
291
359
  const subscriptions = [];
292
360
  for (const sub of response.subscriptions) {
293
361
  if (!sub.bootstrap || !sub.snapshots || sub.snapshots.length === 0) {
@@ -302,11 +370,20 @@ async function materializeChunkedSnapshots(transport, response, sha256Override)
302
370
  continue;
303
371
  }
304
372
  const rows = [];
305
- for (const chunk of chunks) {
373
+ for (let chunkIndex = 0; chunkIndex < chunks.length; chunkIndex += 1) {
374
+ const chunk = chunks[chunkIndex];
375
+ if (!chunk)
376
+ continue;
306
377
  const chunkRows = await materializeSnapshotChunkRows(transport, {
307
378
  chunkId: chunk.id,
308
379
  scopeValues: sub.scopes,
309
- }, chunk.sha256, sha256Override);
380
+ }, chunk.sha256, sha256Override, {
381
+ stateId: trace?.stateId ?? 'default',
382
+ subscriptionId: sub.id,
383
+ table: snapshot.table,
384
+ chunkIndex,
385
+ onTrace: trace?.onTrace,
386
+ });
310
387
  rows.push(...chunkRows);
311
388
  }
312
389
  snapshots.push({
@@ -322,7 +399,7 @@ async function materializeChunkedSnapshots(transport, response, sha256Override)
322
399
  }
323
400
  return { ...response, subscriptions };
324
401
  }
325
- async function applyChunkedSnapshot(transport, handler, trx, snapshot, scopeValues, sha256Override) {
402
+ async function applyChunkedSnapshot(transport, handler, trx, snapshot, scopeValues, sha256Override, trace) {
326
403
  const chunks = snapshot.chunks ?? [];
327
404
  if (chunks.length === 0) {
328
405
  await handler.applySnapshot({ trx }, snapshot);
@@ -333,69 +410,105 @@ async function applyChunkedSnapshot(transport, handler, trx, snapshot, scopeValu
333
410
  const chunk = chunks[chunkIndex];
334
411
  if (!chunk)
335
412
  continue;
336
- const rawStream = await fetchSnapshotChunkStream(transport, {
413
+ emitTrace(trace?.onTrace, {
414
+ stage: 'apply:chunk-materialize:start',
415
+ stateId: trace?.stateId,
416
+ subscriptionId: trace?.subscriptionId,
417
+ table: snapshot.table,
337
418
  chunkId: chunk.id,
338
- scopeValues,
419
+ chunkIndex,
339
420
  });
340
- const decodedStream = await maybeGunzipStream(rawStream);
341
- let streamForDecode = decodedStream;
342
- let chunkHashPromise = null;
343
- if (chunk.sha256) {
344
- const [hashStream, decodeStream] = decodedStream.tee();
345
- streamForDecode = decodeStream;
346
- chunkHashPromise = readAllBytesFromStream(hashStream).then((bytes) => computeSha256Hex(bytes, sha256Override));
347
- }
348
- const rowBatchIterator = decodeSnapshotRowStreamBatches(streamForDecode, SNAPSHOT_APPLY_BATCH_ROWS);
349
- let pendingBatch = null;
350
- let applyError = null;
421
+ const chunkStartedAt = Date.now();
351
422
  try {
352
- // eslint-disable-next-line no-await-in-loop
353
- for await (const batch of rowBatchIterator) {
423
+ const rawStream = await fetchSnapshotChunkStream(transport, {
424
+ chunkId: chunk.id,
425
+ scopeValues,
426
+ });
427
+ const decodedStream = await maybeGunzipStream(rawStream);
428
+ let streamForDecode = decodedStream;
429
+ let chunkHashPromise = null;
430
+ if (chunk.sha256) {
431
+ const [hashStream, decodeStream] = decodedStream.tee();
432
+ streamForDecode = decodeStream;
433
+ chunkHashPromise = readAllBytesFromStream(hashStream).then((bytes) => computeSha256Hex(bytes, sha256Override));
434
+ }
435
+ const rowBatchIterator = decodeSnapshotRowStreamBatches(streamForDecode, SNAPSHOT_APPLY_BATCH_ROWS);
436
+ let pendingBatch = null;
437
+ let applyError = null;
438
+ let chunkRowCount = 0;
439
+ try {
440
+ // eslint-disable-next-line no-await-in-loop
441
+ for await (const batch of rowBatchIterator) {
442
+ chunkRowCount += batch.length;
443
+ if (pendingBatch) {
444
+ // eslint-disable-next-line no-await-in-loop
445
+ await handler.applySnapshot({ trx }, {
446
+ ...snapshot,
447
+ rows: pendingBatch,
448
+ chunks: undefined,
449
+ isFirstPage: nextIsFirstPage,
450
+ isLastPage: false,
451
+ });
452
+ nextIsFirstPage = false;
453
+ }
454
+ pendingBatch = batch;
455
+ }
354
456
  if (pendingBatch) {
457
+ const isLastChunk = chunkIndex === chunks.length - 1;
355
458
  // eslint-disable-next-line no-await-in-loop
356
459
  await handler.applySnapshot({ trx }, {
357
460
  ...snapshot,
358
461
  rows: pendingBatch,
359
462
  chunks: undefined,
360
463
  isFirstPage: nextIsFirstPage,
361
- isLastPage: false,
464
+ isLastPage: isLastChunk ? snapshot.isLastPage : false,
362
465
  });
363
466
  nextIsFirstPage = false;
364
467
  }
365
- pendingBatch = batch;
366
468
  }
367
- if (pendingBatch) {
368
- const isLastChunk = chunkIndex === chunks.length - 1;
369
- // eslint-disable-next-line no-await-in-loop
370
- await handler.applySnapshot({ trx }, {
371
- ...snapshot,
372
- rows: pendingBatch,
373
- chunks: undefined,
374
- isFirstPage: nextIsFirstPage,
375
- isLastPage: isLastChunk ? snapshot.isLastPage : false,
376
- });
377
- nextIsFirstPage = false;
469
+ catch (error) {
470
+ applyError = error;
378
471
  }
379
- }
380
- catch (error) {
381
- applyError = error;
382
- }
383
- if (chunkHashPromise) {
384
- try {
385
- // eslint-disable-next-line no-await-in-loop
386
- const actualHash = await chunkHashPromise;
387
- if (!applyError && actualHash !== chunk.sha256) {
388
- applyError = new Error(`Snapshot chunk integrity check failed: expected sha256 ${chunk.sha256}, got ${actualHash}`);
472
+ if (chunkHashPromise) {
473
+ try {
474
+ // eslint-disable-next-line no-await-in-loop
475
+ const actualHash = await chunkHashPromise;
476
+ if (!applyError && actualHash !== chunk.sha256) {
477
+ applyError = new Error(`Snapshot chunk integrity check failed: expected sha256 ${chunk.sha256}, got ${actualHash}`);
478
+ }
389
479
  }
390
- }
391
- catch (hashError) {
392
- if (!applyError) {
393
- applyError = hashError;
480
+ catch (hashError) {
481
+ if (!applyError) {
482
+ applyError = hashError;
483
+ }
394
484
  }
395
485
  }
486
+ if (applyError) {
487
+ throw applyError;
488
+ }
489
+ emitTrace(trace?.onTrace, {
490
+ stage: 'apply:chunk-materialize:complete',
491
+ stateId: trace?.stateId,
492
+ subscriptionId: trace?.subscriptionId,
493
+ table: snapshot.table,
494
+ chunkId: chunk.id,
495
+ chunkIndex,
496
+ rowCount: chunkRowCount,
497
+ durationMs: Math.max(0, Date.now() - chunkStartedAt),
498
+ });
396
499
  }
397
- if (applyError) {
398
- throw applyError;
500
+ catch (error) {
501
+ emitTrace(trace?.onTrace, {
502
+ stage: 'apply:chunk-materialize:error',
503
+ stateId: trace?.stateId,
504
+ subscriptionId: trace?.subscriptionId,
505
+ table: snapshot.table,
506
+ chunkId: chunk.id,
507
+ chunkIndex,
508
+ durationMs: Math.max(0, Date.now() - chunkStartedAt),
509
+ errorMessage: error instanceof Error ? error.message : String(error),
510
+ });
511
+ throw error;
399
512
  }
400
513
  }
401
514
  }
@@ -423,6 +536,50 @@ function parseBootstrapState(value) {
423
536
  return null;
424
537
  }
425
538
  }
539
+ function normalizeBootstrapPhase(value) {
540
+ if (value === undefined)
541
+ return 0;
542
+ return Number.isFinite(value) ? Math.max(0, Math.trunc(value)) : 0;
543
+ }
544
+ function isSubscriptionReady(row) {
545
+ return (row?.status === 'active' &&
546
+ parseBootstrapState(row.bootstrap_state_json) === null &&
547
+ row.cursor >= 0);
548
+ }
549
+ function isSubscriptionBootstrapping(row) {
550
+ return (row?.status === 'active' &&
551
+ parseBootstrapState(row.bootstrap_state_json) !== null);
552
+ }
553
+ function resolveActiveBootstrapPhase(subscriptions, existingById) {
554
+ let lowestPendingPhase = null;
555
+ for (const subscription of subscriptions) {
556
+ const phase = normalizeBootstrapPhase(subscription.bootstrapPhase);
557
+ if (isSubscriptionReady(existingById.get(subscription.id))) {
558
+ continue;
559
+ }
560
+ if (lowestPendingPhase === null || phase < lowestPendingPhase) {
561
+ lowestPendingPhase = phase;
562
+ }
563
+ }
564
+ return lowestPendingPhase;
565
+ }
566
+ function selectPullSubscriptions(subscriptions, existingById) {
567
+ const activePhase = resolveActiveBootstrapPhase(subscriptions, existingById);
568
+ if (activePhase === null) {
569
+ return [...subscriptions];
570
+ }
571
+ return subscriptions.filter((subscription) => {
572
+ const phase = normalizeBootstrapPhase(subscription.bootstrapPhase);
573
+ const existing = existingById.get(subscription.id);
574
+ if (phase <= activePhase)
575
+ return true;
576
+ if (isSubscriptionReady(existing))
577
+ return true;
578
+ if (isSubscriptionBootstrapping(existing))
579
+ return true;
580
+ return false;
581
+ });
582
+ }
426
583
  function parseScopeValuesJson(value) {
427
584
  if (!value)
428
585
  return {};
@@ -509,6 +666,26 @@ function resolveBootstrapClearScopes(previous, next) {
509
666
  }
510
667
  return narrowed;
511
668
  }
669
+ function emitTrace(onTrace, event) {
670
+ onTrace?.({
671
+ timestamp: Date.now(),
672
+ ...event,
673
+ });
674
+ }
675
+ function countSubscriptionRows(subscription) {
676
+ if (!subscription.bootstrap)
677
+ return undefined;
678
+ const snapshots = subscription.snapshots ?? [];
679
+ if (snapshots.length === 0)
680
+ return 0;
681
+ return snapshots.reduce((sum, snapshot) => sum + (snapshot.rows?.length ?? 0), 0);
682
+ }
683
+ function countSubscriptionChunks(subscription) {
684
+ if (!subscription.bootstrap)
685
+ return undefined;
686
+ const snapshots = subscription.snapshots ?? [];
687
+ return snapshots.reduce((sum, snapshot) => sum + (snapshot.chunks?.length ?? 0), 0);
688
+ }
512
689
  /**
513
690
  * Build a pull request from subscription state. Exported for use
514
691
  * by the combined sync path in sync-loop.ts.
@@ -534,19 +711,27 @@ export async function buildPullRequest(db, options) {
534
711
  const existingById = new Map();
535
712
  for (const row of existing)
536
713
  existingById.set(row.subscription_id, row);
714
+ const configuredSubscriptions = options.subscriptions ?? [];
715
+ const selectedSubscriptions = selectPullSubscriptions(configuredSubscriptions, existingById);
537
716
  const request = {
538
717
  clientId: options.clientId,
539
718
  limitCommits: options.limitCommits ?? 50,
540
719
  limitSnapshotRows: options.limitSnapshotRows ?? 1000,
541
720
  maxSnapshotPages: options.maxSnapshotPages ?? 4,
542
721
  dedupeRows: options.dedupeRows,
543
- subscriptions: (options.subscriptions ?? []).map((sub) => ({
722
+ subscriptions: selectedSubscriptions.map((sub) => ({
544
723
  ...sub,
545
724
  cursor: Math.max(-1, existingById.get(sub.id)?.cursor ?? -1),
546
725
  bootstrapState: parseBootstrapState(existingById.get(sub.id)?.bootstrap_state_json),
547
726
  })),
548
727
  };
549
- return { request, existing, existingById, stateId };
728
+ return {
729
+ request,
730
+ existing,
731
+ existingById,
732
+ stateId,
733
+ configuredSubscriptions,
734
+ };
550
735
  }
551
736
  export function createFollowupPullState(pullState, response) {
552
737
  const responseById = new Map();
@@ -585,9 +770,10 @@ export function createFollowupPullState(pullState, response) {
585
770
  nextExisting.push(nextRow);
586
771
  nextExistingById.set(nextRow.subscription_id, nextRow);
587
772
  }
773
+ const nextSelectedSubscriptions = selectPullSubscriptions(pullState.configuredSubscriptions, nextExistingById);
588
774
  const nextRequest = {
589
775
  ...pullState.request,
590
- subscriptions: (pullState.request.subscriptions ?? []).map((sub) => {
776
+ subscriptions: nextSelectedSubscriptions.map((sub) => {
591
777
  const row = nextExistingById.get(sub.id);
592
778
  return {
593
779
  ...sub,
@@ -601,6 +787,7 @@ export function createFollowupPullState(pullState, response) {
601
787
  existing: nextExisting,
602
788
  existingById: nextExistingById,
603
789
  stateId: pullState.stateId,
790
+ configuredSubscriptions: pullState.configuredSubscriptions,
604
791
  };
605
792
  }
606
793
  export async function applyIncrementalCommitChanges(handlers, trx, args) {
@@ -645,9 +832,14 @@ export async function applyPullResponse(db, transport, handlers, options, pullSt
645
832
  clientId: options.clientId,
646
833
  };
647
834
  const plugins = options.plugins ?? [];
648
- const requiresMaterializedSnapshots = plugins.some((plugin) => !!plugin.afterPull);
835
+ const requiresMaterializedSnapshots = plugins.some((plugin) => !!plugin.afterPull) ||
836
+ transport.capabilities?.preferMaterializedSnapshots === true;
837
+ const bootstrapApplyMode = resolveBootstrapApplyMode(options, rawResponse, transport.capabilities);
649
838
  let responseToApply = requiresMaterializedSnapshots
650
- ? await materializeChunkedSnapshots(transport, rawResponse, options.sha256)
839
+ ? await materializeChunkedSnapshots(transport, rawResponse, options.sha256, {
840
+ stateId,
841
+ onTrace: options.onTrace,
842
+ })
651
843
  : rawResponse;
652
844
  for (const plugin of plugins) {
653
845
  if (!plugin.afterPull)
@@ -657,227 +849,423 @@ export async function applyPullResponse(db, transport, handlers, options, pullSt
657
849
  response: responseToApply,
658
850
  });
659
851
  }
852
+ const subsById = new Map();
853
+ for (const s of options.subscriptions ?? [])
854
+ subsById.set(s.id, s);
660
855
  await db.transaction().execute(async (trx) => {
661
- const desiredIds = new Set((options.subscriptions ?? []).map((s) => s.id));
662
- // Remove local data for subscriptions that are no longer desired.
663
- for (const row of existing) {
664
- if (desiredIds.has(row.subscription_id))
665
- continue;
666
- // Clear data for this table matching the subscription's scopes
667
- if (row.table) {
856
+ await removeUndesiredSubscriptions(trx, handlers, existing, options.subscriptions ?? [], stateId);
857
+ });
858
+ if (bootstrapApplyMode === 'per-subscription') {
859
+ for (const sub of responseToApply.subscriptions) {
860
+ emitTrace(options.onTrace, {
861
+ stage: 'apply:transaction:start',
862
+ stateId,
863
+ transactionMode: bootstrapApplyMode,
864
+ subscriptionIds: [sub.id],
865
+ subscriptionCount: 1,
866
+ });
867
+ const transactionStartedAt = Date.now();
868
+ try {
869
+ await db.transaction().execute(async (trx) => {
870
+ await applySubscriptionResponse({
871
+ trx,
872
+ handlers,
873
+ transport,
874
+ options,
875
+ stateId,
876
+ existingById,
877
+ subsById,
878
+ sub,
879
+ });
880
+ });
881
+ emitTrace(options.onTrace, {
882
+ stage: 'apply:transaction:complete',
883
+ stateId,
884
+ transactionMode: bootstrapApplyMode,
885
+ subscriptionIds: [sub.id],
886
+ subscriptionCount: 1,
887
+ durationMs: Math.max(0, Date.now() - transactionStartedAt),
888
+ });
889
+ }
890
+ catch (error) {
891
+ emitTrace(options.onTrace, {
892
+ stage: 'apply:transaction:error',
893
+ stateId,
894
+ transactionMode: bootstrapApplyMode,
895
+ subscriptionIds: [sub.id],
896
+ subscriptionCount: 1,
897
+ durationMs: Math.max(0, Date.now() - transactionStartedAt),
898
+ errorMessage: error instanceof Error ? error.message : String(error),
899
+ });
900
+ throw error;
901
+ }
902
+ }
903
+ }
904
+ else {
905
+ emitTrace(options.onTrace, {
906
+ stage: 'apply:transaction:start',
907
+ stateId,
908
+ transactionMode: bootstrapApplyMode,
909
+ subscriptionIds: responseToApply.subscriptions.map((sub) => sub.id),
910
+ subscriptionCount: responseToApply.subscriptions.length,
911
+ });
912
+ const transactionStartedAt = Date.now();
913
+ try {
914
+ await db.transaction().execute(async (trx) => {
915
+ for (const sub of responseToApply.subscriptions) {
916
+ await applySubscriptionResponse({
917
+ trx,
918
+ handlers,
919
+ transport,
920
+ options,
921
+ stateId,
922
+ existingById,
923
+ subsById,
924
+ sub,
925
+ });
926
+ }
927
+ });
928
+ emitTrace(options.onTrace, {
929
+ stage: 'apply:transaction:complete',
930
+ stateId,
931
+ transactionMode: bootstrapApplyMode,
932
+ subscriptionIds: responseToApply.subscriptions.map((sub) => sub.id),
933
+ subscriptionCount: responseToApply.subscriptions.length,
934
+ durationMs: Math.max(0, Date.now() - transactionStartedAt),
935
+ });
936
+ }
937
+ catch (error) {
938
+ emitTrace(options.onTrace, {
939
+ stage: 'apply:transaction:error',
940
+ stateId,
941
+ transactionMode: bootstrapApplyMode,
942
+ subscriptionIds: responseToApply.subscriptions.map((sub) => sub.id),
943
+ subscriptionCount: responseToApply.subscriptions.length,
944
+ durationMs: Math.max(0, Date.now() - transactionStartedAt),
945
+ errorMessage: error instanceof Error ? error.message : String(error),
946
+ });
947
+ throw error;
948
+ }
949
+ }
950
+ return responseToApply;
951
+ }
952
+ function resolveBootstrapApplyMode(options, response, capabilities) {
953
+ const mode = options.bootstrapApplyMode ?? 'auto';
954
+ if (mode === 'single-transaction' || mode === 'per-subscription') {
955
+ return mode;
956
+ }
957
+ if (!response.subscriptions.some((sub) => sub.bootstrap)) {
958
+ return 'single-transaction';
959
+ }
960
+ if (capabilities?.preferredBootstrapApplyMode) {
961
+ return capabilities.preferredBootstrapApplyMode;
962
+ }
963
+ if (capabilities?.snapshotChunkReadMode === 'bytes' ||
964
+ capabilities?.gzipDecompressionMode === 'buffered') {
965
+ return 'per-subscription';
966
+ }
967
+ return 'single-transaction';
968
+ }
969
+ async function removeUndesiredSubscriptions(trx, handlers, existing, desiredSubscriptions, stateId) {
970
+ const desiredIds = new Set(desiredSubscriptions.map((subscription) => subscription.id));
971
+ for (const row of existing) {
972
+ if (desiredIds.has(row.subscription_id))
973
+ continue;
974
+ if (row.table) {
975
+ try {
976
+ const scopes = row.scopes_json
977
+ ? typeof row.scopes_json === 'string'
978
+ ? JSON.parse(row.scopes_json)
979
+ : row.scopes_json
980
+ : {};
981
+ await getClientHandlerOrThrow(handlers, row.table).clearAll({
982
+ trx,
983
+ scopes,
984
+ });
985
+ }
986
+ catch {
987
+ // ignore missing table handler
988
+ }
989
+ }
990
+ await sql `
991
+ delete from ${sql.table('sync_subscription_state')}
992
+ where ${sql.ref('state_id')} = ${sql.val(stateId)}
993
+ and ${sql.ref('subscription_id')} = ${sql.val(row.subscription_id)}
994
+ `.execute(trx);
995
+ }
996
+ }
997
+ async function readLatestSubscriptionCursor(trx, stateId, subscriptionId) {
998
+ const result = await sql `
999
+ select ${sql.ref('cursor')} as cursor
1000
+ from ${sql.table('sync_subscription_state')}
1001
+ where ${sql.ref('state_id')} = ${sql.val(stateId)}
1002
+ and ${sql.ref('subscription_id')} = ${sql.val(subscriptionId)}
1003
+ limit 1
1004
+ `.execute(trx);
1005
+ const raw = result.rows[0]?.cursor;
1006
+ return typeof raw === 'number'
1007
+ ? raw
1008
+ : raw === null || raw === undefined
1009
+ ? null
1010
+ : Number(raw);
1011
+ }
1012
+ async function applySubscriptionResponse(args) {
1013
+ const { trx, handlers, transport, options, stateId, existingById, subsById, sub, } = args;
1014
+ const def = subsById.get(sub.id);
1015
+ const prev = existingById.get(sub.id);
1016
+ const prevCursorRaw = prev?.cursor;
1017
+ const prevCursor = typeof prevCursorRaw === 'number'
1018
+ ? prevCursorRaw
1019
+ : prevCursorRaw === null || prevCursorRaw === undefined
1020
+ ? null
1021
+ : Number(prevCursorRaw);
1022
+ const latestCursor = await readLatestSubscriptionCursor(trx, stateId, sub.id);
1023
+ const effectiveCursor = prevCursor !== null &&
1024
+ Number.isFinite(prevCursor) &&
1025
+ latestCursor !== null &&
1026
+ Number.isFinite(latestCursor)
1027
+ ? Math.max(prevCursor, latestCursor)
1028
+ : prevCursor !== null && Number.isFinite(prevCursor)
1029
+ ? prevCursor
1030
+ : latestCursor !== null && Number.isFinite(latestCursor)
1031
+ ? latestCursor
1032
+ : null;
1033
+ const staleIncrementalResponse = !sub.bootstrap &&
1034
+ effectiveCursor !== null &&
1035
+ sub.nextCursor < effectiveCursor;
1036
+ const applyStartedAt = Date.now();
1037
+ emitTrace(options.onTrace, {
1038
+ stage: 'apply:subscription:start',
1039
+ stateId,
1040
+ subscriptionId: sub.id,
1041
+ table: def?.table ?? prev?.table,
1042
+ bootstrap: sub.bootstrap,
1043
+ snapshotCount: sub.snapshots?.length ?? 0,
1044
+ commitCount: sub.commits?.length ?? 0,
1045
+ chunkCount: countSubscriptionChunks(sub),
1046
+ rowCount: countSubscriptionRows(sub),
1047
+ nextCursor: sub.nextCursor,
1048
+ });
1049
+ if (staleIncrementalResponse) {
1050
+ emitTrace(options.onTrace, {
1051
+ stage: 'apply:subscription:complete',
1052
+ stateId,
1053
+ subscriptionId: sub.id,
1054
+ table: def?.table ?? prev?.table,
1055
+ bootstrap: sub.bootstrap,
1056
+ snapshotCount: sub.snapshots?.length ?? 0,
1057
+ commitCount: sub.commits?.length ?? 0,
1058
+ chunkCount: countSubscriptionChunks(sub),
1059
+ rowCount: countSubscriptionRows(sub),
1060
+ nextCursor: sub.nextCursor,
1061
+ durationMs: Math.max(0, Date.now() - applyStartedAt),
1062
+ });
1063
+ return;
1064
+ }
1065
+ try {
1066
+ if (sub.status === 'revoked') {
1067
+ if (prev?.table) {
668
1068
  try {
669
- const scopes = row.scopes_json
670
- ? typeof row.scopes_json === 'string'
671
- ? JSON.parse(row.scopes_json)
672
- : row.scopes_json
673
- : {};
674
- await getClientHandlerOrThrow(handlers, row.table).clearAll({
1069
+ const scopes = parseScopeValuesJson(prev.scopes_json);
1070
+ await getClientHandlerOrThrow(handlers, prev.table).clearAll({
675
1071
  trx,
676
1072
  scopes,
677
1073
  });
678
1074
  }
679
1075
  catch {
680
- // ignore missing table handler
1076
+ // ignore missing handler
681
1077
  }
682
1078
  }
683
1079
  await sql `
684
- delete from ${sql.table('sync_subscription_state')}
685
- where ${sql.ref('state_id')} = ${sql.val(stateId)}
686
- and ${sql.ref('subscription_id')} = ${sql.val(row.subscription_id)}
687
- `.execute(trx);
688
- }
689
- const subsById = new Map();
690
- for (const s of options.subscriptions ?? [])
691
- subsById.set(s.id, s);
692
- const latestStateRows = await sql `
693
- select
694
- ${sql.ref('subscription_id')} as subscription_id,
695
- ${sql.ref('cursor')} as cursor
696
- from ${sql.table('sync_subscription_state')}
697
- where ${sql.ref('state_id')} = ${sql.val(stateId)}
698
- `.execute(trx);
699
- const latestCursorBySubscriptionId = new Map();
700
- for (const row of latestStateRows.rows) {
701
- const raw = row.cursor;
702
- const cursor = typeof raw === 'number'
703
- ? raw
704
- : raw === null || raw === undefined
705
- ? null
706
- : Number(raw);
707
- latestCursorBySubscriptionId.set(row.subscription_id, cursor);
1080
+ delete from ${sql.table('sync_subscription_state')}
1081
+ where ${sql.ref('state_id')} = ${sql.val(stateId)}
1082
+ and ${sql.ref('subscription_id')} = ${sql.val(sub.id)}
1083
+ `.execute(trx);
1084
+ emitTrace(options.onTrace, {
1085
+ stage: 'apply:subscription:complete',
1086
+ stateId,
1087
+ subscriptionId: sub.id,
1088
+ table: def?.table ?? prev?.table,
1089
+ bootstrap: sub.bootstrap,
1090
+ snapshotCount: sub.snapshots?.length ?? 0,
1091
+ commitCount: sub.commits?.length ?? 0,
1092
+ chunkCount: countSubscriptionChunks(sub),
1093
+ rowCount: countSubscriptionRows(sub),
1094
+ nextCursor: null,
1095
+ durationMs: Math.max(0, Date.now() - applyStartedAt),
1096
+ });
1097
+ return;
708
1098
  }
709
- for (const sub of responseToApply.subscriptions) {
710
- const def = subsById.get(sub.id);
711
- const prev = existingById.get(sub.id);
712
- const prevCursorRaw = prev?.cursor;
713
- const prevCursor = typeof prevCursorRaw === 'number'
714
- ? prevCursorRaw
715
- : prevCursorRaw === null || prevCursorRaw === undefined
716
- ? null
717
- : Number(prevCursorRaw);
718
- const latestCursorRaw = latestCursorBySubscriptionId.get(sub.id);
719
- const latestCursor = typeof latestCursorRaw === 'number'
720
- ? latestCursorRaw
721
- : latestCursorRaw === null || latestCursorRaw === undefined
722
- ? null
723
- : Number(latestCursorRaw);
724
- const effectiveCursor = prevCursor !== null &&
725
- Number.isFinite(prevCursor) &&
726
- latestCursor !== null &&
727
- Number.isFinite(latestCursor)
728
- ? Math.max(prevCursor, latestCursor)
729
- : prevCursor !== null && Number.isFinite(prevCursor)
730
- ? prevCursor
731
- : latestCursor !== null && Number.isFinite(latestCursor)
732
- ? latestCursor
733
- : null;
734
- const staleIncrementalResponse = !sub.bootstrap &&
735
- effectiveCursor !== null &&
736
- sub.nextCursor < effectiveCursor;
737
- // Guard against out-of-order duplicate pull responses from older requests.
738
- if (staleIncrementalResponse) {
739
- continue;
740
- }
741
- // Revoked: clear data and drop the subscription row.
742
- if (sub.status === 'revoked') {
743
- if (prev?.table) {
744
- try {
745
- const scopes = parseScopeValuesJson(prev.scopes_json);
746
- await getClientHandlerOrThrow(handlers, prev.table).clearAll({
747
- trx,
748
- scopes,
749
- });
750
- }
751
- catch {
752
- // ignore missing handler
753
- }
1099
+ const nextScopes = sub.scopes ?? def?.scopes ?? {};
1100
+ const previousScopes = parseScopeValuesJson(prev?.scopes_json);
1101
+ const scopesChanged = !scopeValuesEqual(previousScopes, nextScopes);
1102
+ if (sub.bootstrap && prev?.table && scopesChanged) {
1103
+ try {
1104
+ const clearScopes = resolveBootstrapClearScopes(previousScopes, nextScopes);
1105
+ if (clearScopes !== 'none') {
1106
+ await getClientHandlerOrThrow(handlers, prev.table).clearAll({
1107
+ trx,
1108
+ scopes: clearScopes ?? previousScopes,
1109
+ });
754
1110
  }
755
- await sql `
756
- delete from ${sql.table('sync_subscription_state')}
757
- where ${sql.ref('state_id')} = ${sql.val(stateId)}
758
- and ${sql.ref('subscription_id')} = ${sql.val(sub.id)}
759
- `.execute(trx);
760
- latestCursorBySubscriptionId.delete(sub.id);
761
- continue;
762
1111
  }
763
- const nextScopes = sub.scopes ?? def?.scopes ?? {};
764
- const previousScopes = parseScopeValuesJson(prev?.scopes_json);
765
- const scopesChanged = !scopeValuesEqual(previousScopes, nextScopes);
766
- if (sub.bootstrap && prev?.table && scopesChanged) {
767
- try {
768
- const clearScopes = resolveBootstrapClearScopes(previousScopes, nextScopes);
769
- if (clearScopes !== 'none') {
770
- await getClientHandlerOrThrow(handlers, prev.table).clearAll({
771
- trx,
772
- scopes: clearScopes ?? previousScopes,
773
- });
774
- }
1112
+ catch {
1113
+ // ignore missing handler
1114
+ }
1115
+ }
1116
+ if (sub.bootstrap) {
1117
+ for (const snapshot of sub.snapshots ?? []) {
1118
+ const handler = getClientHandlerOrThrow(handlers, snapshot.table);
1119
+ const hasChunkRefs = Array.isArray(snapshot.chunks) && snapshot.chunks.length > 0;
1120
+ if (snapshot.isFirstPage && handler.onSnapshotStart) {
1121
+ await handler.onSnapshotStart({
1122
+ trx,
1123
+ table: snapshot.table,
1124
+ scopes: sub.scopes,
1125
+ });
775
1126
  }
776
- catch {
777
- // ignore missing handler
1127
+ if (hasChunkRefs) {
1128
+ await applyChunkedSnapshot(transport, handler, trx, snapshot, sub.scopes, options.sha256, {
1129
+ stateId,
1130
+ subscriptionId: sub.id,
1131
+ onTrace: options.onTrace,
1132
+ });
778
1133
  }
779
- }
780
- // Apply snapshots (bootstrap mode)
781
- if (sub.bootstrap) {
782
- for (const snapshot of sub.snapshots ?? []) {
783
- const handler = getClientHandlerOrThrow(handlers, snapshot.table);
784
- const hasChunkRefs = Array.isArray(snapshot.chunks) && snapshot.chunks.length > 0;
785
- // Call onSnapshotStart hook when starting a new snapshot
786
- if (snapshot.isFirstPage && handler.onSnapshotStart) {
787
- await handler.onSnapshotStart({
788
- trx,
789
- table: snapshot.table,
790
- scopes: sub.scopes,
791
- });
792
- }
793
- if (hasChunkRefs) {
794
- await applyChunkedSnapshot(transport, handler, trx, snapshot, sub.scopes, options.sha256);
795
- }
796
- else {
797
- await handler.applySnapshot({ trx }, snapshot);
798
- }
799
- // Call onSnapshotEnd hook when snapshot is complete
800
- if (snapshot.isLastPage && handler.onSnapshotEnd) {
801
- await handler.onSnapshotEnd({
802
- trx,
803
- table: snapshot.table,
804
- scopes: sub.scopes,
805
- });
806
- }
1134
+ else {
1135
+ await handler.applySnapshot({ trx }, snapshot);
807
1136
  }
808
- }
809
- else {
810
- // Apply incremental changes
811
- for (const commit of sub.commits) {
812
- await applyIncrementalCommitChanges(handlers, trx, {
813
- changes: commit.changes,
814
- commitSeq: commit.commitSeq ?? null,
815
- actorId: commit.actorId ?? null,
816
- createdAt: commit.createdAt ?? null,
1137
+ if (snapshot.isLastPage && handler.onSnapshotEnd) {
1138
+ await handler.onSnapshotEnd({
1139
+ trx,
1140
+ table: snapshot.table,
1141
+ scopes: sub.scopes,
817
1142
  });
818
1143
  }
819
1144
  }
820
- // Persist subscription cursor + metadata.
821
- // Use cached JSON serialization to avoid repeated stringification
822
- const now = Date.now();
823
- const paramsJson = serializeJsonCached(def?.params ?? {});
824
- const scopesJson = serializeJsonCached(nextScopes);
825
- const bootstrapStateJson = sub.bootstrap
826
- ? sub.bootstrapState
827
- ? serializeJsonCached(sub.bootstrapState)
828
- : null
829
- : null;
830
- const table = def?.table ?? 'unknown';
831
- await sql `
832
- insert into ${sql.table('sync_subscription_state')} (
833
- ${sql.join([
834
- sql.ref('state_id'),
835
- sql.ref('subscription_id'),
836
- sql.ref('table'),
837
- sql.ref('scopes_json'),
838
- sql.ref('params_json'),
839
- sql.ref('cursor'),
840
- sql.ref('bootstrap_state_json'),
841
- sql.ref('status'),
842
- sql.ref('created_at'),
843
- sql.ref('updated_at'),
844
- ])}
845
- ) values (
846
- ${sql.join([
847
- sql.val(stateId),
848
- sql.val(sub.id),
849
- sql.val(table),
850
- sql.val(scopesJson),
851
- sql.val(paramsJson),
852
- sql.val(sub.nextCursor),
853
- sql.val(bootstrapStateJson),
854
- sql.val('active'),
855
- sql.val(now),
856
- sql.val(now),
857
- ])}
858
- )
859
- on conflict (${sql.join([sql.ref('state_id'), sql.ref('subscription_id')])})
860
- do update set
861
- ${sql.ref('table')} = ${sql.val(table)},
862
- ${sql.ref('scopes_json')} = ${sql.val(scopesJson)},
863
- ${sql.ref('params_json')} = ${sql.val(paramsJson)},
864
- ${sql.ref('cursor')} = ${sql.val(sub.nextCursor)},
865
- ${sql.ref('bootstrap_state_json')} = ${sql.val(bootstrapStateJson)},
866
- ${sql.ref('status')} = ${sql.val('active')},
867
- ${sql.ref('updated_at')} = ${sql.val(now)}
868
- `.execute(trx);
869
- latestCursorBySubscriptionId.set(sub.id, sub.nextCursor);
870
1145
  }
871
- });
872
- return responseToApply;
1146
+ else {
1147
+ for (const commit of sub.commits) {
1148
+ await applyIncrementalCommitChanges(handlers, trx, {
1149
+ changes: commit.changes,
1150
+ commitSeq: commit.commitSeq ?? null,
1151
+ actorId: commit.actorId ?? null,
1152
+ createdAt: commit.createdAt ?? null,
1153
+ });
1154
+ }
1155
+ }
1156
+ const now = Date.now();
1157
+ const paramsJson = serializeJsonCached(def?.params ?? {});
1158
+ const scopesJson = serializeJsonCached(nextScopes);
1159
+ const bootstrapStateJson = sub.bootstrap
1160
+ ? sub.bootstrapState
1161
+ ? serializeJsonCached(sub.bootstrapState)
1162
+ : null
1163
+ : null;
1164
+ const table = def?.table ?? 'unknown';
1165
+ await sql `
1166
+ insert into ${sql.table('sync_subscription_state')} (
1167
+ ${sql.join([
1168
+ sql.ref('state_id'),
1169
+ sql.ref('subscription_id'),
1170
+ sql.ref('table'),
1171
+ sql.ref('scopes_json'),
1172
+ sql.ref('params_json'),
1173
+ sql.ref('cursor'),
1174
+ sql.ref('bootstrap_state_json'),
1175
+ sql.ref('status'),
1176
+ sql.ref('created_at'),
1177
+ sql.ref('updated_at'),
1178
+ ])}
1179
+ ) values (
1180
+ ${sql.join([
1181
+ sql.val(stateId),
1182
+ sql.val(sub.id),
1183
+ sql.val(table),
1184
+ sql.val(scopesJson),
1185
+ sql.val(paramsJson),
1186
+ sql.val(sub.nextCursor),
1187
+ sql.val(bootstrapStateJson),
1188
+ sql.val('active'),
1189
+ sql.val(now),
1190
+ sql.val(now),
1191
+ ])}
1192
+ )
1193
+ on conflict (${sql.join([sql.ref('state_id'), sql.ref('subscription_id')])})
1194
+ do update set
1195
+ ${sql.ref('table')} = ${sql.val(table)},
1196
+ ${sql.ref('scopes_json')} = ${sql.val(scopesJson)},
1197
+ ${sql.ref('params_json')} = ${sql.val(paramsJson)},
1198
+ ${sql.ref('cursor')} = ${sql.val(sub.nextCursor)},
1199
+ ${sql.ref('bootstrap_state_json')} = ${sql.val(bootstrapStateJson)},
1200
+ ${sql.ref('status')} = ${sql.val('active')},
1201
+ ${sql.ref('updated_at')} = ${sql.val(now)}
1202
+ `.execute(trx);
1203
+ emitTrace(options.onTrace, {
1204
+ stage: 'apply:subscription:complete',
1205
+ stateId,
1206
+ subscriptionId: sub.id,
1207
+ table,
1208
+ bootstrap: sub.bootstrap,
1209
+ snapshotCount: sub.snapshots?.length ?? 0,
1210
+ commitCount: sub.commits?.length ?? 0,
1211
+ chunkCount: countSubscriptionChunks(sub),
1212
+ rowCount: countSubscriptionRows(sub),
1213
+ nextCursor: sub.nextCursor,
1214
+ durationMs: Math.max(0, Date.now() - applyStartedAt),
1215
+ });
1216
+ }
1217
+ catch (error) {
1218
+ emitTrace(options.onTrace, {
1219
+ stage: 'apply:subscription:error',
1220
+ stateId,
1221
+ subscriptionId: sub.id,
1222
+ table: def?.table ?? prev?.table,
1223
+ bootstrap: sub.bootstrap,
1224
+ snapshotCount: sub.snapshots?.length ?? 0,
1225
+ commitCount: sub.commits?.length ?? 0,
1226
+ chunkCount: countSubscriptionChunks(sub),
1227
+ rowCount: countSubscriptionRows(sub),
1228
+ nextCursor: sub.nextCursor,
1229
+ durationMs: Math.max(0, Date.now() - applyStartedAt),
1230
+ errorMessage: error instanceof Error ? error.message : String(error),
1231
+ });
1232
+ throw error;
1233
+ }
873
1234
  }
874
1235
/**
 * Runs a single pull cycle: builds (or reuses) the pull request, sends it to
 * the server over the transport, and applies the server's pull response to
 * the local database.
 *
 * @param db                Client database handle.
 * @param transport         Sync transport; `transport.sync` carries the pull.
 * @param handlers          Collection handler registry used when applying data.
 * @param options           Pull options; may include an `onTrace` callback used
 *                          for the `pull:start` / `pull:error` / `pull:response`
 *                          trace events emitted here.
 * @param pullStateOverride Optional pre-built pull state; when omitted a fresh
 *                          request is built from `db` via `buildPullRequest`.
 * @returns The result of `applyPullResponse`, or `{ ok: true, subscriptions: [] }`
 *          when the combined response contains no pull payload.
 * @throws  Re-throws any transport error after emitting a `pull:error` trace.
 */
export async function syncPullOnce(db, transport, handlers, options, pullStateOverride) {
    const pullState = pullStateOverride ?? (await buildPullRequest(db, options));
    // `clientId` travels at the top level of the combined request; everything
    // else in the request is the pull body.
    const { clientId, ...pullBody } = pullState.request;
    // Hoisted: the same id list is reported by both the `pull:start` and
    // `pull:error` traces, so compute it once instead of mapping twice.
    const requestSubscriptionIds = pullState.request.subscriptions.map((subscription) => subscription.id);
    emitTrace(options.onTrace, {
        stage: 'pull:start',
        stateId: pullState.stateId,
        subscriptionIds: requestSubscriptionIds,
        subscriptionCount: pullState.request.subscriptions.length,
    });
    let combined;
    try {
        combined = await transport.sync({ clientId, pull: pullBody });
    }
    catch (error) {
        // Surface transport failures through the trace channel, then re-throw
        // so callers still observe the error.
        emitTrace(options.onTrace, {
            stage: 'pull:error',
            stateId: pullState.stateId,
            subscriptionIds: requestSubscriptionIds,
            subscriptionCount: pullState.request.subscriptions.length,
            errorMessage: error instanceof Error ? error.message : String(error),
        });
        throw error;
    }
    if (!combined.pull) {
        // Server had nothing for us to pull; report success with no subscriptions.
        return { ok: true, subscriptions: [] };
    }
    emitTrace(options.onTrace, {
        stage: 'pull:response',
        stateId: pullState.stateId,
        subscriptionIds: combined.pull.subscriptions.map((subscription) => subscription.id),
        subscriptionCount: combined.pull.subscriptions.length,
        commitCount: combined.pull.subscriptions.reduce((sum, subscription) => sum + (subscription.commits?.length ?? 0), 0),
        snapshotCount: combined.pull.subscriptions.reduce((sum, subscription) => sum + (subscription.snapshots?.length ?? 0), 0),
    });
    return applyPullResponse(db, transport, handlers, options, pullState, combined.pull);
}
883
1271
  //# sourceMappingURL=pull-engine.js.map