@syncular/server 0.0.6-185 → 0.0.6-188

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/pull.ts CHANGED
@@ -41,7 +41,9 @@ import {
41
41
  import { resolveEffectiveScopesForSubscriptions } from './subscriptions/resolve';
42
42
 
43
43
  const defaultScopeCache = createMemoryScopeCache();
44
- const MAX_SNAPSHOT_BUNDLE_ROW_FRAME_BYTES = 512 * 1024;
44
// Default cap on uncompressed row-frame bytes packed into one snapshot bundle.
const DEFAULT_MAX_SNAPSHOT_BUNDLE_ROW_FRAME_BYTES = 512 * 1024;
// Ceiling when the bundle budget adapts upward to fit an oversized page (4 MiB).
const MAX_ADAPTIVE_SNAPSHOT_BUNDLE_ROW_FRAME_BYTES = 4 * 1024 * 1024;
// Pages whose row frames fit under this size may be returned inline
// (see the inlineRows handling in flushSnapshotBundle) instead of as a stored chunk.
const DEFAULT_INLINE_SNAPSHOT_ROW_FRAME_BYTES = 256 * 1024;
45
47
 
46
48
  function concatByteChunks(chunks: readonly Uint8Array[]): Uint8Array {
47
49
  if (chunks.length === 1) {
@@ -86,39 +88,6 @@ function bufferSourceToUint8Array(chunk: BufferSource): Uint8Array {
86
88
  return new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);
87
89
  }
88
90
 
89
- async function streamToBytes(
90
- stream: ReadableStream<BufferSource>
91
- ): Promise<Uint8Array> {
92
- const reader = stream.getReader();
93
- const chunks: Uint8Array[] = [];
94
- let total = 0;
95
-
96
- try {
97
- while (true) {
98
- const { done, value } = await reader.read();
99
- if (done) break;
100
- if (!value) continue;
101
- const bytes = bufferSourceToUint8Array(value);
102
- if (bytes.length === 0) continue;
103
- chunks.push(bytes);
104
- total += bytes.length;
105
- }
106
- } finally {
107
- reader.releaseLock();
108
- }
109
-
110
- if (chunks.length === 0) return new Uint8Array();
111
- if (chunks.length === 1) return chunks[0] ?? new Uint8Array();
112
-
113
- const merged = new Uint8Array(total);
114
- let offset = 0;
115
- for (const chunk of chunks) {
116
- merged.set(chunk, offset);
117
- offset += chunk.length;
118
- }
119
- return merged;
120
- }
121
-
122
91
  function bufferSourceStreamToUint8ArrayStream(
123
92
  stream: ReadableStream<BufferSource>
124
93
  ): ReadableStream<Uint8Array> {
@@ -147,6 +116,8 @@ function bufferSourceStreamToUint8ArrayStream(
147
116
  let nodeCryptoModulePromise: Promise<
148
117
  typeof import('node:crypto') | null
149
118
  > | null = null;
119
// Cached result of the one-time dynamic node:zlib import attempt;
// null until getNodeZlibModule() runs for the first time.
let nodeZlibModulePromise: Promise<typeof import('node:zlib') | null> | null =
  null;
150
121
 
151
122
  async function getNodeCryptoModule(): Promise<
152
123
  typeof import('node:crypto') | null
@@ -157,57 +128,106 @@ async function getNodeCryptoModule(): Promise<
157
128
  return nodeCryptoModulePromise;
158
129
  }
159
130
 
160
- async function sha256HexFromByteChunks(
161
- chunks: readonly Uint8Array[]
162
- ): Promise<string> {
131
+ async function getNodeZlibModule(): Promise<typeof import('node:zlib') | null> {
132
+ if (!nodeZlibModulePromise) {
133
+ nodeZlibModulePromise = import('node:zlib').catch(() => null);
134
+ }
135
+ return nodeZlibModulePromise;
136
+ }
137
+
138
+ async function sha256HexFromBytes(bytes: Uint8Array): Promise<string> {
163
139
  const nodeCrypto = await getNodeCryptoModule();
164
140
  if (nodeCrypto && typeof nodeCrypto.createHash === 'function') {
165
141
  const hasher = nodeCrypto.createHash('sha256');
166
- for (const chunk of chunks) {
167
- if (chunk.length === 0) continue;
168
- hasher.update(chunk);
169
- }
142
+ hasher.update(bytes);
170
143
  return hasher.digest('hex');
171
144
  }
172
145
 
173
- return sha256Hex(concatByteChunks(chunks));
146
+ return sha256Hex(bytes);
174
147
  }
175
148
 
176
- async function gzipByteChunks(
177
- chunks: readonly Uint8Array[]
149
+ async function gzipSnapshotChunkBytes(
150
+ payload: Uint8Array
178
151
  ): Promise<Uint8Array> {
179
- if (typeof CompressionStream !== 'undefined') {
180
- const stream = byteChunksToStream(chunks).pipeThrough(
181
- new CompressionStream('gzip')
182
- );
183
- return streamToBytes(stream);
152
+ const nodeZlib = await getNodeZlibModule();
153
+ if (nodeZlib) {
154
+ return await new Promise<Uint8Array>((resolve, reject) => {
155
+ nodeZlib.gzip(
156
+ payload,
157
+ {
158
+ level: 1,
159
+ },
160
+ (error, compressed) => {
161
+ if (error) {
162
+ reject(error);
163
+ return;
164
+ }
165
+ resolve(new Uint8Array(compressed));
166
+ }
167
+ );
168
+ });
184
169
  }
185
170
 
186
- return gzipBytes(concatByteChunks(chunks));
171
+ return gzipBytes(payload);
187
172
  }
188
173
 
189
- async function gzipByteChunksToStream(chunks: readonly Uint8Array[]): Promise<{
190
- stream: ReadableStream<Uint8Array>;
191
- byteLength?: number;
174
+ async function encodeCompressedSnapshotChunk(
175
+ chunks: readonly Uint8Array[]
176
+ ): Promise<{
177
+ body: Uint8Array;
178
+ sha256: string;
192
179
  }> {
193
- if (typeof CompressionStream !== 'undefined') {
194
- const source = byteChunksToStream(chunks).pipeThrough(
195
- new CompressionStream('gzip')
196
- );
197
- return {
198
- stream: bufferSourceStreamToUint8ArrayStream(source),
199
- };
200
- }
180
+ const payload = concatByteChunks(chunks);
181
+ const [body, sha256] = await Promise.all([
182
+ gzipSnapshotChunkBytes(payload),
183
+ sha256HexFromBytes(payload),
184
+ ]);
185
+ return { body, sha256 };
186
+ }
201
187
 
202
- const compressed = await gzipBytes(concatByteChunks(chunks));
188
+ async function encodeCompressedSnapshotChunkToStream(
189
+ chunks: readonly Uint8Array[]
190
+ ): Promise<{
191
+ stream: ReadableStream<Uint8Array>;
192
+ byteLength: number;
193
+ sha256: string;
194
+ }> {
195
+ const encoded = await encodeCompressedSnapshotChunk(chunks);
203
196
  return {
204
197
  stream: bufferSourceStreamToUint8ArrayStream(
205
- byteChunksToStream([compressed])
198
+ byteChunksToStream([encoded.body])
206
199
  ),
207
- byteLength: compressed.length,
200
+ byteLength: encoded.body.length,
201
+ sha256: encoded.sha256,
208
202
  };
209
203
  }
210
204
 
205
+ function resolveSnapshotBundleMaxBytes(args: {
206
+ configuredMaxBytes?: number;
207
+ pageRowCount: number;
208
+ pageRowFrameBytes: number;
209
+ }): number {
210
+ if (
211
+ typeof args.configuredMaxBytes === 'number' &&
212
+ Number.isFinite(args.configuredMaxBytes) &&
213
+ args.configuredMaxBytes > 0
214
+ ) {
215
+ return Math.max(1, args.configuredMaxBytes);
216
+ }
217
+
218
+ if (args.pageRowCount <= 0 || args.pageRowFrameBytes <= 0) {
219
+ return DEFAULT_MAX_SNAPSHOT_BUNDLE_ROW_FRAME_BYTES;
220
+ }
221
+
222
+ return Math.max(
223
+ DEFAULT_MAX_SNAPSHOT_BUNDLE_ROW_FRAME_BYTES,
224
+ Math.min(
225
+ MAX_ADAPTIVE_SNAPSHOT_BUNDLE_ROW_FRAME_BYTES,
226
+ args.pageRowFrameBytes
227
+ )
228
+ );
229
+ }
230
+
211
231
  export interface PullResult {
212
232
  response: SyncPullResponse;
213
233
  /**
@@ -419,6 +439,7 @@ async function readLatestExternalCommitByTable<DB extends SyncCoreDb>(
419
439
  .select((eb) => eb.fn.max('tc.commit_seq').as('latest_commit_seq'))
420
440
  .where('tc.partition_id', '=', args.partitionId)
421
441
  .where('cm.client_id', '=', EXTERNAL_CLIENT_ID)
442
+ .where('cm.change_count', '=', 0)
422
443
  .where('tc.commit_seq', '>', args.afterCursor)
423
444
  .where('tc.table', 'in', tableNames)
424
445
  .groupBy('tc.table')
@@ -480,7 +501,7 @@ export async function pull<
480
501
  request.limitSnapshotRows,
481
502
  1000,
482
503
  1,
483
- 5000
504
+ 20000
484
505
  );
485
506
  const maxSnapshotPages = sanitizeLimit(
486
507
  request.maxSnapshotPages,
@@ -581,6 +602,11 @@ export async function pull<
581
602
  args.handlers,
582
603
  sub.table
583
604
  ).map((handler) => handler.table);
605
+ const preferInlineBootstrapSnapshot =
606
+ cursor >= 0 ||
607
+ sub.bootstrapState != null ||
608
+ (latestExternalCommitForTable !== undefined &&
609
+ latestExternalCommitForTable > cursor);
584
610
 
585
611
  const initState: SyncBootstrapState = {
586
612
  asOfCommitSeq: maxCommitSeq,
@@ -636,11 +662,22 @@ export async function pull<
636
662
  ttlMs: number;
637
663
  rowFrameByteLength: number;
638
664
  rowFrameParts: Uint8Array[];
665
+ inlineRows: unknown[] | null;
639
666
  }
640
667
 
641
668
  const flushSnapshotBundle = async (
642
669
  bundle: SnapshotBundle
643
670
  ): Promise<void> => {
671
+ if (bundle.inlineRows) {
672
+ snapshots.push({
673
+ table: bundle.table,
674
+ rows: bundle.inlineRows,
675
+ isFirstPage: bundle.isFirstPage,
676
+ isLastPage: bundle.isLastPage,
677
+ });
678
+ return;
679
+ }
680
+
644
681
  const nowIso = new Date().toISOString();
645
682
  const bundleRowLimit = Math.max(
646
683
  1,
@@ -689,10 +726,7 @@ export async function pull<
689
726
  });
690
727
  return;
691
728
  }
692
- const sha256 = await sha256HexFromByteChunks(
693
- bundle.rowFrameParts
694
- );
695
- const compressedBody = await gzipByteChunks(
729
+ const encodedChunk = await encodeCompressedSnapshotChunk(
696
730
  bundle.rowFrameParts
697
731
  );
698
732
  const chunkId = randomId();
@@ -706,8 +740,8 @@ export async function pull<
706
740
  rowLimit: bundleRowLimit,
707
741
  encoding: SYNC_SNAPSHOT_CHUNK_ENCODING,
708
742
  compression: SYNC_SNAPSHOT_CHUNK_COMPRESSION,
709
- sha256,
710
- body: compressedBody,
743
+ sha256: encodedChunk.sha256,
744
+ body: encodedChunk.body,
711
745
  expiresAt,
712
746
  });
713
747
  }
@@ -761,6 +795,7 @@ export async function pull<
761
795
  tableHandler.snapshotChunkTtlMs ?? 24 * 60 * 60 * 1000,
762
796
  rowFrameByteLength: bundleHeader.length,
763
797
  rowFrameParts: [bundleHeader],
798
+ inlineRows: null,
764
799
  };
765
800
  }
766
801
 
@@ -778,10 +813,15 @@ export async function pull<
778
813
  );
779
814
 
780
815
  const rowFrames = encodeSnapshotRowFrames(page.rows ?? []);
816
+ const bundleMaxBytes = resolveSnapshotBundleMaxBytes({
817
+ configuredMaxBytes: tableHandler.snapshotBundleMaxBytes,
818
+ pageRowCount: page.rows?.length ?? 0,
819
+ pageRowFrameBytes: rowFrames.length,
820
+ });
781
821
  if (
782
822
  activeBundle.pageCount > 0 &&
783
823
  activeBundle.rowFrameByteLength + rowFrames.length >
784
- MAX_SNAPSHOT_BUNDLE_ROW_FRAME_BYTES
824
+ bundleMaxBytes
785
825
  ) {
786
826
  await flushSnapshotBundle(activeBundle);
787
827
  const bundleHeader = encodeSnapshotRows([]);
@@ -795,8 +835,20 @@ export async function pull<
795
835
  tableHandler.snapshotChunkTtlMs ?? 24 * 60 * 60 * 1000,
796
836
  rowFrameByteLength: bundleHeader.length,
797
837
  rowFrameParts: [bundleHeader],
838
+ inlineRows: null,
798
839
  };
799
840
  }
841
+
842
+ if (
843
+ preferInlineBootstrapSnapshot &&
844
+ activeBundle.pageCount === 0 &&
845
+ page.nextCursor == null &&
846
+ rowFrames.length <= DEFAULT_INLINE_SNAPSHOT_ROW_FRAME_BYTES
847
+ ) {
848
+ activeBundle.inlineRows = page.rows ?? [];
849
+ } else {
850
+ activeBundle.inlineRows = null;
851
+ }
800
852
  activeBundle.rowFrameParts.push(rowFrames);
801
853
  activeBundle.rowFrameByteLength += rowFrames.length;
802
854
  activeBundle.pageCount += 1;
@@ -1052,12 +1104,14 @@ export async function pull<
1052
1104
  });
1053
1105
 
1054
1106
  if (!chunkRef) {
1055
- const sha256 = await sha256HexFromByteChunks(
1056
- pending.rowFrameParts
1057
- );
1058
1107
  if (chunkStorage.storeChunkStream) {
1059
- const { stream: bodyStream, byteLength } =
1060
- await gzipByteChunksToStream(pending.rowFrameParts);
1108
+ const {
1109
+ stream: bodyStream,
1110
+ byteLength,
1111
+ sha256,
1112
+ } = await encodeCompressedSnapshotChunkToStream(
1113
+ pending.rowFrameParts
1114
+ );
1061
1115
  chunkRef = await chunkStorage.storeChunkStream({
1062
1116
  partitionId: pending.cacheLookup.partitionId,
1063
1117
  scopeKey: pending.cacheLookup.scopeKey,
@@ -1073,7 +1127,7 @@ export async function pull<
1073
1127
  expiresAt: pending.expiresAt,
1074
1128
  });
1075
1129
  } else {
1076
- const compressedBody = await gzipByteChunks(
1130
+ const encodedChunk = await encodeCompressedSnapshotChunk(
1077
1131
  pending.rowFrameParts
1078
1132
  );
1079
1133
  chunkRef = await chunkStorage.storeChunk({
@@ -1085,8 +1139,8 @@ export async function pull<
1085
1139
  rowLimit: pending.cacheLookup.rowLimit,
1086
1140
  encoding: SYNC_SNAPSHOT_CHUNK_ENCODING,
1087
1141
  compression: SYNC_SNAPSHOT_CHUNK_COMPRESSION,
1088
- sha256,
1089
- body: compressedBody,
1142
+ sha256: encodedChunk.sha256,
1143
+ body: encodedChunk.body,
1090
1144
  expiresAt: pending.expiresAt,
1091
1145
  });
1092
1146
  }