@syncular/core 0.0.6-204 → 0.0.6-206
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/schemas/sync.d.ts +5 -5
- package/dist/snapshot-chunks.d.ts +5 -4
- package/dist/snapshot-chunks.d.ts.map +1 -1
- package/dist/snapshot-chunks.js +30 -33
- package/dist/snapshot-chunks.js.map +1 -1
- package/dist/utils/bytes.d.ts +4 -0
- package/dist/utils/bytes.d.ts.map +1 -0
- package/dist/utils/bytes.js +66 -0
- package/dist/utils/bytes.js.map +1 -0
- package/dist/utils/compression.d.ts +2 -2
- package/dist/utils/compression.d.ts.map +1 -1
- package/dist/utils/compression.js +32 -49
- package/dist/utils/compression.js.map +1 -1
- package/dist/utils/crypto.d.ts +7 -1
- package/dist/utils/crypto.d.ts.map +1 -1
- package/dist/utils/crypto.js +70 -7
- package/dist/utils/crypto.js.map +1 -1
- package/dist/utils/index.d.ts +1 -0
- package/dist/utils/index.d.ts.map +1 -1
- package/dist/utils/index.js +1 -0
- package/dist/utils/index.js.map +1 -1
- package/dist/utils/internal-runtime.d.ts +13 -0
- package/dist/utils/internal-runtime.d.ts.map +1 -0
- package/dist/utils/internal-runtime.js +14 -0
- package/dist/utils/internal-runtime.js.map +1 -0
- package/package.json +1 -1
- package/src/snapshot-chunks.ts +32 -36
- package/src/utils/bytes.ts +75 -0
- package/src/utils/compression.ts +43 -55
- package/src/utils/crypto.ts +92 -7
- package/src/utils/index.ts +1 -0
- package/src/utils/internal-runtime.ts +38 -0
package/dist/schemas/sync.d.ts
CHANGED
|
@@ -251,7 +251,7 @@ export declare const SyncSnapshotChunkRefSchema: z.ZodObject<{
|
|
|
251
251
|
id: z.ZodString;
|
|
252
252
|
byteLength: z.ZodNumber;
|
|
253
253
|
sha256: z.ZodString;
|
|
254
|
-
encoding: z.ZodLiteral<"json-row-frame-
|
|
254
|
+
encoding: z.ZodLiteral<"json-row-batch-frame-v2">;
|
|
255
255
|
compression: z.ZodLiteral<"gzip">;
|
|
256
256
|
}, z.core.$strip>;
|
|
257
257
|
export type SyncSnapshotChunkRef = z.infer<typeof SyncSnapshotChunkRefSchema>;
|
|
@@ -262,7 +262,7 @@ export declare const SyncSnapshotSchema: z.ZodObject<{
|
|
|
262
262
|
id: z.ZodString;
|
|
263
263
|
byteLength: z.ZodNumber;
|
|
264
264
|
sha256: z.ZodString;
|
|
265
|
-
encoding: z.ZodLiteral<"json-row-frame-
|
|
265
|
+
encoding: z.ZodLiteral<"json-row-batch-frame-v2">;
|
|
266
266
|
compression: z.ZodLiteral<"gzip">;
|
|
267
267
|
}, z.core.$strip>>>;
|
|
268
268
|
isFirstPage: z.ZodBoolean;
|
|
@@ -307,7 +307,7 @@ export declare const SyncPullSubscriptionResponseSchema: z.ZodObject<{
|
|
|
307
307
|
id: z.ZodString;
|
|
308
308
|
byteLength: z.ZodNumber;
|
|
309
309
|
sha256: z.ZodString;
|
|
310
|
-
encoding: z.ZodLiteral<"json-row-frame-
|
|
310
|
+
encoding: z.ZodLiteral<"json-row-batch-frame-v2">;
|
|
311
311
|
compression: z.ZodLiteral<"gzip">;
|
|
312
312
|
}, z.core.$strip>>>;
|
|
313
313
|
isFirstPage: z.ZodBoolean;
|
|
@@ -355,7 +355,7 @@ export declare const SyncPullResponseSchema: z.ZodObject<{
|
|
|
355
355
|
id: z.ZodString;
|
|
356
356
|
byteLength: z.ZodNumber;
|
|
357
357
|
sha256: z.ZodString;
|
|
358
|
-
encoding: z.ZodLiteral<"json-row-frame-
|
|
358
|
+
encoding: z.ZodLiteral<"json-row-batch-frame-v2">;
|
|
359
359
|
compression: z.ZodLiteral<"gzip">;
|
|
360
360
|
}, z.core.$strip>>>;
|
|
361
361
|
isFirstPage: z.ZodBoolean;
|
|
@@ -474,7 +474,7 @@ export declare const SyncCombinedResponseSchema: z.ZodObject<{
|
|
|
474
474
|
id: z.ZodString;
|
|
475
475
|
byteLength: z.ZodNumber;
|
|
476
476
|
sha256: z.ZodString;
|
|
477
|
-
encoding: z.ZodLiteral<"json-row-frame-
|
|
477
|
+
encoding: z.ZodLiteral<"json-row-batch-frame-v2">;
|
|
478
478
|
compression: z.ZodLiteral<"gzip">;
|
|
479
479
|
}, z.core.$strip>>>;
|
|
480
480
|
isFirstPage: z.ZodBoolean;
|
|
@@ -1,22 +1,23 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* @syncular/core - Snapshot chunk encoding helpers
|
|
3
3
|
*/
|
|
4
|
-
export declare const SYNC_SNAPSHOT_CHUNK_ENCODING = "json-row-frame-
|
|
4
|
+
export declare const SYNC_SNAPSHOT_CHUNK_ENCODING = "json-row-batch-frame-v2";
|
|
5
5
|
export type SyncSnapshotChunkEncoding = typeof SYNC_SNAPSHOT_CHUNK_ENCODING;
|
|
6
6
|
export declare const SYNC_SNAPSHOT_CHUNK_COMPRESSION = "gzip";
|
|
7
7
|
export type SyncSnapshotChunkCompression = typeof SYNC_SNAPSHOT_CHUNK_COMPRESSION;
|
|
8
|
+
export declare const SYNC_SNAPSHOT_CHUNK_MAGIC: Uint8Array<ArrayBuffer>;
|
|
8
9
|
/**
|
|
9
|
-
* Encode rows as framed JSON
|
|
10
|
+
* Encode rows as a single framed JSON batch without the format header.
|
|
10
11
|
*/
|
|
11
12
|
export declare function encodeSnapshotRowFrames(rows: readonly unknown[]): Uint8Array;
|
|
12
13
|
/**
|
|
13
14
|
* Encode rows as framed JSON bytes with a format header.
|
|
14
15
|
*
|
|
15
16
|
* Format:
|
|
16
|
-
* - 4-byte magic header ("
|
|
17
|
+
* - 4-byte magic header ("SBF2")
|
|
17
18
|
* - repeated frames of:
|
|
18
19
|
* - 4-byte big-endian payload byte length
|
|
19
|
-
* - UTF-8 JSON payload
|
|
20
|
+
* - UTF-8 JSON array payload
|
|
20
21
|
*/
|
|
21
22
|
export declare function encodeSnapshotRows(rows: readonly unknown[]): Uint8Array;
|
|
22
23
|
/**
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"snapshot-chunks.d.ts","sourceRoot":"","sources":["../src/snapshot-chunks.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,eAAO,MAAM,4BAA4B,
|
|
1
|
+
{"version":3,"file":"snapshot-chunks.d.ts","sourceRoot":"","sources":["../src/snapshot-chunks.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,eAAO,MAAM,4BAA4B,4BAA4B,CAAC;AACtE,MAAM,MAAM,yBAAyB,GAAG,OAAO,4BAA4B,CAAC;AAE5E,eAAO,MAAM,+BAA+B,SAAS,CAAC;AACtD,MAAM,MAAM,4BAA4B,GACtC,OAAO,+BAA+B,CAAC;AAEzC,eAAO,MAAM,yBAAyB,yBAEpC,CAAC;AAUH;;GAEG;AACH,wBAAgB,uBAAuB,CAAC,IAAI,EAAE,SAAS,OAAO,EAAE,GAAG,UAAU,CAiB5E;AAED;;;;;;;;GAQG;AACH,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,SAAS,OAAO,EAAE,GAAG,UAAU,CASvE;AAED;;GAEG;AACH,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,UAAU,GAAG,OAAO,EAAE,CAuC/D"}
|
package/dist/snapshot-chunks.js
CHANGED
|
@@ -1,68 +1,61 @@
|
|
|
1
1
|
/**
|
|
2
2
|
* @syncular/core - Snapshot chunk encoding helpers
|
|
3
3
|
*/
|
|
4
|
-
export const SYNC_SNAPSHOT_CHUNK_ENCODING = 'json-row-frame-
|
|
4
|
+
export const SYNC_SNAPSHOT_CHUNK_ENCODING = 'json-row-batch-frame-v2';
|
|
5
5
|
export const SYNC_SNAPSHOT_CHUNK_COMPRESSION = 'gzip';
|
|
6
|
-
const
|
|
6
|
+
export const SYNC_SNAPSHOT_CHUNK_MAGIC = new Uint8Array([
|
|
7
|
+
0x53, 0x42, 0x46, 0x32,
|
|
8
|
+
]); // "SBF2"
|
|
7
9
|
const FRAME_LENGTH_BYTES = 4;
|
|
8
10
|
const MAX_FRAME_BYTE_LENGTH = 0xffff_ffff;
|
|
9
11
|
const snapshotRowFrameEncoder = new TextEncoder();
|
|
10
12
|
const snapshotRowFrameDecoder = new TextDecoder();
|
|
11
|
-
function
|
|
12
|
-
|
|
13
|
-
return serialized === undefined ? 'null' : serialized;
|
|
13
|
+
function normalizeRowBatchJson(rows) {
|
|
14
|
+
return JSON.stringify(rows);
|
|
14
15
|
}
|
|
15
16
|
/**
|
|
16
|
-
* Encode rows as framed JSON
|
|
17
|
+
* Encode rows as a single framed JSON batch without the format header.
|
|
17
18
|
*/
|
|
18
19
|
export function encodeSnapshotRowFrames(rows) {
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
for (const row of rows) {
|
|
22
|
-
const payload = snapshotRowFrameEncoder.encode(normalizeRowJson(row));
|
|
23
|
-
if (payload.length > MAX_FRAME_BYTE_LENGTH) {
|
|
24
|
-
throw new Error(`Snapshot row payload exceeds ${MAX_FRAME_BYTE_LENGTH} bytes`);
|
|
25
|
-
}
|
|
26
|
-
payloads.push(payload);
|
|
27
|
-
totalByteLength += FRAME_LENGTH_BYTES + payload.length;
|
|
20
|
+
if (rows.length === 0) {
|
|
21
|
+
return new Uint8Array();
|
|
28
22
|
}
|
|
29
|
-
const
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
for (const payload of payloads) {
|
|
33
|
-
view.setUint32(offset, payload.length, false);
|
|
34
|
-
offset += FRAME_LENGTH_BYTES;
|
|
35
|
-
encoded.set(payload, offset);
|
|
36
|
-
offset += payload.length;
|
|
23
|
+
const payload = snapshotRowFrameEncoder.encode(normalizeRowBatchJson(rows));
|
|
24
|
+
if (payload.length > MAX_FRAME_BYTE_LENGTH) {
|
|
25
|
+
throw new Error(`Snapshot row batch payload exceeds ${MAX_FRAME_BYTE_LENGTH} bytes`);
|
|
37
26
|
}
|
|
27
|
+
const encoded = new Uint8Array(FRAME_LENGTH_BYTES + payload.length);
|
|
28
|
+
const view = new DataView(encoded.buffer, encoded.byteOffset, encoded.length);
|
|
29
|
+
view.setUint32(0, payload.length, false);
|
|
30
|
+
encoded.set(payload, FRAME_LENGTH_BYTES);
|
|
38
31
|
return encoded;
|
|
39
32
|
}
|
|
40
33
|
/**
|
|
41
34
|
* Encode rows as framed JSON bytes with a format header.
|
|
42
35
|
*
|
|
43
36
|
* Format:
|
|
44
|
-
* - 4-byte magic header ("
|
|
37
|
+
* - 4-byte magic header ("SBF2")
|
|
45
38
|
* - repeated frames of:
|
|
46
39
|
* - 4-byte big-endian payload byte length
|
|
47
|
-
* - UTF-8 JSON payload
|
|
40
|
+
* - UTF-8 JSON array payload
|
|
48
41
|
*/
|
|
49
42
|
export function encodeSnapshotRows(rows) {
|
|
50
43
|
const framedRows = encodeSnapshotRowFrames(rows);
|
|
51
|
-
const totalByteLength =
|
|
44
|
+
const totalByteLength = SYNC_SNAPSHOT_CHUNK_MAGIC.length + framedRows.length;
|
|
52
45
|
const encoded = new Uint8Array(totalByteLength);
|
|
53
|
-
encoded.set(
|
|
54
|
-
encoded.set(framedRows,
|
|
46
|
+
encoded.set(SYNC_SNAPSHOT_CHUNK_MAGIC, 0);
|
|
47
|
+
encoded.set(framedRows, SYNC_SNAPSHOT_CHUNK_MAGIC.length);
|
|
55
48
|
return encoded;
|
|
56
49
|
}
|
|
57
50
|
/**
|
|
58
51
|
* Decode framed JSON bytes into rows.
|
|
59
52
|
*/
|
|
60
53
|
export function decodeSnapshotRows(bytes) {
|
|
61
|
-
if (bytes.length <
|
|
54
|
+
if (bytes.length < SYNC_SNAPSHOT_CHUNK_MAGIC.length) {
|
|
62
55
|
throw new Error('Snapshot chunk payload is too small');
|
|
63
56
|
}
|
|
64
|
-
for (let index = 0; index <
|
|
65
|
-
const expected =
|
|
57
|
+
for (let index = 0; index < SYNC_SNAPSHOT_CHUNK_MAGIC.length; index += 1) {
|
|
58
|
+
const expected = SYNC_SNAPSHOT_CHUNK_MAGIC[index];
|
|
66
59
|
const actual = bytes[index];
|
|
67
60
|
if (actual !== expected) {
|
|
68
61
|
throw new Error('Unexpected snapshot chunk format');
|
|
@@ -70,7 +63,7 @@ export function decodeSnapshotRows(bytes) {
|
|
|
70
63
|
}
|
|
71
64
|
const rows = [];
|
|
72
65
|
const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.length);
|
|
73
|
-
let offset =
|
|
66
|
+
let offset = SYNC_SNAPSHOT_CHUNK_MAGIC.length;
|
|
74
67
|
while (offset < bytes.length) {
|
|
75
68
|
if (offset + FRAME_LENGTH_BYTES > bytes.length) {
|
|
76
69
|
throw new Error('Snapshot chunk payload ended mid-frame header');
|
|
@@ -82,7 +75,11 @@ export function decodeSnapshotRows(bytes) {
|
|
|
82
75
|
}
|
|
83
76
|
const payload = bytes.subarray(offset, offset + payloadLength);
|
|
84
77
|
offset += payloadLength;
|
|
85
|
-
|
|
78
|
+
const parsed = JSON.parse(snapshotRowFrameDecoder.decode(payload));
|
|
79
|
+
if (!Array.isArray(parsed)) {
|
|
80
|
+
throw new Error('Snapshot chunk frame payload must be a JSON array');
|
|
81
|
+
}
|
|
82
|
+
rows.push(...parsed);
|
|
86
83
|
}
|
|
87
84
|
return rows;
|
|
88
85
|
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"snapshot-chunks.js","sourceRoot":"","sources":["../src/snapshot-chunks.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,MAAM,CAAC,MAAM,4BAA4B,GAAG,
|
|
1
|
+
{"version":3,"file":"snapshot-chunks.js","sourceRoot":"","sources":["../src/snapshot-chunks.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,MAAM,CAAC,MAAM,4BAA4B,GAAG,yBAAyB,CAAC;AAGtE,MAAM,CAAC,MAAM,+BAA+B,GAAG,MAAM,CAAC;AAItD,MAAM,CAAC,MAAM,yBAAyB,GAAG,IAAI,UAAU,CAAC;IACtD,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI;CACvB,CAAC,CAAC,CAAC,SAAS;AACb,MAAM,kBAAkB,GAAG,CAAC,CAAC;AAC7B,MAAM,qBAAqB,GAAG,WAAW,CAAC;AAC1C,MAAM,uBAAuB,GAAG,IAAI,WAAW,EAAE,CAAC;AAClD,MAAM,uBAAuB,GAAG,IAAI,WAAW,EAAE,CAAC;AAElD,SAAS,qBAAqB,CAAC,IAAwB,EAAU;IAC/D,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;AAAA,CAC7B;AAED;;GAEG;AACH,MAAM,UAAU,uBAAuB,CAAC,IAAwB,EAAc;IAC5E,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACtB,OAAO,IAAI,UAAU,EAAE,CAAC;IAC1B,CAAC;IAED,MAAM,OAAO,GAAG,uBAAuB,CAAC,MAAM,CAAC,qBAAqB,CAAC,IAAI,CAAC,CAAC,CAAC;IAC5E,IAAI,OAAO,CAAC,MAAM,GAAG,qBAAqB,EAAE,CAAC;QAC3C,MAAM,IAAI,KAAK,CACb,sCAAsC,qBAAqB,QAAQ,CACpE,CAAC;IACJ,CAAC;IAED,MAAM,OAAO,GAAG,IAAI,UAAU,CAAC,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IACpE,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,UAAU,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC;IAC9E,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;IACzC,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,kBAAkB,CAAC,CAAC;IACzC,OAAO,OAAO,CAAC;AAAA,CAChB;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,kBAAkB,CAAC,IAAwB,EAAc;IACvE,MAAM,UAAU,GAAG,uBAAuB,CAAC,IAAI,CAAC,CAAC;IACjD,MAAM,eAAe,GAAG,yBAAyB,CAAC,MAAM,GAAG,UAAU,CAAC,MAAM,CAAC;IAE7E,MAAM,OAAO,GAAG,IAAI,UAAU,CAAC,eAAe,CAAC,CAAC;IAChD,OAAO,CAAC,GAAG,CAAC,yBAAyB,EAAE,CAAC,CAAC,CAAC;IAC1C,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,yBAAyB,CAAC,MAAM,CAAC,CAAC;IAE1D,OAAO,OAAO,CAAC;AAAA,CAChB;AAED;;GAEG;AACH,MAAM,UAAU,kBAAkB,CAAC,KAAiB,EAAa;IAC/D,IAAI,KAAK,CAAC,MAAM,GAAG,yBAAyB,CAAC,MAAM,EAAE,CAAC;QACpD,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;IACzD,CAAC;IAED,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,yBAAyB,CAAC,MAAM,EAAE,KAAK,IAAI,CAAC,EAAE,CAAC;QACzE,MAAM,QAAQ,GAAG,yBAAyB,CAAC,KAAK,CAAC,CAAC;QAClD,MAAM,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC;QAC5B,IAAI,MAAM,KAAK,QAAQ,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CA
AC,kCAAkC,CAAC,CAAC;QACtD,CAAC;IACH,CAAC;IAED,MAAM,IAAI,GAAc,EAAE,CAAC;IAC3B,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC;IACxE,IAAI,MAAM,GAAG,yBAAyB,CAAC,MAAM,CAAC;IAE9C,OAAO,MAAM,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC;QAC7B,IAAI,MAAM,GAAG,kBAAkB,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC;YAC/C,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;QACnE,CAAC;QAED,MAAM,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QACpD,MAAM,IAAI,kBAAkB,CAAC;QAE7B,IAAI,MAAM,GAAG,aAAa,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC;YAC1C,MAAM,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAC;QACjE,CAAC;QAED,MAAM,OAAO,GAAG,KAAK,CAAC,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,aAAa,CAAC,CAAC;QAC/D,MAAM,IAAI,aAAa,CAAC;QACxB,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,uBAAuB,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC;QACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC;YAC3B,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC;QACvE,CAAC;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,CAAC;IACvB,CAAC;IAED,OAAO,IAAI,CAAC;AAAA,CACb"}
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
export declare function bytesToReadableStream(bytes: Uint8Array): ReadableStream<Uint8Array>;
|
|
2
|
+
export declare function concatByteChunks(chunks: readonly Uint8Array[]): Uint8Array;
|
|
3
|
+
export declare function readAllBytesFromStream(stream: ReadableStream<Uint8Array>): Promise<Uint8Array>;
|
|
4
|
+
//# sourceMappingURL=bytes.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"bytes.d.ts","sourceRoot":"","sources":["../../src/utils/bytes.ts"],"names":[],"mappings":"AAEA,wBAAgB,qBAAqB,CACnC,KAAK,EAAE,UAAU,GAChB,cAAc,CAAC,UAAU,CAAC,CAO5B;AAED,wBAAgB,gBAAgB,CAAC,MAAM,EAAE,SAAS,UAAU,EAAE,GAAG,UAAU,CAoB1E;AAED,wBAAsB,sBAAsB,CAC1C,MAAM,EAAE,cAAc,CAAC,UAAU,CAAC,GACjC,OAAO,CAAC,UAAU,CAAC,CAqCrB"}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { getBunRuntime } from './internal-runtime.js';
|
|
2
|
+
export function bytesToReadableStream(bytes) {
|
|
3
|
+
return new ReadableStream({
|
|
4
|
+
start(controller) {
|
|
5
|
+
controller.enqueue(bytes);
|
|
6
|
+
controller.close();
|
|
7
|
+
},
|
|
8
|
+
});
|
|
9
|
+
}
|
|
10
|
+
export function concatByteChunks(chunks) {
|
|
11
|
+
if (chunks.length === 0) {
|
|
12
|
+
return new Uint8Array();
|
|
13
|
+
}
|
|
14
|
+
if (chunks.length === 1) {
|
|
15
|
+
return chunks[0] ?? new Uint8Array();
|
|
16
|
+
}
|
|
17
|
+
let total = 0;
|
|
18
|
+
for (const chunk of chunks) {
|
|
19
|
+
total += chunk.length;
|
|
20
|
+
}
|
|
21
|
+
const merged = new Uint8Array(total);
|
|
22
|
+
let offset = 0;
|
|
23
|
+
for (const chunk of chunks) {
|
|
24
|
+
merged.set(chunk, offset);
|
|
25
|
+
offset += chunk.length;
|
|
26
|
+
}
|
|
27
|
+
return merged;
|
|
28
|
+
}
|
|
29
|
+
export async function readAllBytesFromStream(stream) {
|
|
30
|
+
const bun = getBunRuntime();
|
|
31
|
+
if (bun?.readableStreamToBytes) {
|
|
32
|
+
const bytes = await bun.readableStreamToBytes(stream);
|
|
33
|
+
return bytes instanceof Uint8Array ? bytes : new Uint8Array(bytes);
|
|
34
|
+
}
|
|
35
|
+
const reader = stream.getReader();
|
|
36
|
+
const chunks = [];
|
|
37
|
+
let total = 0;
|
|
38
|
+
try {
|
|
39
|
+
while (true) {
|
|
40
|
+
const { done, value } = await reader.read();
|
|
41
|
+
if (done)
|
|
42
|
+
break;
|
|
43
|
+
if (!value)
|
|
44
|
+
continue;
|
|
45
|
+
chunks.push(value);
|
|
46
|
+
total += value.length;
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
finally {
|
|
50
|
+
reader.releaseLock();
|
|
51
|
+
}
|
|
52
|
+
if (chunks.length === 0) {
|
|
53
|
+
return new Uint8Array();
|
|
54
|
+
}
|
|
55
|
+
if (chunks.length === 1) {
|
|
56
|
+
return chunks[0] ?? new Uint8Array();
|
|
57
|
+
}
|
|
58
|
+
const merged = new Uint8Array(total);
|
|
59
|
+
let offset = 0;
|
|
60
|
+
for (const chunk of chunks) {
|
|
61
|
+
merged.set(chunk, offset);
|
|
62
|
+
offset += chunk.length;
|
|
63
|
+
}
|
|
64
|
+
return merged;
|
|
65
|
+
}
|
|
66
|
+
//# sourceMappingURL=bytes.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"bytes.js","sourceRoot":"","sources":["../../src/utils/bytes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAEnD,MAAM,UAAU,qBAAqB,CACnC,KAAiB,EACW;IAC5B,OAAO,IAAI,cAAc,CAAa;QACpC,KAAK,CAAC,UAAU,EAAE;YAChB,UAAU,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YAC1B,UAAU,CAAC,KAAK,EAAE,CAAC;QAAA,CACpB;KACF,CAAC,CAAC;AAAA,CACJ;AAED,MAAM,UAAU,gBAAgB,CAAC,MAA6B,EAAc;IAC1E,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACxB,OAAO,IAAI,UAAU,EAAE,CAAC;IAC1B,CAAC;IACD,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACxB,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,IAAI,UAAU,EAAE,CAAC;IACvC,CAAC;IAED,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;QAC3B,KAAK,IAAI,KAAK,CAAC,MAAM,CAAC;IACxB,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,UAAU,CAAC,KAAK,CAAC,CAAC;IACrC,IAAI,MAAM,GAAG,CAAC,CAAC;IACf,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;QAC3B,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;QAC1B,MAAM,IAAI,KAAK,CAAC,MAAM,CAAC;IACzB,CAAC;IACD,OAAO,MAAM,CAAC;AAAA,CACf;AAED,MAAM,CAAC,KAAK,UAAU,sBAAsB,CAC1C,MAAkC,EACb;IACrB,MAAM,GAAG,GAAG,aAAa,EAAE,CAAC;IAC5B,IAAI,GAAG,EAAE,qBAAqB,EAAE,CAAC;QAC/B,MAAM,KAAK,GAAG,MAAM,GAAG,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC;QACtD,OAAO,KAAK,YAAY,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,UAAU,CAAC,KAAK,CAAC,CAAC;IACrE,CAAC;IAED,MAAM,MAAM,GAAG,MAAM,CAAC,SAAS,EAAE,CAAC;IAClC,MAAM,MAAM,GAAiB,EAAE,CAAC;IAChC,IAAI,KAAK,GAAG,CAAC,CAAC;IAEd,IAAI,CAAC;QACH,OAAO,IAAI,EAAE,CAAC;YACZ,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;YAC5C,IAAI,IAAI;gBAAE,MAAM;YAChB,IAAI,CAAC,KAAK;gBAAE,SAAS;YACrB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;YACnB,KAAK,IAAI,KAAK,CAAC,MAAM,CAAC;QACxB,CAAC;IACH,CAAC;YAAS,CAAC;QACT,MAAM,CAAC,WAAW,EAAE,CAAC;IACvB,CAAC;IAED,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACxB,OAAO,IAAI,UAAU,EAAE,CAAC;IAC1B,CAAC;IACD,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACxB,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,IAAI,UAAU,EAAE,CAAC;IACvC,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,UAAU,CAAC,KAAK,CAAC,CAAC;IACrC,IAAI,MAAM,GAAG,CAAC,CAAC;IACf,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;QAC3B,MAAM,
CAAC,GAAG,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;QAC1B,MAAM,IAAI,KAAK,CAAC,MAAM,CAAC;IACzB,CAAC;IACD,OAAO,MAAM,CAAC;AAAA,CACf"}
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
/**
|
|
2
|
-
* Gzip-compress a byte array using
|
|
3
|
-
*
|
|
2
|
+
* Gzip-compress a byte array using the fastest native implementation available
|
|
3
|
+
* in the current runtime.
|
|
4
4
|
*/
|
|
5
5
|
export declare function gzipBytes(payload: Uint8Array): Promise<Uint8Array>;
|
|
6
6
|
/**
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"compression.d.ts","sourceRoot":"","sources":["../../src/utils/compression.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"compression.d.ts","sourceRoot":"","sources":["../../src/utils/compression.ts"],"names":[],"mappings":"AAgBA;;;GAGG;AACH,wBAAsB,SAAS,CAAC,OAAO,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC,CA6BxE;AAED;;;GAGG;AACH,wBAAsB,iBAAiB,CAAC,OAAO,EAAE,UAAU,GAAG,OAAO,CAAC;IACpE,MAAM,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;IACnC,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB,CAAC,CAoBD"}
|
|
@@ -1,67 +1,50 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
});
|
|
8
|
-
}
|
|
9
|
-
async function streamToBytes(stream) {
|
|
10
|
-
const reader = stream.getReader();
|
|
11
|
-
const chunks = [];
|
|
12
|
-
let total = 0;
|
|
13
|
-
try {
|
|
14
|
-
while (true) {
|
|
15
|
-
const { done, value } = await reader.read();
|
|
16
|
-
if (done)
|
|
17
|
-
break;
|
|
18
|
-
if (!value)
|
|
19
|
-
continue;
|
|
20
|
-
chunks.push(value);
|
|
21
|
-
total += value.length;
|
|
22
|
-
}
|
|
23
|
-
}
|
|
24
|
-
finally {
|
|
25
|
-
reader.releaseLock();
|
|
1
|
+
import { bytesToReadableStream, readAllBytesFromStream } from './bytes.js';
|
|
2
|
+
import { getBunRuntime, usesNodeRuntimeModules } from './internal-runtime.js';
|
|
3
|
+
let nodeZlibModulePromise = null;
|
|
4
|
+
async function getNodeZlibModule() {
|
|
5
|
+
if (!usesNodeRuntimeModules()) {
|
|
6
|
+
return null;
|
|
26
7
|
}
|
|
27
|
-
if (
|
|
28
|
-
|
|
29
|
-
if (chunks.length === 1)
|
|
30
|
-
return chunks[0] ?? new Uint8Array();
|
|
31
|
-
const merged = new Uint8Array(total);
|
|
32
|
-
let offset = 0;
|
|
33
|
-
for (const chunk of chunks) {
|
|
34
|
-
merged.set(chunk, offset);
|
|
35
|
-
offset += chunk.length;
|
|
8
|
+
if (!nodeZlibModulePromise) {
|
|
9
|
+
nodeZlibModulePromise = import('node:zlib').catch(() => null);
|
|
36
10
|
}
|
|
37
|
-
return
|
|
11
|
+
return nodeZlibModulePromise;
|
|
38
12
|
}
|
|
39
13
|
/**
|
|
40
|
-
* Gzip-compress a byte array using
|
|
41
|
-
*
|
|
14
|
+
* Gzip-compress a byte array using the fastest native implementation available
|
|
15
|
+
* in the current runtime.
|
|
42
16
|
*/
|
|
43
17
|
export async function gzipBytes(payload) {
|
|
18
|
+
const bun = getBunRuntime();
|
|
19
|
+
if (bun?.gzipSync) {
|
|
20
|
+
return bun.gzipSync(payload);
|
|
21
|
+
}
|
|
22
|
+
const nodeZlib = await getNodeZlibModule();
|
|
23
|
+
if (nodeZlib?.gzip) {
|
|
24
|
+
return await new Promise((resolve, reject) => {
|
|
25
|
+
nodeZlib.gzip(payload, (error, compressed) => {
|
|
26
|
+
if (error) {
|
|
27
|
+
reject(error);
|
|
28
|
+
return;
|
|
29
|
+
}
|
|
30
|
+
resolve(new Uint8Array(compressed));
|
|
31
|
+
});
|
|
32
|
+
});
|
|
33
|
+
}
|
|
44
34
|
if (typeof CompressionStream !== 'undefined') {
|
|
45
35
|
const stream = bytesToReadableStream(payload).pipeThrough(new CompressionStream('gzip'));
|
|
46
|
-
return
|
|
36
|
+
return readAllBytesFromStream(stream);
|
|
47
37
|
}
|
|
48
|
-
|
|
49
|
-
return await new Promise((resolve, reject) => {
|
|
50
|
-
nodeZlib.gzip(payload, (error, compressed) => {
|
|
51
|
-
if (error) {
|
|
52
|
-
reject(error);
|
|
53
|
-
return;
|
|
54
|
-
}
|
|
55
|
-
resolve(new Uint8Array(compressed));
|
|
56
|
-
});
|
|
57
|
-
});
|
|
38
|
+
throw new Error('Failed to gzip bytes, no compression implementation available');
|
|
58
39
|
}
|
|
59
40
|
/**
|
|
60
41
|
* Gzip-compress bytes and return a stream. When streaming compression is not
|
|
61
42
|
* available, falls back to eager compression and includes byteLength metadata.
|
|
62
43
|
*/
|
|
63
44
|
export async function gzipBytesToStream(payload) {
|
|
64
|
-
|
|
45
|
+
const bun = getBunRuntime();
|
|
46
|
+
const nodeZlib = await getNodeZlibModule();
|
|
47
|
+
if (!bun?.gzipSync && !nodeZlib && typeof CompressionStream !== 'undefined') {
|
|
65
48
|
const source = bytesToReadableStream(payload);
|
|
66
49
|
const gzipStream = new CompressionStream('gzip');
|
|
67
50
|
return {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"compression.js","sourceRoot":"","sources":["../../src/utils/compression.ts"],"names":[],"mappings":"AAAA,
|
|
1
|
+
{"version":3,"file":"compression.js","sourceRoot":"","sources":["../../src/utils/compression.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,qBAAqB,EAAE,sBAAsB,EAAE,MAAM,SAAS,CAAC;AACxE,OAAO,EAAE,aAAa,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC;AAE3E,IAAI,qBAAqB,GACvB,IAAI,CAAC;AAEP,KAAK,UAAU,iBAAiB,GAA+C;IAC7E,IAAI,CAAC,sBAAsB,EAAE,EAAE,CAAC;QAC9B,OAAO,IAAI,CAAC;IACd,CAAC;IACD,IAAI,CAAC,qBAAqB,EAAE,CAAC;QAC3B,qBAAqB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC;IAChE,CAAC;IACD,OAAO,qBAAqB,CAAC;AAAA,CAC9B;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,SAAS,CAAC,OAAmB,EAAuB;IACxE,MAAM,GAAG,GAAG,aAAa,EAAE,CAAC;IAC5B,IAAI,GAAG,EAAE,QAAQ,EAAE,CAAC;QAClB,OAAO,GAAG,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;IAC/B,CAAC;IAED,MAAM,QAAQ,GAAG,MAAM,iBAAiB,EAAE,CAAC;IAC3C,IAAI,QAAQ,EAAE,IAAI,EAAE,CAAC;QACnB,OAAO,MAAM,IAAI,OAAO,CAAa,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,CAAC;YACxD,QAAQ,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,UAAU,EAAE,EAAE,CAAC;gBAC5C,IAAI,KAAK,EAAE,CAAC;oBACV,MAAM,CAAC,KAAK,CAAC,CAAC;oBACd,OAAO;gBACT,CAAC;gBACD,OAAO,CAAC,IAAI,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;YAAA,CACrC,CAAC,CAAC;QAAA,CACJ,CAAC,CAAC;IACL,CAAC;IAED,IAAI,OAAO,iBAAiB,KAAK,WAAW,EAAE,CAAC;QAC7C,MAAM,MAAM,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC,WAAW,CACvD,IAAI,iBAAiB,CAAC,MAAM,CAA4C,CACzE,CAAC;QACF,OAAO,sBAAsB,CAAC,MAAM,CAAC,CAAC;IACxC,CAAC;IAED,MAAM,IAAI,KAAK,CACb,+DAA+D,CAChE,CAAC;AAAA,CACH;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAC,OAAmB,EAGxD;IACD,MAAM,GAAG,GAAG,aAAa,EAAE,CAAC;IAC5B,MAAM,QAAQ,GAAG,MAAM,iBAAiB,EAAE,CAAC;IAE3C,IAAI,CAAC,GAAG,EAAE,QAAQ,IAAI,CAAC,QAAQ,IAAI,OAAO,iBAAiB,KAAK,WAAW,EAAE,CAAC;QAC5E,MAAM,MAAM,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAC;QAC9C,MAAM,UAAU,GAAG,IAAI,iBAAiB,CAAC,MAAM,CAG9C,CAAC;QACF,OAAO;YACL,MAAM,EAAE,MAAM,CAAC,WAAW,CAAC,UAAU,CAAC;SACvC,CAAC;IACJ,CAAC;IAED,MAAM,UAAU,GAAG,MAAM,SAAS,CAAC,OAAO,CAAC,CAAC;IAC5C,OAAO;QACL,MAAM,EAAE,qBAAqB,CAAC,UAAU,CAAC;QACzC,UAAU,EAAE,UAAU,CAAC,MAAM;KAC9B,CAAC;AAAA,CACH"}
|
package/dist/utils/crypto.d.ts
CHANGED
|
@@ -1,7 +1,13 @@
|
|
|
1
|
+
export interface IncrementalSha256 {
|
|
2
|
+
update(chunk: Uint8Array): void;
|
|
3
|
+
digestHex(): Promise<string>;
|
|
4
|
+
}
|
|
5
|
+
export declare function createIncrementalSha256(): Promise<IncrementalSha256>;
|
|
1
6
|
/**
|
|
2
7
|
* Cross-runtime SHA-256 digest helper.
|
|
3
8
|
*
|
|
4
|
-
* Uses
|
|
9
|
+
* Uses native Bun/Node implementations on server runtimes, with Web Crypto
|
|
10
|
+
* fallback for browser and worker environments.
|
|
5
11
|
*/
|
|
6
12
|
export declare function sha256Hex(input: string | Uint8Array): Promise<string>;
|
|
7
13
|
//# sourceMappingURL=crypto.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"crypto.d.ts","sourceRoot":"","sources":["../../src/utils/crypto.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"crypto.d.ts","sourceRoot":"","sources":["../../src/utils/crypto.ts"],"names":[],"mappings":"AAyCA,MAAM,WAAW,iBAAiB;IAChC,MAAM,CAAC,KAAK,EAAE,UAAU,GAAG,IAAI,CAAC;IAChC,SAAS,IAAI,OAAO,CAAC,MAAM,CAAC,CAAC;CAC9B;AAED,wBAAsB,uBAAuB,IAAI,OAAO,CAAC,iBAAiB,CAAC,CAqC1E;AAED;;;;;GAKG;AACH,wBAAsB,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC,CA0B3E"}
|
package/dist/utils/crypto.js
CHANGED
|
@@ -1,25 +1,88 @@
|
|
|
1
|
+
import { concatByteChunks } from './bytes.js';
|
|
2
|
+
import { getBunRuntime, usesNodeRuntimeModules } from './internal-runtime.js';
|
|
1
3
|
const textEncoder = new TextEncoder();
|
|
2
4
|
function toHex(bytes) {
|
|
3
5
|
return Array.from(bytes)
|
|
4
6
|
.map((byte) => byte.toString(16).padStart(2, '0'))
|
|
5
7
|
.join('');
|
|
6
8
|
}
|
|
9
|
+
function toDigestBufferSource(payload) {
|
|
10
|
+
if (payload.buffer instanceof ArrayBuffer) {
|
|
11
|
+
return new Uint8Array(payload.buffer, payload.byteOffset, payload.byteLength);
|
|
12
|
+
}
|
|
13
|
+
const owned = new Uint8Array(payload.byteLength);
|
|
14
|
+
owned.set(payload);
|
|
15
|
+
return owned;
|
|
16
|
+
}
|
|
17
|
+
let nodeCryptoModulePromise = null;
|
|
18
|
+
async function getNodeCryptoModule() {
|
|
19
|
+
if (!usesNodeRuntimeModules()) {
|
|
20
|
+
return null;
|
|
21
|
+
}
|
|
22
|
+
if (!nodeCryptoModulePromise) {
|
|
23
|
+
nodeCryptoModulePromise = import('node:crypto').catch(() => null);
|
|
24
|
+
}
|
|
25
|
+
return nodeCryptoModulePromise;
|
|
26
|
+
}
|
|
27
|
+
export async function createIncrementalSha256() {
|
|
28
|
+
const bun = getBunRuntime();
|
|
29
|
+
if (bun?.CryptoHasher) {
|
|
30
|
+
const hasher = new bun.CryptoHasher('sha256');
|
|
31
|
+
return {
|
|
32
|
+
update(chunk) {
|
|
33
|
+
hasher.update(chunk);
|
|
34
|
+
},
|
|
35
|
+
async digestHex() {
|
|
36
|
+
return hasher.digest('hex');
|
|
37
|
+
},
|
|
38
|
+
};
|
|
39
|
+
}
|
|
40
|
+
const nodeCrypto = await getNodeCryptoModule();
|
|
41
|
+
if (nodeCrypto?.createHash) {
|
|
42
|
+
const hasher = nodeCrypto.createHash('sha256');
|
|
43
|
+
return {
|
|
44
|
+
update(chunk) {
|
|
45
|
+
hasher.update(chunk);
|
|
46
|
+
},
|
|
47
|
+
async digestHex() {
|
|
48
|
+
return hasher.digest('hex');
|
|
49
|
+
},
|
|
50
|
+
};
|
|
51
|
+
}
|
|
52
|
+
const chunks = [];
|
|
53
|
+
return {
|
|
54
|
+
update(chunk) {
|
|
55
|
+
if (chunk.length === 0)
|
|
56
|
+
return;
|
|
57
|
+
chunks.push(chunk.slice());
|
|
58
|
+
},
|
|
59
|
+
async digestHex() {
|
|
60
|
+
return sha256Hex(concatByteChunks(chunks));
|
|
61
|
+
},
|
|
62
|
+
};
|
|
63
|
+
}
|
|
7
64
|
/**
|
|
8
65
|
* Cross-runtime SHA-256 digest helper.
|
|
9
66
|
*
|
|
10
|
-
* Uses
|
|
67
|
+
* Uses native Bun/Node implementations on server runtimes, with Web Crypto
|
|
68
|
+
* fallback for browser and worker environments.
|
|
11
69
|
*/
|
|
12
70
|
export async function sha256Hex(input) {
|
|
13
71
|
const payload = typeof input === 'string' ? textEncoder.encode(input) : input;
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
72
|
+
const bun = getBunRuntime();
|
|
73
|
+
if (bun?.CryptoHasher) {
|
|
74
|
+
const hasher = new bun.CryptoHasher('sha256');
|
|
75
|
+
hasher.update(payload);
|
|
76
|
+
return hasher.digest('hex');
|
|
17
77
|
}
|
|
18
|
-
|
|
19
|
-
|
|
78
|
+
const nodeCrypto = await getNodeCryptoModule();
|
|
79
|
+
if (nodeCrypto?.createHash) {
|
|
20
80
|
return nodeCrypto.createHash('sha256').update(payload).digest('hex');
|
|
21
81
|
}
|
|
22
|
-
|
|
82
|
+
if (typeof crypto !== 'undefined' && crypto.subtle) {
|
|
83
|
+
const digestBuffer = await crypto.subtle.digest('SHA-256', toDigestBufferSource(payload));
|
|
84
|
+
return toHex(new Uint8Array(digestBuffer));
|
|
85
|
+
}
|
|
23
86
|
throw new Error('Failed to create SHA-256 hash, no crypto implementation available');
|
|
24
87
|
}
|
|
25
88
|
//# sourceMappingURL=crypto.js.map
|
package/dist/utils/crypto.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"crypto.js","sourceRoot":"","sources":["../../src/utils/crypto.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;AAEtC,SAAS,KAAK,CAAC,KAAiB,EAAU;IACxC,OAAO,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;SACrB,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;SACjD,IAAI,CAAC,EAAE,CAAC,CAAC;AAAA,CACb;AAED
|
|
1
|
+
{"version":3,"file":"crypto.js","sourceRoot":"","sources":["../../src/utils/crypto.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAC3C,OAAO,EAAE,aAAa,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC;AAE3E,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;AAEtC,SAAS,KAAK,CAAC,KAAiB,EAAU;IACxC,OAAO,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC;SACrB,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;SACjD,IAAI,CAAC,EAAE,CAAC,CAAC;AAAA,CACb;AAED,SAAS,oBAAoB,CAAC,OAAmB,EAA2B;IAC1E,IAAI,OAAO,CAAC,MAAM,YAAY,WAAW,EAAE,CAAC;QAC1C,OAAO,IAAI,UAAU,CACnB,OAAO,CAAC,MAAM,EACd,OAAO,CAAC,UAAU,EAClB,OAAO,CAAC,UAAU,CACnB,CAAC;IACJ,CAAC;IAED,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IACjD,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACnB,OAAO,KAAK,CAAC;AAAA,CACd;AAED,IAAI,uBAAuB,GAEhB,IAAI,CAAC;AAEhB,KAAK,UAAU,mBAAmB,GAEhC;IACA,IAAI,CAAC,sBAAsB,EAAE,EAAE,CAAC;QAC9B,OAAO,IAAI,CAAC;IACd,CAAC;IACD,IAAI,CAAC,uBAAuB,EAAE,CAAC;QAC7B,uBAAuB,GAAG,MAAM,CAAC,aAAa,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC;IACpE,CAAC;IACD,OAAO,uBAAuB,CAAC;AAAA,CAChC;AAOD,MAAM,CAAC,KAAK,UAAU,uBAAuB,GAA+B;IAC1E,MAAM,GAAG,GAAG,aAAa,EAAE,CAAC;IAC5B,IAAI,GAAG,EAAE,YAAY,EAAE,CAAC;QACtB,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAC9C,OAAO;YACL,MAAM,CAAC,KAAK,EAAE;gBACZ,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAAA,CACtB;YACD,KAAK,CAAC,SAAS,GAAG;gBAChB,OAAO,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAAA,CAC7B;SACF,CAAC;IACJ,CAAC;IAED,MAAM,UAAU,GAAG,MAAM,mBAAmB,EAAE,CAAC;IAC/C,IAAI,UAAU,EAAE,UAAU,EAAE,CAAC;QAC3B,MAAM,MAAM,GAAG,UAAU,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;QAC/C,OAAO;YACL,MAAM,CAAC,KAAK,EAAE;gBACZ,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAAA,CACtB;YACD,KAAK,CAAC,SAAS,GAAG;gBAChB,OAAO,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAAA,CAC7B;SACF,CAAC;IACJ,CAAC;IAED,MAAM,MAAM,GAAiB,EAAE,CAAC;IAChC,OAAO;QACL,MAAM,CAAC,KAAK,EAAE;YACZ,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;gBAAE,OAAO;YAC/B,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC;QAAA,CAC5B;QACD,KAAK,CAAC,SAAS,GAAG;YAChB,OAAO,SAAS,CAAC,gBAAgB,C
AAC,MAAM,CAAC,CAAC,CAAC;QAAA,CAC5C;KACF,CAAC;AAAA,CACH;AAED;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,SAAS,CAAC,KAA0B,EAAmB;IAC3E,MAAM,OAAO,GAAG,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;IAE9E,MAAM,GAAG,GAAG,aAAa,EAAE,CAAC;IAC5B,IAAI,GAAG,EAAE,YAAY,EAAE,CAAC;QACtB,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAC9C,MAAM,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QACvB,OAAO,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IAC9B,CAAC;IAED,MAAM,UAAU,GAAG,MAAM,mBAAmB,EAAE,CAAC;IAC/C,IAAI,UAAU,EAAE,UAAU,EAAE,CAAC;QAC3B,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IACvE,CAAC;IAED,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC;QACnD,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,MAAM,CAC7C,SAAS,EACT,oBAAoB,CAAC,OAAO,CAAC,CAC9B,CAAC;QACF,OAAO,KAAK,CAAC,IAAI,UAAU,CAAC,YAAY,CAAC,CAAC,CAAC;IAC7C,CAAC;IAED,MAAM,IAAI,KAAK,CACb,mEAAmE,CACpE,CAAC;AAAA,CACH"}
|
package/dist/utils/index.d.ts
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/utils/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,UAAU,CAAC;AACzB,cAAc,MAAM,CAAC;AACrB,cAAc,UAAU,CAAC;AACzB,cAAc,gCAAgC,CAAC;AAC/C,cAAc,kBAAkB,CAAC;AACjC,cAAc,OAAO,CAAC"}
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/utils/index.ts"],"names":[],"mappings":"AAAA,cAAc,SAAS,CAAC;AACxB,cAAc,eAAe,CAAC;AAC9B,cAAc,UAAU,CAAC;AACzB,cAAc,MAAM,CAAC;AACrB,cAAc,UAAU,CAAC;AACzB,cAAc,gCAAgC,CAAC;AAC/C,cAAc,kBAAkB,CAAC;AACjC,cAAc,OAAO,CAAC"}
|
package/dist/utils/index.js
CHANGED
package/dist/utils/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/utils/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,UAAU,CAAC;AACzB,cAAc,MAAM,CAAC;AACrB,cAAc,UAAU,CAAC;AACzB,cAAc,gCAAgC,CAAC;AAC/C,cAAc,kBAAkB,CAAC;AACjC,cAAc,OAAO,CAAC"}
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/utils/index.ts"],"names":[],"mappings":"AAAA,cAAc,SAAS,CAAC;AACxB,cAAc,eAAe,CAAC;AAC9B,cAAc,UAAU,CAAC;AACzB,cAAc,MAAM,CAAC;AACrB,cAAc,UAAU,CAAC;AACzB,cAAc,gCAAgC,CAAC;AAC/C,cAAc,kBAAkB,CAAC;AACjC,cAAc,OAAO,CAAC"}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/**
 * Structural type for Bun's incremental hasher instances
 * (created via `new Bun.CryptoHasher(algorithm)`).
 */
interface BunCryptoHasher {
    update(data: string | Uint8Array): void;
    digest(encoding: 'hex'): string;
}
/**
 * Minimal structural view of the `Bun` global — only the members this
 * package probes for. Every member is optional so feature detection
 * degrades gracefully when a member is missing.
 */
interface BunRuntime {
    CryptoHasher?: new (algorithm: string) => BunCryptoHasher;
    gzipSync?: (data: Uint8Array) => Uint8Array;
    readableStreamToBytes?: (stream: ReadableStream<Uint8Array>) => Promise<Uint8Array | ArrayBuffer>;
}
/** Returns the `Bun` global when present, otherwise null. */
export declare function getBunRuntime(): BunRuntime | null;
/** True when a `Deno` global exists or `process.versions.node` is a string. */
export declare function usesNodeRuntimeModules(): boolean;
export {};
|
|
13
|
+
//# sourceMappingURL=internal-runtime.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"internal-runtime.d.ts","sourceRoot":"","sources":["../../src/utils/internal-runtime.ts"],"names":[],"mappings":"AAAA,UAAU,eAAe;IACvB,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC;IACxC,MAAM,CAAC,QAAQ,EAAE,KAAK,GAAG,MAAM,CAAC;CACjC;AAED,UAAU,UAAU;IAClB,YAAY,CAAC,EAAE,KAAK,SAAS,EAAE,MAAM,KAAK,eAAe,CAAC;IAC1D,QAAQ,CAAC,EAAE,CAAC,IAAI,EAAE,UAAU,KAAK,UAAU,CAAC;IAC5C,qBAAqB,CAAC,EAAE,CACtB,MAAM,EAAE,cAAc,CAAC,UAAU,CAAC,KAC/B,OAAO,CAAC,UAAU,GAAG,WAAW,CAAC,CAAC;CACxC;AAgBD,wBAAgB,aAAa,IAAI,UAAU,GAAG,IAAI,CAEjD;AAED,wBAAgB,sBAAsB,IAAI,OAAO,CAMhD"}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/**
 * Runtime-detection helpers: sniff ambient globals rather than importing
 * runtime-specific APIs, so this module loads everywhere.
 */
function runtimeScope() {
    return globalThis;
}
/** The `Bun` global when present (running under Bun), otherwise null. */
export function getBunRuntime() {
    const scope = runtimeScope();
    return scope.Bun ?? null;
}
/** Whether `node:*` built-in modules can be imported here (Deno or Node). */
export function usesNodeRuntimeModules() {
    const scope = runtimeScope();
    if (scope.Deno !== undefined) {
        return true;
    }
    return typeof scope.process?.versions?.node === 'string';
}
|
|
14
|
+
//# sourceMappingURL=internal-runtime.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"internal-runtime.js","sourceRoot":"","sources":["../../src/utils/internal-runtime.ts"],"names":[],"mappings":"AAuBA,SAAS,iBAAiB,GAAmB;IAC3C,OAAO,UAA4B,CAAC;AAAA,CACrC;AAED,MAAM,UAAU,aAAa,GAAsB;IACjD,OAAO,iBAAiB,EAAE,CAAC,GAAG,IAAI,IAAI,CAAC;AAAA,CACxC;AAED,MAAM,UAAU,sBAAsB,GAAY;IAChD,MAAM,OAAO,GAAG,iBAAiB,EAAE,CAAC;IACpC,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;QAC/B,OAAO,IAAI,CAAC;IACd,CAAC;IACD,OAAO,OAAO,OAAO,CAAC,OAAO,EAAE,QAAQ,EAAE,IAAI,KAAK,QAAQ,CAAC;AAAA,CAC5D"}
|
package/package.json
CHANGED
package/src/snapshot-chunks.ts
CHANGED
|
@@ -2,52 +2,44 @@
|
|
|
2
2
|
* @syncular/core - Snapshot chunk encoding helpers
|
|
3
3
|
*/
|
|
4
4
|
|
|
5
|
-
export const SYNC_SNAPSHOT_CHUNK_ENCODING = 'json-row-frame-
|
|
5
|
+
export const SYNC_SNAPSHOT_CHUNK_ENCODING = 'json-row-batch-frame-v2';
|
|
6
6
|
export type SyncSnapshotChunkEncoding = typeof SYNC_SNAPSHOT_CHUNK_ENCODING;
|
|
7
7
|
|
|
8
8
|
export const SYNC_SNAPSHOT_CHUNK_COMPRESSION = 'gzip';
|
|
9
9
|
export type SyncSnapshotChunkCompression =
|
|
10
10
|
typeof SYNC_SNAPSHOT_CHUNK_COMPRESSION;
|
|
11
11
|
|
|
12
|
-
const
|
|
12
|
+
export const SYNC_SNAPSHOT_CHUNK_MAGIC = new Uint8Array([
|
|
13
|
+
0x53, 0x42, 0x46, 0x32,
|
|
14
|
+
]); // "SBF2"
|
|
13
15
|
const FRAME_LENGTH_BYTES = 4;
|
|
14
16
|
const MAX_FRAME_BYTE_LENGTH = 0xffff_ffff;
|
|
15
17
|
const snapshotRowFrameEncoder = new TextEncoder();
|
|
16
18
|
const snapshotRowFrameDecoder = new TextDecoder();
|
|
17
19
|
|
|
18
|
-
function
|
|
19
|
-
|
|
20
|
-
return serialized === undefined ? 'null' : serialized;
|
|
20
|
+
function normalizeRowBatchJson(rows: readonly unknown[]): string {
|
|
21
|
+
return JSON.stringify(rows);
|
|
21
22
|
}
|
|
22
23
|
|
|
23
24
|
/**
|
|
24
|
-
* Encode rows as framed JSON
|
|
25
|
+
* Encode rows as a single framed JSON batch without the format header.
|
|
25
26
|
*/
|
|
26
27
|
export function encodeSnapshotRowFrames(rows: readonly unknown[]): Uint8Array {
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
for (const row of rows) {
|
|
31
|
-
const payload = snapshotRowFrameEncoder.encode(normalizeRowJson(row));
|
|
32
|
-
if (payload.length > MAX_FRAME_BYTE_LENGTH) {
|
|
33
|
-
throw new Error(
|
|
34
|
-
`Snapshot row payload exceeds ${MAX_FRAME_BYTE_LENGTH} bytes`
|
|
35
|
-
);
|
|
36
|
-
}
|
|
37
|
-
payloads.push(payload);
|
|
38
|
-
totalByteLength += FRAME_LENGTH_BYTES + payload.length;
|
|
28
|
+
if (rows.length === 0) {
|
|
29
|
+
return new Uint8Array();
|
|
39
30
|
}
|
|
40
31
|
|
|
41
|
-
const
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
offset += FRAME_LENGTH_BYTES;
|
|
47
|
-
encoded.set(payload, offset);
|
|
48
|
-
offset += payload.length;
|
|
32
|
+
const payload = snapshotRowFrameEncoder.encode(normalizeRowBatchJson(rows));
|
|
33
|
+
if (payload.length > MAX_FRAME_BYTE_LENGTH) {
|
|
34
|
+
throw new Error(
|
|
35
|
+
`Snapshot row batch payload exceeds ${MAX_FRAME_BYTE_LENGTH} bytes`
|
|
36
|
+
);
|
|
49
37
|
}
|
|
50
38
|
|
|
39
|
+
const encoded = new Uint8Array(FRAME_LENGTH_BYTES + payload.length);
|
|
40
|
+
const view = new DataView(encoded.buffer, encoded.byteOffset, encoded.length);
|
|
41
|
+
view.setUint32(0, payload.length, false);
|
|
42
|
+
encoded.set(payload, FRAME_LENGTH_BYTES);
|
|
51
43
|
return encoded;
|
|
52
44
|
}
|
|
53
45
|
|
|
@@ -55,18 +47,18 @@ export function encodeSnapshotRowFrames(rows: readonly unknown[]): Uint8Array {
|
|
|
55
47
|
* Encode rows as framed JSON bytes with a format header.
|
|
56
48
|
*
|
|
57
49
|
* Format:
|
|
58
|
-
* - 4-byte magic header ("
|
|
50
|
+
* - 4-byte magic header ("SBF2")
|
|
59
51
|
* - repeated frames of:
|
|
60
52
|
* - 4-byte big-endian payload byte length
|
|
61
|
-
* - UTF-8 JSON payload
|
|
53
|
+
* - UTF-8 JSON array payload
|
|
62
54
|
*/
|
|
63
55
|
export function encodeSnapshotRows(rows: readonly unknown[]): Uint8Array {
|
|
64
56
|
const framedRows = encodeSnapshotRowFrames(rows);
|
|
65
|
-
const totalByteLength =
|
|
57
|
+
const totalByteLength = SYNC_SNAPSHOT_CHUNK_MAGIC.length + framedRows.length;
|
|
66
58
|
|
|
67
59
|
const encoded = new Uint8Array(totalByteLength);
|
|
68
|
-
encoded.set(
|
|
69
|
-
encoded.set(framedRows,
|
|
60
|
+
encoded.set(SYNC_SNAPSHOT_CHUNK_MAGIC, 0);
|
|
61
|
+
encoded.set(framedRows, SYNC_SNAPSHOT_CHUNK_MAGIC.length);
|
|
70
62
|
|
|
71
63
|
return encoded;
|
|
72
64
|
}
|
|
@@ -75,12 +67,12 @@ export function encodeSnapshotRows(rows: readonly unknown[]): Uint8Array {
|
|
|
75
67
|
* Decode framed JSON bytes into rows.
|
|
76
68
|
*/
|
|
77
69
|
export function decodeSnapshotRows(bytes: Uint8Array): unknown[] {
|
|
78
|
-
if (bytes.length <
|
|
70
|
+
if (bytes.length < SYNC_SNAPSHOT_CHUNK_MAGIC.length) {
|
|
79
71
|
throw new Error('Snapshot chunk payload is too small');
|
|
80
72
|
}
|
|
81
73
|
|
|
82
|
-
for (let index = 0; index <
|
|
83
|
-
const expected =
|
|
74
|
+
for (let index = 0; index < SYNC_SNAPSHOT_CHUNK_MAGIC.length; index += 1) {
|
|
75
|
+
const expected = SYNC_SNAPSHOT_CHUNK_MAGIC[index];
|
|
84
76
|
const actual = bytes[index];
|
|
85
77
|
if (actual !== expected) {
|
|
86
78
|
throw new Error('Unexpected snapshot chunk format');
|
|
@@ -89,7 +81,7 @@ export function decodeSnapshotRows(bytes: Uint8Array): unknown[] {
|
|
|
89
81
|
|
|
90
82
|
const rows: unknown[] = [];
|
|
91
83
|
const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.length);
|
|
92
|
-
let offset =
|
|
84
|
+
let offset = SYNC_SNAPSHOT_CHUNK_MAGIC.length;
|
|
93
85
|
|
|
94
86
|
while (offset < bytes.length) {
|
|
95
87
|
if (offset + FRAME_LENGTH_BYTES > bytes.length) {
|
|
@@ -105,7 +97,11 @@ export function decodeSnapshotRows(bytes: Uint8Array): unknown[] {
|
|
|
105
97
|
|
|
106
98
|
const payload = bytes.subarray(offset, offset + payloadLength);
|
|
107
99
|
offset += payloadLength;
|
|
108
|
-
|
|
100
|
+
const parsed = JSON.parse(snapshotRowFrameDecoder.decode(payload));
|
|
101
|
+
if (!Array.isArray(parsed)) {
|
|
102
|
+
throw new Error('Snapshot chunk frame payload must be a JSON array');
|
|
103
|
+
}
|
|
104
|
+
rows.push(...parsed);
|
|
109
105
|
}
|
|
110
106
|
|
|
111
107
|
return rows;
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import { getBunRuntime } from './internal-runtime';
|
|
2
|
+
|
|
3
|
+
export function bytesToReadableStream(
|
|
4
|
+
bytes: Uint8Array
|
|
5
|
+
): ReadableStream<Uint8Array> {
|
|
6
|
+
return new ReadableStream<Uint8Array>({
|
|
7
|
+
start(controller) {
|
|
8
|
+
controller.enqueue(bytes);
|
|
9
|
+
controller.close();
|
|
10
|
+
},
|
|
11
|
+
});
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
export function concatByteChunks(chunks: readonly Uint8Array[]): Uint8Array {
|
|
15
|
+
if (chunks.length === 0) {
|
|
16
|
+
return new Uint8Array();
|
|
17
|
+
}
|
|
18
|
+
if (chunks.length === 1) {
|
|
19
|
+
return chunks[0] ?? new Uint8Array();
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
let total = 0;
|
|
23
|
+
for (const chunk of chunks) {
|
|
24
|
+
total += chunk.length;
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
const merged = new Uint8Array(total);
|
|
28
|
+
let offset = 0;
|
|
29
|
+
for (const chunk of chunks) {
|
|
30
|
+
merged.set(chunk, offset);
|
|
31
|
+
offset += chunk.length;
|
|
32
|
+
}
|
|
33
|
+
return merged;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export async function readAllBytesFromStream(
|
|
37
|
+
stream: ReadableStream<Uint8Array>
|
|
38
|
+
): Promise<Uint8Array> {
|
|
39
|
+
const bun = getBunRuntime();
|
|
40
|
+
if (bun?.readableStreamToBytes) {
|
|
41
|
+
const bytes = await bun.readableStreamToBytes(stream);
|
|
42
|
+
return bytes instanceof Uint8Array ? bytes : new Uint8Array(bytes);
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
const reader = stream.getReader();
|
|
46
|
+
const chunks: Uint8Array[] = [];
|
|
47
|
+
let total = 0;
|
|
48
|
+
|
|
49
|
+
try {
|
|
50
|
+
while (true) {
|
|
51
|
+
const { done, value } = await reader.read();
|
|
52
|
+
if (done) break;
|
|
53
|
+
if (!value) continue;
|
|
54
|
+
chunks.push(value);
|
|
55
|
+
total += value.length;
|
|
56
|
+
}
|
|
57
|
+
} finally {
|
|
58
|
+
reader.releaseLock();
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
if (chunks.length === 0) {
|
|
62
|
+
return new Uint8Array();
|
|
63
|
+
}
|
|
64
|
+
if (chunks.length === 1) {
|
|
65
|
+
return chunks[0] ?? new Uint8Array();
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
const merged = new Uint8Array(total);
|
|
69
|
+
let offset = 0;
|
|
70
|
+
for (const chunk of chunks) {
|
|
71
|
+
merged.set(chunk, offset);
|
|
72
|
+
offset += chunk.length;
|
|
73
|
+
}
|
|
74
|
+
return merged;
|
|
75
|
+
}
|
package/src/utils/compression.ts
CHANGED
|
@@ -1,68 +1,52 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
start(controller) {
|
|
4
|
-
controller.enqueue(bytes);
|
|
5
|
-
controller.close();
|
|
6
|
-
},
|
|
7
|
-
});
|
|
8
|
-
}
|
|
1
|
+
import { bytesToReadableStream, readAllBytesFromStream } from './bytes';
|
|
2
|
+
import { getBunRuntime, usesNodeRuntimeModules } from './internal-runtime';
|
|
9
3
|
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
): Promise<Uint8Array> {
|
|
13
|
-
const reader = stream.getReader();
|
|
14
|
-
const chunks: Uint8Array[] = [];
|
|
15
|
-
let total = 0;
|
|
4
|
+
let nodeZlibModulePromise: Promise<typeof import('node:zlib') | null> | null =
|
|
5
|
+
null;
|
|
16
6
|
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
if (done) break;
|
|
21
|
-
if (!value) continue;
|
|
22
|
-
chunks.push(value);
|
|
23
|
-
total += value.length;
|
|
24
|
-
}
|
|
25
|
-
} finally {
|
|
26
|
-
reader.releaseLock();
|
|
7
|
+
async function getNodeZlibModule(): Promise<typeof import('node:zlib') | null> {
|
|
8
|
+
if (!usesNodeRuntimeModules()) {
|
|
9
|
+
return null;
|
|
27
10
|
}
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
if (chunks.length === 1) return chunks[0] ?? new Uint8Array();
|
|
31
|
-
|
|
32
|
-
const merged = new Uint8Array(total);
|
|
33
|
-
let offset = 0;
|
|
34
|
-
for (const chunk of chunks) {
|
|
35
|
-
merged.set(chunk, offset);
|
|
36
|
-
offset += chunk.length;
|
|
11
|
+
if (!nodeZlibModulePromise) {
|
|
12
|
+
nodeZlibModulePromise = import('node:zlib').catch(() => null);
|
|
37
13
|
}
|
|
38
|
-
return
|
|
14
|
+
return nodeZlibModulePromise;
|
|
39
15
|
}
|
|
40
16
|
|
|
41
17
|
/**
|
|
42
|
-
* Gzip-compress a byte array using
|
|
43
|
-
*
|
|
18
|
+
* Gzip-compress a byte array using the fastest native implementation available
|
|
19
|
+
* in the current runtime.
|
|
44
20
|
*/
|
|
45
21
|
export async function gzipBytes(payload: Uint8Array): Promise<Uint8Array> {
|
|
22
|
+
const bun = getBunRuntime();
|
|
23
|
+
if (bun?.gzipSync) {
|
|
24
|
+
return bun.gzipSync(payload);
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
const nodeZlib = await getNodeZlibModule();
|
|
28
|
+
if (nodeZlib?.gzip) {
|
|
29
|
+
return await new Promise<Uint8Array>((resolve, reject) => {
|
|
30
|
+
nodeZlib.gzip(payload, (error, compressed) => {
|
|
31
|
+
if (error) {
|
|
32
|
+
reject(error);
|
|
33
|
+
return;
|
|
34
|
+
}
|
|
35
|
+
resolve(new Uint8Array(compressed));
|
|
36
|
+
});
|
|
37
|
+
});
|
|
38
|
+
}
|
|
39
|
+
|
|
46
40
|
if (typeof CompressionStream !== 'undefined') {
|
|
47
41
|
const stream = bytesToReadableStream(payload).pipeThrough(
|
|
48
|
-
new CompressionStream('gzip') as
|
|
49
|
-
Uint8Array,
|
|
50
|
-
Uint8Array
|
|
51
|
-
>
|
|
42
|
+
new CompressionStream('gzip') as TransformStream<Uint8Array, Uint8Array>
|
|
52
43
|
);
|
|
53
|
-
return
|
|
44
|
+
return readAllBytesFromStream(stream);
|
|
54
45
|
}
|
|
55
46
|
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
if (error) {
|
|
60
|
-
reject(error);
|
|
61
|
-
return;
|
|
62
|
-
}
|
|
63
|
-
resolve(new Uint8Array(compressed));
|
|
64
|
-
});
|
|
65
|
-
});
|
|
47
|
+
throw new Error(
|
|
48
|
+
'Failed to gzip bytes, no compression implementation available'
|
|
49
|
+
);
|
|
66
50
|
}
|
|
67
51
|
|
|
68
52
|
/**
|
|
@@ -73,11 +57,15 @@ export async function gzipBytesToStream(payload: Uint8Array): Promise<{
|
|
|
73
57
|
stream: ReadableStream<Uint8Array>;
|
|
74
58
|
byteLength?: number;
|
|
75
59
|
}> {
|
|
76
|
-
|
|
60
|
+
const bun = getBunRuntime();
|
|
61
|
+
const nodeZlib = await getNodeZlibModule();
|
|
62
|
+
|
|
63
|
+
if (!bun?.gzipSync && !nodeZlib && typeof CompressionStream !== 'undefined') {
|
|
77
64
|
const source = bytesToReadableStream(payload);
|
|
78
|
-
const gzipStream = new CompressionStream(
|
|
79
|
-
|
|
80
|
-
|
|
65
|
+
const gzipStream = new CompressionStream('gzip') as TransformStream<
|
|
66
|
+
Uint8Array,
|
|
67
|
+
Uint8Array
|
|
68
|
+
>;
|
|
81
69
|
return {
|
|
82
70
|
stream: source.pipeThrough(gzipStream),
|
|
83
71
|
};
|
package/src/utils/crypto.ts
CHANGED
|
@@ -1,3 +1,6 @@
|
|
|
1
|
+
import { concatByteChunks } from './bytes';
|
|
2
|
+
import { getBunRuntime, usesNodeRuntimeModules } from './internal-runtime';
|
|
3
|
+
|
|
1
4
|
const textEncoder = new TextEncoder();
|
|
2
5
|
|
|
3
6
|
function toHex(bytes: Uint8Array): string {
|
|
@@ -6,27 +9,109 @@ function toHex(bytes: Uint8Array): string {
|
|
|
6
9
|
.join('');
|
|
7
10
|
}
|
|
8
11
|
|
|
12
|
+
function toDigestBufferSource(payload: Uint8Array): Uint8Array<ArrayBuffer> {
|
|
13
|
+
if (payload.buffer instanceof ArrayBuffer) {
|
|
14
|
+
return new Uint8Array(
|
|
15
|
+
payload.buffer,
|
|
16
|
+
payload.byteOffset,
|
|
17
|
+
payload.byteLength
|
|
18
|
+
);
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
const owned = new Uint8Array(payload.byteLength);
|
|
22
|
+
owned.set(payload);
|
|
23
|
+
return owned;
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
let nodeCryptoModulePromise: Promise<
|
|
27
|
+
typeof import('node:crypto') | null
|
|
28
|
+
> | null = null;
|
|
29
|
+
|
|
30
|
+
async function getNodeCryptoModule(): Promise<
|
|
31
|
+
typeof import('node:crypto') | null
|
|
32
|
+
> {
|
|
33
|
+
if (!usesNodeRuntimeModules()) {
|
|
34
|
+
return null;
|
|
35
|
+
}
|
|
36
|
+
if (!nodeCryptoModulePromise) {
|
|
37
|
+
nodeCryptoModulePromise = import('node:crypto').catch(() => null);
|
|
38
|
+
}
|
|
39
|
+
return nodeCryptoModulePromise;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
/**
 * Incremental SHA-256 accumulator: feed bytes with `update`, then read the
 * hex-encoded digest once via `digestHex`.
 */
export interface IncrementalSha256 {
  // Fold another chunk into the running hash.
  update(chunk: Uint8Array): void;
  // Finish hashing and return the hex digest.
  digestHex(): Promise<string>;
}
|
|
46
|
+
|
|
47
|
+
export async function createIncrementalSha256(): Promise<IncrementalSha256> {
|
|
48
|
+
const bun = getBunRuntime();
|
|
49
|
+
if (bun?.CryptoHasher) {
|
|
50
|
+
const hasher = new bun.CryptoHasher('sha256');
|
|
51
|
+
return {
|
|
52
|
+
update(chunk) {
|
|
53
|
+
hasher.update(chunk);
|
|
54
|
+
},
|
|
55
|
+
async digestHex() {
|
|
56
|
+
return hasher.digest('hex');
|
|
57
|
+
},
|
|
58
|
+
};
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
const nodeCrypto = await getNodeCryptoModule();
|
|
62
|
+
if (nodeCrypto?.createHash) {
|
|
63
|
+
const hasher = nodeCrypto.createHash('sha256');
|
|
64
|
+
return {
|
|
65
|
+
update(chunk) {
|
|
66
|
+
hasher.update(chunk);
|
|
67
|
+
},
|
|
68
|
+
async digestHex() {
|
|
69
|
+
return hasher.digest('hex');
|
|
70
|
+
},
|
|
71
|
+
};
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
const chunks: Uint8Array[] = [];
|
|
75
|
+
return {
|
|
76
|
+
update(chunk) {
|
|
77
|
+
if (chunk.length === 0) return;
|
|
78
|
+
chunks.push(chunk.slice());
|
|
79
|
+
},
|
|
80
|
+
async digestHex() {
|
|
81
|
+
return sha256Hex(concatByteChunks(chunks));
|
|
82
|
+
},
|
|
83
|
+
};
|
|
84
|
+
}
|
|
85
|
+
|
|
9
86
|
/**
|
|
10
87
|
* Cross-runtime SHA-256 digest helper.
|
|
11
88
|
*
|
|
12
|
-
* Uses
|
|
89
|
+
* Uses native Bun/Node implementations on server runtimes, with Web Crypto
|
|
90
|
+
* fallback for browser and worker environments.
|
|
13
91
|
*/
|
|
14
92
|
export async function sha256Hex(input: string | Uint8Array): Promise<string> {
|
|
15
93
|
const payload = typeof input === 'string' ? textEncoder.encode(input) : input;
|
|
16
94
|
|
|
95
|
+
const bun = getBunRuntime();
|
|
96
|
+
if (bun?.CryptoHasher) {
|
|
97
|
+
const hasher = new bun.CryptoHasher('sha256');
|
|
98
|
+
hasher.update(payload);
|
|
99
|
+
return hasher.digest('hex');
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
const nodeCrypto = await getNodeCryptoModule();
|
|
103
|
+
if (nodeCrypto?.createHash) {
|
|
104
|
+
return nodeCrypto.createHash('sha256').update(payload).digest('hex');
|
|
105
|
+
}
|
|
106
|
+
|
|
17
107
|
if (typeof crypto !== 'undefined' && crypto.subtle) {
|
|
18
108
|
const digestBuffer = await crypto.subtle.digest(
|
|
19
109
|
'SHA-256',
|
|
20
|
-
payload
|
|
110
|
+
toDigestBufferSource(payload)
|
|
21
111
|
);
|
|
22
112
|
return toHex(new Uint8Array(digestBuffer));
|
|
23
113
|
}
|
|
24
114
|
|
|
25
|
-
try {
|
|
26
|
-
const nodeCrypto = await import('node:crypto');
|
|
27
|
-
return nodeCrypto.createHash('sha256').update(payload).digest('hex');
|
|
28
|
-
} catch {}
|
|
29
|
-
|
|
30
115
|
throw new Error(
|
|
31
116
|
'Failed to create SHA-256 hash, no crypto implementation available'
|
|
32
117
|
);
|
package/src/utils/index.ts
CHANGED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
/**
 * Structural type for Bun's incremental hasher instances
 * (created via `new Bun.CryptoHasher(algorithm)`).
 */
interface BunCryptoHasher {
  update(data: string | Uint8Array): void;
  digest(encoding: 'hex'): string;
}

/**
 * The subset of the `Bun` global this package relies on. Every member is
 * optional so feature detection degrades gracefully when one is missing.
 */
interface BunRuntime {
  CryptoHasher?: new (algorithm: string) => BunCryptoHasher;
  gzipSync?: (data: Uint8Array) => Uint8Array;
  readableStreamToBytes?: (
    stream: ReadableStream<Uint8Array>
  ) => Promise<Uint8Array | ArrayBuffer>;
}
|
|
13
|
+
|
|
14
|
+
/**
 * globalThis widened with the runtime-specific globals probed below
 * (Bun, Deno, Node's process). All optional: absent off that runtime.
 */
type RuntimeGlobals = typeof globalThis & {
  Bun?: BunRuntime;
  Deno?: object;
  process?: {
    versions?: {
      node?: string;
    };
  };
};
|
|
23
|
+
|
|
24
|
+
function getRuntimeGlobals(): RuntimeGlobals {
|
|
25
|
+
return globalThis as RuntimeGlobals;
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
export function getBunRuntime(): BunRuntime | null {
|
|
29
|
+
return getRuntimeGlobals().Bun ?? null;
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
export function usesNodeRuntimeModules(): boolean {
|
|
33
|
+
const globals = getRuntimeGlobals();
|
|
34
|
+
if (globals.Deno !== undefined) {
|
|
35
|
+
return true;
|
|
36
|
+
}
|
|
37
|
+
return typeof globals.process?.versions?.node === 'string';
|
|
38
|
+
}
|