rac-delta 1.0.9 → 1.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core/services/hasher-service.d.ts +3 -3
- package/dist/core/services/hasher-service.d.ts.map +1 -1
- package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts +1 -2
- package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts.map +1 -1
- package/dist/infrastructure/services/hash-wasm-hasher-service.js +26 -27
- package/dist/infrastructure/services/memory-delta-service.d.ts.map +1 -1
- package/dist/infrastructure/services/memory-delta-service.js +4 -2
- package/package.json +1 -1
|
@@ -16,13 +16,13 @@ export interface HasherService {
|
|
|
16
16
|
*/
|
|
17
17
|
hashFile(filePath: string, rootDir: string, chunkSize: number): Promise<FileEntry>;
|
|
18
18
|
/**
|
|
19
|
-
* Will process a stream of Chunks and return
|
|
19
|
+
* Will process a stream of Chunks and return in a callback each processed chunk
|
|
20
20
|
*
|
|
21
21
|
* @param stream
|
|
22
22
|
* @param chunkSize The size (in bytes) chunks will need to be processed and emited.
|
|
23
|
-
* @param onChunk callback that returns the processed
|
|
23
|
+
* @param onChunk callback that returns the processed chunk
|
|
24
24
|
*/
|
|
25
|
-
hashStream(stream: AsyncChunkStream, chunkSize: number, onChunk
|
|
25
|
+
hashStream(stream: AsyncChunkStream, chunkSize: number, onChunk: (data: Buffer, chunk: Chunk) => void): Promise<void>;
|
|
26
26
|
/**
|
|
27
27
|
* Returns a hash of a buffer
|
|
28
28
|
*
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"hasher-service.d.ts","sourceRoot":"","sources":["../../../src/core/services/hasher-service.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AACnD,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAE7C,MAAM,WAAW,eAAe;IAC9B,MAAM,CAAC,IAAI,EAAE,UAAU,GAAG,MAAM,GAAG,IAAI,CAAC;IAExC,MAAM,CAAC,QAAQ,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;CAClC;AAED,MAAM,WAAW,aAAa;IAC5B;;;;;;;;OAQG;IACH,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC;IAEnF;;;;;;OAMG;IACH,UAAU,CACR,MAAM,EAAE,gBAAgB,EACxB,SAAS,EAAE,MAAM,EACjB,OAAO,
|
|
1
|
+
{"version":3,"file":"hasher-service.d.ts","sourceRoot":"","sources":["../../../src/core/services/hasher-service.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AACnD,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAE7C,MAAM,WAAW,eAAe;IAC9B,MAAM,CAAC,IAAI,EAAE,UAAU,GAAG,MAAM,GAAG,IAAI,CAAC;IAExC,MAAM,CAAC,QAAQ,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;CAClC;AAED,MAAM,WAAW,aAAa;IAC5B;;;;;;;;OAQG;IACH,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC;IAEnF;;;;;;OAMG;IACH,UAAU,CACR,MAAM,EAAE,gBAAgB,EACxB,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,KAAK,IAAI,GAC5C,OAAO,CAAC,IAAI,CAAC,CAAC;IAEjB;;;;OAIG;IACH,UAAU,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IAE9C;;;;;OAKG;IACH,WAAW,CAAC,IAAI,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IAEtE;;;;;OAKG;IACH,UAAU,CAAC,IAAI,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IAEjE,qBAAqB,IAAI,OAAO,CAAC,eAAe,CAAC,CAAC;CACnD"}
|
|
@@ -1,10 +1,9 @@
|
|
|
1
1
|
import { HasherService, StreamingHasher } from '../../core/services';
|
|
2
2
|
import { AsyncChunkStream } from '../../core/services';
|
|
3
3
|
import { FileEntry, Chunk } from '../../core/models';
|
|
4
|
-
import { Nullish } from '../../core/types';
|
|
5
4
|
export declare class HashWasmHasherService implements HasherService {
|
|
6
5
|
hashFile(filePath: string, rootDir: string, chunkSize: number): Promise<FileEntry>;
|
|
7
|
-
hashStream(stream: AsyncChunkStream, chunkSize: number, onChunk
|
|
6
|
+
hashStream(stream: AsyncChunkStream, chunkSize: number, onChunk: (data: Buffer, chunk: Chunk) => void): Promise<void>;
|
|
8
7
|
verifyChunk(data: Uint8Array, expectedHash: string): Promise<boolean>;
|
|
9
8
|
hashBuffer(data: Uint8Array): Promise<string>;
|
|
10
9
|
verifyFile(path: string, expectedHash: string): Promise<boolean>;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"hash-wasm-hasher-service.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/services/hash-wasm-hasher-service.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AAErE,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,mBAAmB,CAAC;
|
|
1
|
+
{"version":3,"file":"hash-wasm-hasher-service.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/services/hash-wasm-hasher-service.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AAErE,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,mBAAmB,CAAC;AAGrD,qBAAa,qBAAsB,YAAW,aAAa;IACnD,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC;IAiDlF,UAAU,CACd,MAAM,EAAE,gBAAgB,EACxB,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,KAAK,IAAI,GAC5C,OAAO,CAAC,IAAI,CAAC;IA+EV,WAAW,CAAC,IAAI,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAQrE,UAAU,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC;IAM7C,UAAU,CAAC,IAAI,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAgBhE,qBAAqB,IAAI,OAAO,CAAC,eAAe,CAAC;CAYxD"}
|
|
@@ -50,52 +50,51 @@ class HashWasmHasherService {
|
|
|
50
50
|
if (!chunkSize || chunkSize <= 0) {
|
|
51
51
|
throw new Error('chunkSize must be a positive number.');
|
|
52
52
|
}
|
|
53
|
-
const chunks = [];
|
|
54
53
|
let offset = 0;
|
|
55
|
-
let buffer = Buffer.alloc(0);
|
|
54
|
+
let buffer = Buffer.allocUnsafe(chunkSize);
|
|
55
|
+
let bufferLen = 0;
|
|
56
56
|
try {
|
|
57
57
|
for await (const data of stream) {
|
|
58
|
+
console.log('[HASHSTREAM] Received data length:', data.length, 'bufferLen', bufferLen, 'rss:', process.memoryUsage().rss);
|
|
58
59
|
let input = Buffer.isBuffer(data) ? data : Buffer.from(data);
|
|
59
60
|
while (input.length > 0) {
|
|
60
|
-
const
|
|
61
|
-
|
|
62
|
-
|
|
61
|
+
const take = Math.min(chunkSize - bufferLen, input.length);
|
|
62
|
+
input.copy(buffer, bufferLen, 0, take);
|
|
63
|
+
bufferLen += take;
|
|
63
64
|
input = input.subarray(take);
|
|
64
|
-
if (
|
|
65
|
+
if (bufferLen === chunkSize) {
|
|
66
|
+
const chunkView = buffer.subarray(0, bufferLen);
|
|
67
|
+
const chunk = Buffer.from(chunkView);
|
|
65
68
|
const chunkHasher = await (0, hash_wasm_1.createBLAKE3)();
|
|
66
|
-
chunkHasher.update(
|
|
69
|
+
chunkHasher.update(chunk);
|
|
67
70
|
const chunkHash = chunkHasher.digest('hex');
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
}
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
offset,
|
|
75
|
-
size: buffer.length,
|
|
76
|
-
});
|
|
77
|
-
offset += buffer.length;
|
|
78
|
-
buffer = Buffer.alloc(0);
|
|
71
|
+
// onChunk could be a promise
|
|
72
|
+
console.log('[HASHSTREAM] Emitting chunk', { offset, size: bufferLen });
|
|
73
|
+
await Promise.resolve(onChunk(chunk, { hash: chunkHash, offset, size: bufferLen }));
|
|
74
|
+
console.log('[HASHSTREAM] onChunk done', { offset });
|
|
75
|
+
offset += bufferLen;
|
|
76
|
+
bufferLen = 0;
|
|
79
77
|
}
|
|
80
78
|
}
|
|
81
79
|
}
|
|
82
|
-
if (
|
|
80
|
+
if (bufferLen > 0) {
|
|
81
|
+
const chunkView = buffer.subarray(0, bufferLen);
|
|
82
|
+
const chunk = Buffer.from(chunkView);
|
|
83
83
|
const chunkHasher = await (0, hash_wasm_1.createBLAKE3)();
|
|
84
|
-
chunkHasher.update(
|
|
84
|
+
chunkHasher.update(chunk);
|
|
85
85
|
const chunkHash = chunkHasher.digest('hex');
|
|
86
|
-
if
|
|
87
|
-
|
|
88
|
-
}
|
|
89
|
-
chunks.push({
|
|
86
|
+
console.log('[HASHSTREAM] Emitting chunk on if', { offset, size: bufferLen });
|
|
87
|
+
await Promise.resolve(onChunk(chunk, {
|
|
90
88
|
hash: chunkHash,
|
|
91
89
|
offset,
|
|
92
|
-
size:
|
|
93
|
-
});
|
|
90
|
+
size: bufferLen,
|
|
91
|
+
}));
|
|
92
|
+
console.log('[HASHSTREAM] onChunk if done', { offset });
|
|
94
93
|
}
|
|
95
94
|
if (stream.close) {
|
|
96
95
|
await stream.close();
|
|
97
96
|
}
|
|
98
|
-
|
|
97
|
+
console.log('[HASHSTREAM] Finished.');
|
|
99
98
|
}
|
|
100
99
|
catch (err) {
|
|
101
100
|
if (err instanceof Error) {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"memory-delta-service.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/services/memory-delta-service.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAC;AACpF,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,
|
|
1
|
+
{"version":3,"file":"memory-delta-service.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/services/memory-delta-service.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAC;AACpF,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,EAAqB,MAAM,mBAAmB,CAAC;AAErF,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE3C,qBAAa,kBAAmB,YAAW,YAAY;IACzC,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAN,MAAM,EAAE,aAAa;IAE5C,wBAAwB,CAC5B,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,WAAW,SAAI,EACf,cAAc,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,GACjC,OAAO,CAAC,OAAO,CAAC;IA8Cb,yBAAyB,CAC7B,MAAM,EAAE,gBAAgB,EACxB,IAAI,EAAE,MAAM,EACZ,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,SAAS,CAAC;YAwBN,SAAS;IAuBxB,OAAO,CAAC,iBAAiB;IAYzB,OAAO,CAAC,WAAW;IAanB,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAiE3D,UAAU,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,SAAS,GAAG,SAAS;IA0CpD,gBAAgB,CAAC,UAAU,EAAE,OAAO,EAAE,WAAW,EAAE,OAAO,GAAG,IAAI,GAAG,OAAO,CAAC,SAAS,CAAC;IAkBtF,kBAAkB,CAAC,UAAU,EAAE,OAAO,GAAG,IAAI,EAAE,WAAW,EAAE,OAAO,GAAG,OAAO,CAAC,SAAS,CAAC;CAG/F"}
|
|
@@ -47,8 +47,10 @@ class MemoryDeltaService {
|
|
|
47
47
|
}
|
|
48
48
|
async createFileEntryFromStream(stream, path, chunkSize) {
|
|
49
49
|
const fileHasher = await this.hasher.createStreamingHasher();
|
|
50
|
-
const chunks =
|
|
51
|
-
|
|
50
|
+
const chunks = [];
|
|
51
|
+
await this.hasher.hashStream(stream, chunkSize, (data, chunk) => {
|
|
52
|
+
fileHasher.update(data);
|
|
53
|
+
chunks.push(chunk);
|
|
52
54
|
});
|
|
53
55
|
const fileHash = fileHasher.digest('hex');
|
|
54
56
|
const totalSize = chunks.reduce((total, chunk) => total + chunk.size, 0);
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "rac-delta",
|
|
3
3
|
"description": "Storage agnostic delta patching implementation of rac-delta protocol for NodeJs. With streaming support and file reconstruction.",
|
|
4
|
-
"version": "1.0.
|
|
4
|
+
"version": "1.0.11",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"types": "dist/index.d.ts",
|
|
7
7
|
"repository": {
|