rac-delta 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +1 -0
- package/dist/core/adapters/index.d.ts +2 -0
- package/dist/core/adapters/index.d.ts.map +1 -0
- package/dist/core/adapters/index.js +17 -0
- package/dist/core/adapters/storage-adapter.d.ts +125 -0
- package/dist/core/adapters/storage-adapter.d.ts.map +1 -0
- package/dist/core/adapters/storage-adapter.js +14 -0
- package/dist/core/config/index.d.ts +2 -0
- package/dist/core/config/index.d.ts.map +1 -0
- package/dist/core/config/index.js +17 -0
- package/dist/core/config/rac-delta-config.d.ts +132 -0
- package/dist/core/config/rac-delta-config.d.ts.map +1 -0
- package/dist/core/config/rac-delta-config.js +2 -0
- package/dist/core/exceptions.d.ts +25 -0
- package/dist/core/exceptions.d.ts.map +1 -0
- package/dist/core/exceptions.js +51 -0
- package/dist/core/models/chunk.d.ts +12 -0
- package/dist/core/models/chunk.d.ts.map +1 -0
- package/dist/core/models/chunk.js +2 -0
- package/dist/core/models/delta-plan.d.ts +12 -0
- package/dist/core/models/delta-plan.d.ts.map +1 -0
- package/dist/core/models/delta-plan.js +2 -0
- package/dist/core/models/file-entry.d.ts +9 -0
- package/dist/core/models/file-entry.d.ts.map +1 -0
- package/dist/core/models/file-entry.js +2 -0
- package/dist/core/models/index.d.ts +5 -0
- package/dist/core/models/index.d.ts.map +1 -0
- package/dist/core/models/index.js +20 -0
- package/dist/core/models/rd-index.d.ts +8 -0
- package/dist/core/models/rd-index.d.ts.map +1 -0
- package/dist/core/models/rd-index.js +2 -0
- package/dist/core/pipelines/download-pipeline.d.ts +142 -0
- package/dist/core/pipelines/download-pipeline.d.ts.map +1 -0
- package/dist/core/pipelines/download-pipeline.js +64 -0
- package/dist/core/pipelines/index.d.ts +3 -0
- package/dist/core/pipelines/index.d.ts.map +1 -0
- package/dist/core/pipelines/index.js +18 -0
- package/dist/core/pipelines/upload-pipeline.d.ts +60 -0
- package/dist/core/pipelines/upload-pipeline.d.ts.map +1 -0
- package/dist/core/pipelines/upload-pipeline.js +34 -0
- package/dist/core/services/delta-service.d.ts +76 -0
- package/dist/core/services/delta-service.d.ts.map +1 -0
- package/dist/core/services/delta-service.js +2 -0
- package/dist/core/services/hasher-service.d.ts +47 -0
- package/dist/core/services/hasher-service.d.ts.map +1 -0
- package/dist/core/services/hasher-service.js +2 -0
- package/dist/core/services/index.d.ts +5 -0
- package/dist/core/services/index.d.ts.map +1 -0
- package/dist/core/services/index.js +20 -0
- package/dist/core/services/reconstruction-service.d.ts +99 -0
- package/dist/core/services/reconstruction-service.d.ts.map +1 -0
- package/dist/core/services/reconstruction-service.js +4 -0
- package/dist/core/services/validation-service.d.ts +18 -0
- package/dist/core/services/validation-service.d.ts.map +1 -0
- package/dist/core/services/validation-service.js +2 -0
- package/dist/core/types/index.d.ts +2 -0
- package/dist/core/types/index.d.ts.map +1 -0
- package/dist/core/types/index.js +17 -0
- package/dist/core/types/types.d.ts +3 -0
- package/dist/core/types/types.d.ts.map +1 -0
- package/dist/core/types/types.js +2 -0
- package/dist/core/utils/index.d.ts +3 -0
- package/dist/core/utils/index.d.ts.map +1 -0
- package/dist/core/utils/index.js +18 -0
- package/dist/core/utils/invariant.d.ts +2 -0
- package/dist/core/utils/invariant.d.ts.map +1 -0
- package/dist/core/utils/invariant.js +11 -0
- package/dist/core/utils/stream-to-buffer.d.ts +3 -0
- package/dist/core/utils/stream-to-buffer.d.ts.map +1 -0
- package/dist/core/utils/stream-to-buffer.js +10 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +29 -0
- package/dist/infrastructure/adapters/azure-blob-storage-adapter.d.ts +24 -0
- package/dist/infrastructure/adapters/azure-blob-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/azure-blob-storage-adapter.js +149 -0
- package/dist/infrastructure/adapters/gcs-storage-adapter.d.ts +20 -0
- package/dist/infrastructure/adapters/gcs-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/gcs-storage-adapter.js +101 -0
- package/dist/infrastructure/adapters/http-storage-adapter.d.ts +23 -0
- package/dist/infrastructure/adapters/http-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/http-storage-adapter.js +154 -0
- package/dist/infrastructure/adapters/local-storage-adapter.d.ts +23 -0
- package/dist/infrastructure/adapters/local-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/local-storage-adapter.js +124 -0
- package/dist/infrastructure/adapters/s3-storage-adapter.d.ts +24 -0
- package/dist/infrastructure/adapters/s3-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/s3-storage-adapter.js +139 -0
- package/dist/infrastructure/adapters/ssh-storage-adapter.d.ts +28 -0
- package/dist/infrastructure/adapters/ssh-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/ssh-storage-adapter.js +237 -0
- package/dist/infrastructure/adapters/url-storage-adapter.d.ts +14 -0
- package/dist/infrastructure/adapters/url-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/url-storage-adapter.js +92 -0
- package/dist/infrastructure/chunk-sources/disk-chunk-source.d.ts +12 -0
- package/dist/infrastructure/chunk-sources/disk-chunk-source.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/disk-chunk-source.js +61 -0
- package/dist/infrastructure/chunk-sources/index.d.ts +4 -0
- package/dist/infrastructure/chunk-sources/index.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/index.js +19 -0
- package/dist/infrastructure/chunk-sources/memory-chunk-source.d.ts +9 -0
- package/dist/infrastructure/chunk-sources/memory-chunk-source.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/memory-chunk-source.js +29 -0
- package/dist/infrastructure/chunk-sources/storage-chunk-source.d.ts +21 -0
- package/dist/infrastructure/chunk-sources/storage-chunk-source.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/storage-chunk-source.js +150 -0
- package/dist/infrastructure/client.d.ts +45 -0
- package/dist/infrastructure/client.d.ts.map +1 -0
- package/dist/infrastructure/client.js +52 -0
- package/dist/infrastructure/factories/pipeline-factory.d.ts +15 -0
- package/dist/infrastructure/factories/pipeline-factory.d.ts.map +1 -0
- package/dist/infrastructure/factories/pipeline-factory.js +26 -0
- package/dist/infrastructure/factories/service-factory.d.ts +11 -0
- package/dist/infrastructure/factories/service-factory.d.ts.map +1 -0
- package/dist/infrastructure/factories/service-factory.js +17 -0
- package/dist/infrastructure/factories/storage-adpater-factory.d.ts +41 -0
- package/dist/infrastructure/factories/storage-adpater-factory.d.ts.map +1 -0
- package/dist/infrastructure/factories/storage-adpater-factory.js +33 -0
- package/dist/infrastructure/pipelines/default-hash-download-pipeline.d.ts +27 -0
- package/dist/infrastructure/pipelines/default-hash-download-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-hash-download-pipeline.js +211 -0
- package/dist/infrastructure/pipelines/default-hash-upload-pipeline.d.ts +19 -0
- package/dist/infrastructure/pipelines/default-hash-upload-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-hash-upload-pipeline.js +170 -0
- package/dist/infrastructure/pipelines/default-url-download-pipeline.d.ts +30 -0
- package/dist/infrastructure/pipelines/default-url-download-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-url-download-pipeline.js +198 -0
- package/dist/infrastructure/pipelines/default-url-upload-pipeline.d.ts +20 -0
- package/dist/infrastructure/pipelines/default-url-upload-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-url-upload-pipeline.js +126 -0
- package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts +13 -0
- package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts.map +1 -0
- package/dist/infrastructure/services/hash-wasm-hasher-service.js +113 -0
- package/dist/infrastructure/services/memory-delta-service.d.ts +17 -0
- package/dist/infrastructure/services/memory-delta-service.d.ts.map +1 -0
- package/dist/infrastructure/services/memory-delta-service.js +198 -0
- package/dist/infrastructure/services/memory-reconstruction-service.d.ts +25 -0
- package/dist/infrastructure/services/memory-reconstruction-service.d.ts.map +1 -0
- package/dist/infrastructure/services/memory-reconstruction-service.js +329 -0
- package/dist/infrastructure/services/memory-validation-service.d.ts +9 -0
- package/dist/infrastructure/services/memory-validation-service.d.ts.map +1 -0
- package/dist/infrastructure/services/memory-validation-service.js +33 -0
- package/package.json +43 -0

package/dist/infrastructure/adapters/url-storage-adapter.js
@@ -0,0 +1,92 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DefaultUrlStorageAdapter = void 0;
+const stream_1 = require("stream");
+const adapters_1 = require("../../core/adapters");
+const exceptions_1 = require("../../core/exceptions");
+class DefaultUrlStorageAdapter extends adapters_1.UrlStorageAdapter {
+    async dispose() {
+        return;
+    }
+    async getChunkByUrl(url) {
+        const res = await fetch(url, { method: 'GET', headers: {} });
+        if (res.status === 404) {
+            return null;
+        }
+        if (!res.ok) {
+            throw new exceptions_1.GetChunkException(`${res.status} ${res.statusText}`);
+        }
+        return stream_1.Readable.fromWeb(res.body);
+    }
+    async putChunkByUrl(url, data) {
+        const res = await fetch(url, {
+            method: 'PUT',
+            headers: { ...{}, 'Content-Type': 'application/octet-stream' },
+            body: data,
+        });
+        if (!res.ok) {
+            throw new exceptions_1.PutChunkException(`${res.status} ${res.statusText}`);
+        }
+    }
+    async chunkExistsByUrl(url) {
+        const res = await fetch(url, { method: 'HEAD', headers: {} });
+        if (res.status === 404) {
+            return false;
+        }
+        if (!res.ok && res.status !== 200) {
+            throw new exceptions_1.HeadChunkException(`${res.status} ${res.statusText}`);
+        }
+        return res.ok;
+    }
+    async deleteChunkByUrl(url) {
+        const res = await fetch(url, { method: 'DELETE', headers: {} });
+        if (res.status === 404) {
+            return;
+        }
+        if (!res.ok) {
+            throw new exceptions_1.DeleteChunkException(`${res.status} ${res.statusText}`);
+        }
+    }
+    async getChunkInfoByUrl(hash, url) {
+        const res = await fetch(url, { method: 'HEAD', headers: {} });
+        if (res.status === 404) {
+            return null;
+        }
+        if (!res.ok) {
+            throw new exceptions_1.HeadChunkException(`${res.status} ${res.statusText}`);
+        }
+        const size = Number(res.headers.get('content-length')) || 0;
+        const modified = res.headers.get('last-modified')
+            ? new Date(res.headers.get('last-modified'))
+            : undefined;
+        const metadata = {};
+        for (const [k, v] of res.headers.entries()) {
+            if (k.startsWith('x-meta-'))
+                metadata[k.substring(7)] = v;
+        }
+        return { hash, size, modified, metadata };
+    }
+    async getRemoteIndexByUrl(url) {
+        const res = await fetch(url, { method: 'GET', headers: {} });
+        if (res.status === 404) {
+            return null;
+        }
+        if (!res.ok) {
+            throw new exceptions_1.GetRemoteIndexException(`${res.status} ${res.statusText}`);
+        }
+        return (await res.json());
+    }
+    async putRemoteIndexByUrl(url, index) {
+        const res = await fetch(url, {
+            method: 'PUT',
+            headers: {
+                'Content-Type': 'application/json',
+            },
+            body: JSON.stringify(index, null, 2),
+        });
+        if (!res.ok) {
+            throw new exceptions_1.PutRemoteIndexException(`${res.status} ${res.statusText}`);
+        }
+    }
+}
+exports.DefaultUrlStorageAdapter = DefaultUrlStorageAdapter;
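
For orientation, here is how this adapter might be exercised. The sketch below is not from the package: the URLs are hypothetical placeholders, only the class and method names come from the hunk above, and it assumes the class is re-exported from the package root and that the `UrlStorageAdapter` base class needs no constructor arguments.

```ts
import { DefaultUrlStorageAdapter } from 'rac-delta'; // assumed root re-export

async function demo() {
  const adapter = new DefaultUrlStorageAdapter();
  // Hypothetical presigned URL; any endpoint accepting GET/PUT/HEAD/DELETE works.
  const url = 'https://storage.example.com/chunks/abc123?sig=PLACEHOLDER';

  await adapter.putChunkByUrl(url, Buffer.from('chunk bytes')); // PUT, octet-stream
  if (await adapter.chunkExistsByUrl(url)) {                    // HEAD; 404 -> false
    const info = await adapter.getChunkInfoByUrl('abc123', url); // size, last-modified, x-meta-* headers
    const stream = await adapter.getChunkByUrl(url);             // Readable | null (null on 404)
    console.log(info?.size, stream !== null);
  }
  await adapter.deleteChunkByUrl(url); // DELETE; 404 tolerated
}
```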

package/dist/infrastructure/chunk-sources/disk-chunk-source.d.ts
@@ -0,0 +1,12 @@
+import { Readable } from 'stream';
+import { ChunkSource } from '../../core/services';
+export declare class DiskChunkSource implements ChunkSource {
+    private cacheDir;
+    constructor(cacheDir: string);
+    getChunk(hash: string): Promise<Buffer>;
+    getChunks(hashes: string[]): Promise<Map<string, Buffer>>;
+    hasChunk(hash: string): Promise<boolean>;
+    setChunk(hash: string, data: Buffer | Readable): Promise<void>;
+    clear(): Promise<void>;
+}
+//# sourceMappingURL=disk-chunk-source.d.ts.map

package/dist/infrastructure/chunk-sources/disk-chunk-source.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"disk-chunk-source.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/chunk-sources/disk-chunk-source.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAGlC,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAElD,qBAAa,eAAgB,YAAW,WAAW;IACjD,OAAO,CAAC,QAAQ,CAAS;gBAEb,QAAQ,EAAE,MAAM;IAItB,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAcvC,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAWzD,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAUxC,QAAQ,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,GAAG,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC;IAY9D,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAG7B"}

package/dist/infrastructure/chunk-sources/disk-chunk-source.js
@@ -0,0 +1,61 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DiskChunkSource = void 0;
+const promises_1 = require("fs/promises");
+const promises_2 = require("stream/promises");
+const fs_1 = require("fs");
+const path_1 = require("path");
+const stream_1 = require("stream");
+const exceptions_1 = require("../../core/exceptions");
+class DiskChunkSource {
+    cacheDir;
+    constructor(cacheDir) {
+        this.cacheDir = cacheDir;
+    }
+    async getChunk(hash) {
+        const filePath = (0, path_1.join)(this.cacheDir, hash);
+        try {
+            const stats = await (0, promises_1.stat)(filePath);
+            if (!stats.isFile()) {
+                throw new Error(`Resource is not a file`);
+            }
+            return (0, promises_1.readFile)(filePath);
+        }
+        catch {
+            throw new exceptions_1.ChunkNotFoundException(`${hash} not found on disk`);
+        }
+    }
+    async getChunks(hashes) {
+        const results = new Map();
+        for (const hash of hashes) {
+            const buffer = await this.getChunk(hash);
+            results.set(hash, buffer);
+        }
+        return results;
+    }
+    async hasChunk(hash) {
+        const filePath = (0, path_1.join)(this.cacheDir, hash);
+        try {
+            await (0, promises_1.access)(filePath);
+            return true;
+        }
+        catch {
+            return false;
+        }
+    }
+    async setChunk(hash, data) {
+        const filePath = (0, path_1.join)(this.cacheDir, hash);
+        await (0, promises_1.mkdir)((0, path_1.dirname)(filePath), { recursive: true });
+        if (data instanceof stream_1.Readable) {
+            const writeStream = (0, fs_1.createWriteStream)(filePath);
+            await (0, promises_2.pipeline)(data, writeStream);
+        }
+        else {
+            await (0, promises_1.writeFile)(filePath, data);
+        }
+    }
+    async clear() {
+        await (0, promises_1.rm)(this.cacheDir, { recursive: true, force: true });
+    }
+}
+exports.DiskChunkSource = DiskChunkSource;
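
A minimal round-trip through the disk cache, assuming the class is re-exported from the package root; the cache directory and hash values are placeholders.

```ts
import { Readable } from 'stream';
import { DiskChunkSource } from 'rac-delta'; // assumed root re-export

async function demo() {
  const cache = new DiskChunkSource('./.chunk-cache');

  // setChunk accepts a Buffer or a Readable; streams are piped to disk.
  await cache.setChunk('abc123', Buffer.from('chunk bytes'));
  await cache.setChunk('def456', Readable.from([Buffer.from('streamed bytes')]));

  console.log(await cache.hasChunk('abc123')); // true
  const data = await cache.getChunk('abc123'); // Buffer; throws ChunkNotFoundException if missing
  console.log(data.toString());

  await cache.clear(); // removes the cache directory recursively
}
```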

package/dist/infrastructure/chunk-sources/index.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/chunk-sources/index.ts"],"names":[],"mappings":"AAAA,cAAc,qBAAqB,CAAC;AACpC,cAAc,uBAAuB,CAAC;AACtC,cAAc,wBAAwB,CAAC"}

package/dist/infrastructure/chunk-sources/index.js
@@ -0,0 +1,19 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+__exportStar(require("./disk-chunk-source"), exports);
+__exportStar(require("./memory-chunk-source"), exports);
+__exportStar(require("./storage-chunk-source"), exports);

package/dist/infrastructure/chunk-sources/memory-chunk-source.d.ts
@@ -0,0 +1,9 @@
+import { ChunkSource } from '../../core/services';
+export declare class MemoryChunkSource implements ChunkSource {
+    private cache;
+    getChunk(hash: string): Promise<Buffer>;
+    getChunks(hashes: string[]): Promise<Map<string, Buffer>>;
+    hasChunk(hash: string): boolean;
+    setChunk(hash: string, data: Buffer): void;
+}
+//# sourceMappingURL=memory-chunk-source.d.ts.map

package/dist/infrastructure/chunk-sources/memory-chunk-source.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"memory-chunk-source.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/chunk-sources/memory-chunk-source.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAElD,qBAAa,iBAAkB,YAAW,WAAW;IACnD,OAAO,CAAC,KAAK,CAA6B;IAEpC,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAUvC,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAW/D,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO;IAI/B,QAAQ,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,GAAG,IAAI;CAG3C"}

package/dist/infrastructure/chunk-sources/memory-chunk-source.js
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MemoryChunkSource = void 0;
+const exceptions_1 = require("../../core/exceptions");
+class MemoryChunkSource {
+    cache = new Map();
+    async getChunk(hash) {
+        const chunk = this.cache.get(hash);
+        if (!chunk) {
+            throw new exceptions_1.ChunkNotFoundException(`${hash} not found in memory`);
+        }
+        return chunk;
+    }
+    async getChunks(hashes) {
+        const results = new Map();
+        for (const hash of hashes) {
+            const chunk = await this.getChunk(hash);
+            results.set(hash, chunk);
+        }
+        return results;
+    }
+    hasChunk(hash) {
+        return this.cache.has(hash);
+    }
+    setChunk(hash, data) {
+        this.cache.set(hash, data);
+    }
+}
+exports.MemoryChunkSource = MemoryChunkSource;
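
MemoryChunkSource is the simplest ChunkSource and works well as a test double; note that `hasChunk` and `setChunk` are synchronous here, unlike the disk variant. A short sketch (hash values are placeholders, root re-export assumed):

```ts
import { MemoryChunkSource } from 'rac-delta'; // assumed root re-export

async function demo() {
  const source = new MemoryChunkSource();
  source.setChunk('abc123', Buffer.from('chunk bytes')); // synchronous
  console.log(source.hasChunk('abc123'));                // true, synchronous
  const buf = await source.getChunk('abc123');           // async; throws ChunkNotFoundException if absent
  console.log(buf.length);
}
```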

package/dist/infrastructure/chunk-sources/storage-chunk-source.d.ts
@@ -0,0 +1,21 @@
+import { StorageAdapter } from '../../core/adapters';
+import { ChunkSource } from '../../core/services';
+import { Nullish } from '../../core/types';
+import { Readable } from 'stream';
+export declare class StorageChunkSource implements ChunkSource {
+    private readonly storage;
+    private readonly urlsMap?;
+    constructor(storage: StorageAdapter, urlsMap?: Nullish<Map<string, string>>);
+    getChunk(hash: string): Promise<Buffer>;
+    getChunks(hashes: string[], { concurrency }?: {
+        concurrency?: number;
+    }): Promise<Map<string, Buffer>>;
+    streamChunks(hashes: string[], { concurrency, preserveOrder, }?: {
+        concurrency?: number;
+        preserveOrder?: boolean;
+    }): AsyncGenerator<{
+        hash: string;
+        data: Readable;
+    }>;
+}
+//# sourceMappingURL=storage-chunk-source.d.ts.map

package/dist/infrastructure/chunk-sources/storage-chunk-source.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"storage-chunk-source.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/chunk-sources/storage-chunk-source.ts"],"names":[],"mappings":"AAAA,OAAO,EAAsB,cAAc,EAAqB,MAAM,qBAAqB,CAAC;AAG5F,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAClD,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE3C,OAAO,EAAE,QAAQ,EAAe,MAAM,QAAQ,CAAC;AAE/C,qBAAa,kBAAmB,YAAW,WAAW;IAElD,OAAO,CAAC,QAAQ,CAAC,OAAO;IACxB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC;gBADR,OAAO,EAAE,cAAc,EACvB,OAAO,CAAC,EAAE,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAGnD,QAAQ,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAmBvC,SAAS,CACb,MAAM,EAAE,MAAM,EAAE,EAChB,EAAE,WAAe,EAAE,GAAE;QAAE,WAAW,CAAC,EAAE,MAAM,CAAA;KAAO,GACjD,OAAO,CAAC,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAoBxB,YAAY,CACjB,MAAM,EAAE,MAAM,EAAE,EAChB,EACE,WAAe,EACf,aAAoB,GACrB,GAAE;QAAE,WAAW,CAAC,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,OAAO,CAAA;KAAO,GACxD,cAAc,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,QAAQ,CAAA;KAAE,CAAC;CAkIpD"}

package/dist/infrastructure/chunk-sources/storage-chunk-source.js
@@ -0,0 +1,150 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.StorageChunkSource = void 0;
+const stream_to_buffer_1 = require("../../core/utils/stream-to-buffer");
+const exceptions_1 = require("../../core/exceptions");
+const stream_1 = require("stream");
+class StorageChunkSource {
+    storage;
+    urlsMap;
+    constructor(storage, urlsMap) {
+        this.storage = storage;
+        this.urlsMap = urlsMap;
+    }
+    async getChunk(hash) {
+        const url = this.urlsMap?.get(hash);
+        if (!url && this.storage.type === 'url') {
+            throw new Error(`No URL found for hash: ${hash}`);
+        }
+        const stream = this.storage.type === 'hash'
+            ? await this.storage.getChunk(hash)
+            : await this.storage.getChunkByUrl(url);
+        if (!stream) {
+            throw new exceptions_1.ChunkNotFoundException(`${hash} not found in storage`);
+        }
+        return (0, stream_to_buffer_1.streamToBuffer)(stream);
+    }
+    async getChunks(hashes, { concurrency = 8 } = {}) {
+        const results = new Map();
+        const queue = [...hashes];
+        const workers = Array.from({ length: concurrency }).map(async () => {
+            while (queue.length > 0) {
+                const hash = queue.shift();
+                if (!hash) {
+                    break;
+                }
+                const data = await this.getChunk(hash);
+                results.set(hash, data);
+            }
+        });
+        await Promise.all(workers);
+        return results;
+    }
+    async *streamChunks(hashes, { concurrency = 4, preserveOrder = true, } = {}) {
+        if (hashes.length === 0) {
+            return;
+        }
+        const controller = new AbortController();
+        const queue = hashes.map((hash, index) => ({ hash, index }));
+        const results = new Map();
+        let nextIndexToEmit = 0;
+        let activeWorkers = 0;
+        let workersDone = false;
+        const pendingResolvers = [];
+        let workerError = null;
+        const signalNext = () => {
+            const resolver = pendingResolvers.shift();
+            if (resolver) {
+                resolver();
+            }
+        };
+        const waitForData = async () => {
+            while ((preserveOrder && !results.has(nextIndexToEmit) && workerError === null) ||
+                (!preserveOrder && results.size === 0 && workerError === null)) {
+                await new Promise((resolve) => pendingResolvers.push(resolve));
+            }
+        };
+        const worker = async () => {
+            activeWorkers++;
+            try {
+                while (queue.length > 0 && !controller.signal.aborted) {
+                    const { hash, index } = queue.shift();
+                    try {
+                        const url = this.urlsMap?.get(hash);
+                        if (!url && this.storage.type === 'url') {
+                            throw new Error(`No URL found for hash: ${hash}`);
+                        }
+                        const stream = this.storage.type === 'hash'
+                            ? await this.storage.getChunk(hash)
+                            : await this.storage.getChunkByUrl(url);
+                        if (!stream) {
+                            throw new exceptions_1.ChunkNotFoundException(`${hash} not found in storage`);
+                        }
+                        const pass = new stream_1.PassThrough({ highWaterMark: 64 * 1024 });
+                        stream.on('error', (err) => {
+                            workerError = err instanceof Error ? err : new Error(String(err));
+                            controller.abort();
+                            signalNext();
+                        });
+                        pass.on('error', (err) => {
+                            workerError = err instanceof Error ? err : new Error(String(err));
+                            controller.abort();
+                            signalNext();
+                        });
+                        stream.pipe(pass);
+                        results.set(index, { hash, data: pass });
+                        signalNext();
+                    }
+                    catch (error) {
+                        workerError = error instanceof Error ? error : new Error(String(error));
+                        controller.abort();
+                        signalNext();
+                        return;
+                    }
+                }
+            }
+            finally {
+                activeWorkers--;
+                if (activeWorkers === 0) {
+                    workersDone = true;
+                    signalNext();
+                }
+            }
+        };
+        const workers = Array.from({ length: Math.min(concurrency, queue.length) }, worker);
+        try {
+            while (true) {
+                await waitForData();
+                if (workerError) {
+                    // ensure workers settle so their promise rejections don't become unhandled
+                    await Promise.allSettled(workers);
+                    throw workerError;
+                }
+                // Emit strictly in order
+                if (preserveOrder) {
+                    while (results.has(nextIndexToEmit)) {
+                        yield results.get(nextIndexToEmit);
+                        results.delete(nextIndexToEmit);
+                        nextIndexToEmit++;
+                    }
+                }
+                // Emit as soon as any result is ready
+                if (!preserveOrder) {
+                    const [index, value] = results.entries().next().value ?? [];
+                    if (value !== undefined && index !== undefined) {
+                        yield value;
+                        results.delete(index);
+                    }
+                }
+                if (workersDone && results.size === 0) {
+                    break;
+                }
+            }
+        }
+        finally {
+            controller.abort();
+            await Promise.allSettled(workers);
+        }
+    }
+}
+exports.StorageChunkSource = StorageChunkSource;
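
`streamChunks` above is a bounded-concurrency producer/consumer: up to `concurrency` workers pull hashes off a shared queue, each fetched stream is piped through a `PassThrough`, and results are parked in a map keyed by input index. The generator drains that map either strictly in input order (`preserveOrder: true`, the default) or as results arrive; any worker error aborts the controller and is rethrown after the workers settle. A consumption sketch follows, with the adapter construction left out (`storage` stands for any constructed StorageAdapter) and a root re-export assumed:

```ts
import { createWriteStream } from 'fs';
import { StorageChunkSource } from 'rac-delta'; // assumed root re-export

async function reassemble(storage: any /* a constructed StorageAdapter */, hashes: string[]) {
  const source = new StorageChunkSource(storage);
  const out = createWriteStream('./reassembled.bin');

  // With the default preserveOrder: true, chunks are yielded in input order,
  // so they can be appended directly while up to 4 fetches run concurrently.
  for await (const { data } of source.streamChunks(hashes, { concurrency: 4 })) {
    for await (const piece of data) {
      // respect backpressure on the output file
      if (!out.write(piece)) await new Promise((r) => out.once('drain', r));
    }
  }
  out.end();
}
```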

package/dist/infrastructure/client.d.ts
@@ -0,0 +1,45 @@
+import { RacDeltaConfig, StorageConfig } from '../core/config';
+import { DeltaService, HasherService, ReconstructionService, ValidationService } from '../core/services';
+import { PipelineBundleFor } from './factories/pipeline-factory';
+import { AdapterFromConfig } from './factories/storage-adpater-factory';
+/**
+ * Main entry point of the RacDelta SDK.
+ *
+ * `RacDeltaClient` acts as a high-level orchestrator that initializes all
+ * core services, adapters, and pipelines used for differential upload and
+ * download operations.
+ *
+ * It is designed to provide a single, easy-to-use API for performing
+ * file synchronization, hashing, validation, and reconstruction.
+ *
+ * ---
+ * ### Example
+ * ```ts
+ * import { RacDeltaClient } from 'rac-delta';
+ *
+ * const client = new RacDeltaClient({
+ *   storage: { type: 'local', basePath: './remote' },
+ *   maxConcurrency: 4,
+ * });
+ *
+ * const delta = await client.compareForDownload(localIndex, remoteIndex);
+ * console.log(delta);
+ * ```
+ *
+ * ---
+ */
+export declare class RacDeltaClient<C extends StorageConfig = StorageConfig> {
+    readonly config: RacDeltaConfig & {
+        storage: C;
+    };
+    readonly storage: AdapterFromConfig<C>;
+    readonly delta: DeltaService;
+    readonly hasher: HasherService;
+    readonly validation: ValidationService;
+    readonly reconstruction: ReconstructionService;
+    readonly pipelines: PipelineBundleFor<AdapterFromConfig<C>>;
+    constructor(config: RacDeltaConfig & {
+        storage: C;
+    });
+}
+//# sourceMappingURL=client.d.ts.map

package/dist/infrastructure/client.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../../src/infrastructure/client.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAC/D,OAAO,EACL,YAAY,EACZ,aAAa,EACb,qBAAqB,EACrB,iBAAiB,EAClB,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,iBAAiB,EAAmB,MAAM,8BAA8B,CAAC;AAClF,OAAO,EAAE,iBAAiB,EAAyB,MAAM,qCAAqC,CAAC;AAG/F;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,qBAAa,cAAc,CAAC,CAAC,SAAS,aAAa,GAAG,aAAa;IACjE,QAAQ,CAAC,MAAM,EAAE,cAAc,GAAG;QAAE,OAAO,EAAE,CAAC,CAAA;KAAE,CAAC;IACjD,QAAQ,CAAC,OAAO,EAAE,iBAAiB,CAAC,CAAC,CAAC,CAAC;IACvC,QAAQ,CAAC,KAAK,EAAE,YAAY,CAAC;IAC7B,QAAQ,CAAC,MAAM,EAAE,aAAa,CAAC;IAC/B,QAAQ,CAAC,UAAU,EAAE,iBAAiB,CAAC;IACvC,QAAQ,CAAC,cAAc,EAAE,qBAAqB,CAAC;IAC/C,QAAQ,CAAC,SAAS,EAAE,iBAAiB,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,CAAC;gBAEhD,MAAM,EAAE,cAAc,GAAG;QAAE,OAAO,EAAE,CAAC,CAAA;KAAE;CAapD"}

package/dist/infrastructure/client.js
@@ -0,0 +1,52 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RacDeltaClient = void 0;
+const pipeline_factory_1 = require("./factories/pipeline-factory");
+const storage_adpater_factory_1 = require("./factories/storage-adpater-factory");
+const service_factory_1 = require("./factories/service-factory");
+/**
+ * Main entry point of the RacDelta SDK.
+ *
+ * `RacDeltaClient` acts as a high-level orchestrator that initializes all
+ * core services, adapters, and pipelines used for differential upload and
+ * download operations.
+ *
+ * It is designed to provide a single, easy-to-use API for performing
+ * file synchronization, hashing, validation, and reconstruction.
+ *
+ * ---
+ * ### Example
+ * ```ts
+ * import { RacDeltaClient } from 'rac-delta';
+ *
+ * const client = new RacDeltaClient({
+ *   storage: { type: 'local', basePath: './remote' },
+ *   maxConcurrency: 4,
+ * });
+ *
+ * const delta = await client.compareForDownload(localIndex, remoteIndex);
+ * console.log(delta);
+ * ```
+ *
+ * ---
+ */
+class RacDeltaClient {
+    config;
+    storage;
+    delta;
+    hasher;
+    validation;
+    reconstruction;
+    pipelines;
+    constructor(config) {
+        this.config = config;
+        this.storage = storage_adpater_factory_1.StorageAdapterFactory.create(config.storage);
+        const services = service_factory_1.ServiceFactory.create();
+        this.delta = services.delta;
+        this.hasher = services.hasher;
+        this.validation = services.validation;
+        this.reconstruction = services.reconstruction;
+        this.pipelines = pipeline_factory_1.PipelineFactory.create(this.storage, services, config);
+    }
+}
+exports.RacDeltaClient = RacDeltaClient;
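
The generic parameter does real work here: the literal `type` on the storage config flows through `AdapterFromConfig` and `PipelineBundleFor`, so `client.storage` and `client.pipelines` are statically narrowed to the matching adapter and pipeline pair. A sketch reusing the config from the JSDoc example (the type-level comments reflect the declarations above, not verified compiler output):

```ts
import { RacDeltaClient } from 'rac-delta';

const client = new RacDeltaClient({
  storage: { type: 'local', basePath: './remote' },
  maxConcurrency: 4,
});

client.storage;   // typed as LocalStorageAdapter via AdapterFromConfig
client.pipelines; // upload/download pair selected by the adapter's 'hash' | 'url' family
```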

package/dist/infrastructure/factories/pipeline-factory.d.ts
@@ -0,0 +1,15 @@
+import { HashDownloadPipeline, HashUploadPipeline, UrlDownloadPipeline, UrlUploadPipeline } from '../../core/pipelines';
+import { RacDeltaConfig } from '../../core/config';
+import { StorageAdapter } from '../../core/adapters';
+import { ServiceBundle } from './service-factory';
+export type PipelineBundleFor<S extends StorageAdapter> = S['type'] extends 'hash' ? {
+    upload: HashUploadPipeline;
+    download: HashDownloadPipeline;
+} : S['type'] extends 'url' ? {
+    upload: UrlUploadPipeline;
+    download: UrlDownloadPipeline;
+} : never;
+export declare class PipelineFactory {
+    static create<S extends StorageAdapter>(storage: S, services: ServiceBundle, config: RacDeltaConfig): PipelineBundleFor<S>;
+}
+//# sourceMappingURL=pipeline-factory.d.ts.map

package/dist/infrastructure/factories/pipeline-factory.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"pipeline-factory.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/factories/pipeline-factory.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,oBAAoB,EACpB,kBAAkB,EAClB,mBAAmB,EACnB,iBAAiB,EAClB,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,EAAsB,cAAc,EAAqB,MAAM,qBAAqB,CAAC;AAO5F,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAElD,MAAM,MAAM,iBAAiB,CAAC,CAAC,SAAS,cAAc,IAAI,CAAC,CAAC,MAAM,CAAC,SAAS,MAAM,GAC9E;IACE,MAAM,EAAE,kBAAkB,CAAC;IAC3B,QAAQ,EAAE,oBAAoB,CAAC;CAChC,GACD,CAAC,CAAC,MAAM,CAAC,SAAS,KAAK,GACrB;IACE,MAAM,EAAE,iBAAiB,CAAC;IAC1B,QAAQ,EAAE,mBAAmB,CAAC;CAC/B,GACD,KAAK,CAAC;AAEZ,qBAAa,eAAe;IAC1B,MAAM,CAAC,MAAM,CAAC,CAAC,SAAS,cAAc,EACpC,OAAO,EAAE,CAAC,EACV,QAAQ,EAAE,aAAa,EACvB,MAAM,EAAE,cAAc,GACrB,iBAAiB,CAAC,CAAC,CAAC;CA6BxB"}

package/dist/infrastructure/factories/pipeline-factory.js
@@ -0,0 +1,26 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PipelineFactory = void 0;
+const adapters_1 = require("../../core/adapters");
+const default_hash_download_pipeline_1 = require("../pipelines/default-hash-download-pipeline");
+const default_url_download_pipeline_1 = require("../pipelines/default-url-download-pipeline");
+const default_hash_upload_pipeline_1 = require("../pipelines/default-hash-upload-pipeline");
+const default_url_upload_pipeline_1 = require("../pipelines/default-url-upload-pipeline");
+class PipelineFactory {
+    static create(storage, services, config) {
+        if (storage instanceof adapters_1.HashStorageAdapter) {
+            return {
+                upload: new default_hash_upload_pipeline_1.DefaultHashUploadPipeline(storage, services.delta, config),
+                download: new default_hash_download_pipeline_1.DefaultHashDownloadPipeline(services.reconstruction, services.validation, storage, config, services.delta),
+            };
+        }
+        if (storage instanceof adapters_1.UrlStorageAdapter) {
+            return {
+                upload: new default_url_upload_pipeline_1.DefaultUrlUploadPipeline(storage, config),
+                download: new default_url_download_pipeline_1.DefaultUrlDownloadPipeline(storage, services.reconstruction, services.validation, services.delta, config),
+            };
+        }
+        throw new Error(`Unsupported storage adapter type: ${storage.type}`);
+    }
+}
+exports.PipelineFactory = PipelineFactory;
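
Note that the runtime dispatch uses `instanceof` against the abstract base classes while the return type comes from `PipelineBundleFor`, so a custom adapter must actually extend `HashStorageAdapter` or `UrlStorageAdapter` to get past the final throw. Wiring the factories together manually, rather than through `RacDeltaClient`, might look like this sketch, assuming root re-exports and the config shape from the JSDoc example:

```ts
import { PipelineFactory, ServiceFactory, StorageAdapterFactory } from 'rac-delta'; // assumed re-exports

const config = { storage: { type: 'local' as const, basePath: './remote' }, maxConcurrency: 4 };
const storage = StorageAdapterFactory.create(config.storage); // LocalStorageAdapter
const services = ServiceFactory.create();                     // hash-wasm hasher plus in-memory services
const pipelines = PipelineFactory.create(storage, services, config);
// pipelines.upload / pipelines.download match the adapter family;
// an adapter extending neither base class would throw at runtime.
```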

package/dist/infrastructure/factories/service-factory.d.ts
@@ -0,0 +1,11 @@
+import { DeltaService, HasherService, ReconstructionService, ValidationService } from '../../core/services';
+export interface ServiceBundle {
+    delta: DeltaService;
+    hasher: HasherService;
+    validation: ValidationService;
+    reconstruction: ReconstructionService;
+}
+export declare class ServiceFactory {
+    static create(): ServiceBundle;
+}
+//# sourceMappingURL=service-factory.d.ts.map

package/dist/infrastructure/factories/service-factory.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"service-factory.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/factories/service-factory.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,aAAa,EACb,qBAAqB,EACrB,iBAAiB,EAClB,MAAM,qBAAqB,CAAC;AAO7B,MAAM,WAAW,aAAa;IAC5B,KAAK,EAAE,YAAY,CAAC;IACpB,MAAM,EAAE,aAAa,CAAC;IACtB,UAAU,EAAE,iBAAiB,CAAC;IAC9B,cAAc,EAAE,qBAAqB,CAAC;CACvC;AAED,qBAAa,cAAc;IACzB,MAAM,CAAC,MAAM,IAAI,aAAa;CAQ/B"}

package/dist/infrastructure/factories/service-factory.js
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ServiceFactory = void 0;
+const memory_reconstruction_service_1 = require("../services/memory-reconstruction-service");
+const memory_validation_service_1 = require("../services/memory-validation-service");
+const hash_wasm_hasher_service_1 = require("../services/hash-wasm-hasher-service");
+const memory_delta_service_1 = require("../services/memory-delta-service");
+class ServiceFactory {
+    static create() {
+        const hasher = new hash_wasm_hasher_service_1.HashWasmHasherService();
+        const delta = new memory_delta_service_1.MemoryDeltaService(hasher);
+        const validation = new memory_validation_service_1.MemoryValidationService(hasher);
+        const reconstruction = new memory_reconstruction_service_1.MemoryReconstructionService(hasher);
+        return { delta, hasher, validation, reconstruction };
+    }
+}
+exports.ServiceFactory = ServiceFactory;

package/dist/infrastructure/factories/storage-adpater-factory.d.ts
@@ -0,0 +1,41 @@
+import { StorageConfig } from '../../core/config';
+import { AzureBlobStorageAdapter } from '../adapters/azure-blob-storage-adapter';
+import { DefaultUrlStorageAdapter } from '../adapters/url-storage-adapter';
+import { LocalStorageAdapter } from '../adapters/local-storage-adapter';
+import { HTTPStorageAdapter } from '../adapters/http-storage-adapter';
+import { GCSStorageAdapter } from '../adapters/gcs-storage-adapter';
+import { SSHStorageAdapter } from '../adapters/ssh-storage-adapter';
+import { S3StorageAdapter } from '../adapters/s3-storage-adapter';
+export type StorageAdapterMap = {
+    local: LocalStorageAdapter;
+    http: HTTPStorageAdapter;
+    s3: S3StorageAdapter;
+    azure: AzureBlobStorageAdapter;
+    gcs: GCSStorageAdapter;
+    ssh: SSHStorageAdapter;
+    url: DefaultUrlStorageAdapter;
+};
+export type StorageTypeKey = keyof StorageAdapterMap;
+export type AdapterFromConfig<T extends StorageConfig> = T extends {
+    type: StorageTypeKey;
+} ? StorageAdapterMap[T['type']] : never;
+type StorageAdapterByConfigType<C extends StorageConfig> = C extends {
+    type: 'local';
+} ? LocalStorageAdapter : C extends {
+    type: 'http';
+} ? HTTPStorageAdapter : C extends {
+    type: 's3';
+} ? S3StorageAdapter : C extends {
+    type: 'azure';
+} ? AzureBlobStorageAdapter : C extends {
+    type: 'gcs';
+} ? GCSStorageAdapter : C extends {
+    type: 'ssh';
+} ? SSHStorageAdapter : C extends {
+    type: 'url';
+} ? DefaultUrlStorageAdapter : never;
+export declare class StorageAdapterFactory {
+    static create<C extends StorageConfig>(config: C): StorageAdapterByConfigType<C>;
+}
+export {};
+//# sourceMappingURL=storage-adpater-factory.d.ts.map
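
A type-level illustration of the two conditional helpers, assuming `StorageConfig` is a discriminated union on `type` (its definition is not part of this section) and that the types are re-exported from the package root:

```ts
import type { AdapterFromConfig, StorageConfig, StorageTypeKey } from 'rac-delta'; // assumed re-exports

// Select the 's3' member of the config union, then map it to its adapter class.
type S3Config = Extract<StorageConfig, { type: 's3' }>;
type S3Adapter = AdapterFromConfig<S3Config>; // resolves to S3StorageAdapter per StorageAdapterMap

// StorageTypeKey is the union of the seven supported keys.
const keys: StorageTypeKey[] = ['local', 'http', 's3', 'azure', 'gcs', 'ssh', 'url'];
```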

package/dist/infrastructure/factories/storage-adpater-factory.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"storage-adpater-factory.d.ts.map","sourceRoot":"","sources":["../../../src/infrastructure/factories/storage-adpater-factory.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAElD,OAAO,EAAE,uBAAuB,EAAE,MAAM,wCAAwC,CAAC;AACjF,OAAO,EAAE,wBAAwB,EAAE,MAAM,iCAAiC,CAAC;AAC3E,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,kBAAkB,EAAE,MAAM,kCAAkC,CAAC;AACtE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAGlE,MAAM,MAAM,iBAAiB,GAAG;IAC9B,KAAK,EAAE,mBAAmB,CAAC;IAC3B,IAAI,EAAE,kBAAkB,CAAC;IACzB,EAAE,EAAE,gBAAgB,CAAC;IACrB,KAAK,EAAE,uBAAuB,CAAC;IAC/B,GAAG,EAAE,iBAAiB,CAAC;IACvB,GAAG,EAAE,iBAAiB,CAAC;IACvB,GAAG,EAAE,wBAAwB,CAAC;CAC/B,CAAC;AAEF,MAAM,MAAM,cAAc,GAAG,MAAM,iBAAiB,CAAC;AAErD,MAAM,MAAM,iBAAiB,CAAC,CAAC,SAAS,aAAa,IAAI,CAAC,SAAS;IAAE,IAAI,EAAE,cAAc,CAAA;CAAE,GACvF,iBAAiB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,GAC5B,KAAK,CAAC;AAEV,KAAK,0BAA0B,CAAC,CAAC,SAAS,aAAa,IAAI,CAAC,SAAS;IAAE,IAAI,EAAE,OAAO,CAAA;CAAE,GAClF,mBAAmB,GACnB,CAAC,SAAS;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GACxB,kBAAkB,GAClB,CAAC,SAAS;IAAE,IAAI,EAAE,IAAI,CAAA;CAAE,GACtB,gBAAgB,GAChB,CAAC,SAAS;IAAE,IAAI,EAAE,OAAO,CAAA;CAAE,GACzB,uBAAuB,GACvB,CAAC,SAAS;IAAE,IAAI,EAAE,KAAK,CAAA;CAAE,GACvB,iBAAiB,GACjB,CAAC,SAAS;IAAE,IAAI,EAAE,KAAK,CAAA;CAAE,GACvB,iBAAiB,GACjB,CAAC,SAAS;IAAE,IAAI,EAAE,KAAK,CAAA;CAAE,GACvB,wBAAwB,GACxB,KAAK,CAAC;AAEtB,qBAAa,qBAAqB;IAChC,MAAM,CAAC,MAAM,CAAC,CAAC,SAAS,aAAa,EAAE,MAAM,EAAE,CAAC,GAAG,0BAA0B,CAAC,CAAC,CAAC;CAoBjF"}