rac-delta 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +1 -0
- package/dist/core/adapters/index.d.ts +2 -0
- package/dist/core/adapters/index.d.ts.map +1 -0
- package/dist/core/adapters/index.js +17 -0
- package/dist/core/adapters/storage-adapter.d.ts +125 -0
- package/dist/core/adapters/storage-adapter.d.ts.map +1 -0
- package/dist/core/adapters/storage-adapter.js +14 -0
- package/dist/core/config/index.d.ts +2 -0
- package/dist/core/config/index.d.ts.map +1 -0
- package/dist/core/config/index.js +17 -0
- package/dist/core/config/rac-delta-config.d.ts +132 -0
- package/dist/core/config/rac-delta-config.d.ts.map +1 -0
- package/dist/core/config/rac-delta-config.js +2 -0
- package/dist/core/exceptions.d.ts +25 -0
- package/dist/core/exceptions.d.ts.map +1 -0
- package/dist/core/exceptions.js +51 -0
- package/dist/core/models/chunk.d.ts +12 -0
- package/dist/core/models/chunk.d.ts.map +1 -0
- package/dist/core/models/chunk.js +2 -0
- package/dist/core/models/delta-plan.d.ts +12 -0
- package/dist/core/models/delta-plan.d.ts.map +1 -0
- package/dist/core/models/delta-plan.js +2 -0
- package/dist/core/models/file-entry.d.ts +9 -0
- package/dist/core/models/file-entry.d.ts.map +1 -0
- package/dist/core/models/file-entry.js +2 -0
- package/dist/core/models/index.d.ts +5 -0
- package/dist/core/models/index.d.ts.map +1 -0
- package/dist/core/models/index.js +20 -0
- package/dist/core/models/rd-index.d.ts +8 -0
- package/dist/core/models/rd-index.d.ts.map +1 -0
- package/dist/core/models/rd-index.js +2 -0
- package/dist/core/pipelines/download-pipeline.d.ts +142 -0
- package/dist/core/pipelines/download-pipeline.d.ts.map +1 -0
- package/dist/core/pipelines/download-pipeline.js +64 -0
- package/dist/core/pipelines/index.d.ts +3 -0
- package/dist/core/pipelines/index.d.ts.map +1 -0
- package/dist/core/pipelines/index.js +18 -0
- package/dist/core/pipelines/upload-pipeline.d.ts +60 -0
- package/dist/core/pipelines/upload-pipeline.d.ts.map +1 -0
- package/dist/core/pipelines/upload-pipeline.js +34 -0
- package/dist/core/services/delta-service.d.ts +76 -0
- package/dist/core/services/delta-service.d.ts.map +1 -0
- package/dist/core/services/delta-service.js +2 -0
- package/dist/core/services/hasher-service.d.ts +47 -0
- package/dist/core/services/hasher-service.d.ts.map +1 -0
- package/dist/core/services/hasher-service.js +2 -0
- package/dist/core/services/index.d.ts +5 -0
- package/dist/core/services/index.d.ts.map +1 -0
- package/dist/core/services/index.js +20 -0
- package/dist/core/services/reconstruction-service.d.ts +99 -0
- package/dist/core/services/reconstruction-service.d.ts.map +1 -0
- package/dist/core/services/reconstruction-service.js +4 -0
- package/dist/core/services/validation-service.d.ts +18 -0
- package/dist/core/services/validation-service.d.ts.map +1 -0
- package/dist/core/services/validation-service.js +2 -0
- package/dist/core/types/index.d.ts +2 -0
- package/dist/core/types/index.d.ts.map +1 -0
- package/dist/core/types/index.js +17 -0
- package/dist/core/types/types.d.ts +3 -0
- package/dist/core/types/types.d.ts.map +1 -0
- package/dist/core/types/types.js +2 -0
- package/dist/core/utils/index.d.ts +3 -0
- package/dist/core/utils/index.d.ts.map +1 -0
- package/dist/core/utils/index.js +18 -0
- package/dist/core/utils/invariant.d.ts +2 -0
- package/dist/core/utils/invariant.d.ts.map +1 -0
- package/dist/core/utils/invariant.js +11 -0
- package/dist/core/utils/stream-to-buffer.d.ts +3 -0
- package/dist/core/utils/stream-to-buffer.d.ts.map +1 -0
- package/dist/core/utils/stream-to-buffer.js +10 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +29 -0
- package/dist/infrastructure/adapters/azure-blob-storage-adapter.d.ts +24 -0
- package/dist/infrastructure/adapters/azure-blob-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/azure-blob-storage-adapter.js +149 -0
- package/dist/infrastructure/adapters/gcs-storage-adapter.d.ts +20 -0
- package/dist/infrastructure/adapters/gcs-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/gcs-storage-adapter.js +101 -0
- package/dist/infrastructure/adapters/http-storage-adapter.d.ts +23 -0
- package/dist/infrastructure/adapters/http-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/http-storage-adapter.js +154 -0
- package/dist/infrastructure/adapters/local-storage-adapter.d.ts +23 -0
- package/dist/infrastructure/adapters/local-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/local-storage-adapter.js +124 -0
- package/dist/infrastructure/adapters/s3-storage-adapter.d.ts +24 -0
- package/dist/infrastructure/adapters/s3-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/s3-storage-adapter.js +139 -0
- package/dist/infrastructure/adapters/ssh-storage-adapter.d.ts +28 -0
- package/dist/infrastructure/adapters/ssh-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/ssh-storage-adapter.js +237 -0
- package/dist/infrastructure/adapters/url-storage-adapter.d.ts +14 -0
- package/dist/infrastructure/adapters/url-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/url-storage-adapter.js +92 -0
- package/dist/infrastructure/chunk-sources/disk-chunk-source.d.ts +12 -0
- package/dist/infrastructure/chunk-sources/disk-chunk-source.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/disk-chunk-source.js +61 -0
- package/dist/infrastructure/chunk-sources/index.d.ts +4 -0
- package/dist/infrastructure/chunk-sources/index.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/index.js +19 -0
- package/dist/infrastructure/chunk-sources/memory-chunk-source.d.ts +9 -0
- package/dist/infrastructure/chunk-sources/memory-chunk-source.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/memory-chunk-source.js +29 -0
- package/dist/infrastructure/chunk-sources/storage-chunk-source.d.ts +21 -0
- package/dist/infrastructure/chunk-sources/storage-chunk-source.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/storage-chunk-source.js +150 -0
- package/dist/infrastructure/client.d.ts +45 -0
- package/dist/infrastructure/client.d.ts.map +1 -0
- package/dist/infrastructure/client.js +52 -0
- package/dist/infrastructure/factories/pipeline-factory.d.ts +15 -0
- package/dist/infrastructure/factories/pipeline-factory.d.ts.map +1 -0
- package/dist/infrastructure/factories/pipeline-factory.js +26 -0
- package/dist/infrastructure/factories/service-factory.d.ts +11 -0
- package/dist/infrastructure/factories/service-factory.d.ts.map +1 -0
- package/dist/infrastructure/factories/service-factory.js +17 -0
- package/dist/infrastructure/factories/storage-adpater-factory.d.ts +41 -0
- package/dist/infrastructure/factories/storage-adpater-factory.d.ts.map +1 -0
- package/dist/infrastructure/factories/storage-adpater-factory.js +33 -0
- package/dist/infrastructure/pipelines/default-hash-download-pipeline.d.ts +27 -0
- package/dist/infrastructure/pipelines/default-hash-download-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-hash-download-pipeline.js +211 -0
- package/dist/infrastructure/pipelines/default-hash-upload-pipeline.d.ts +19 -0
- package/dist/infrastructure/pipelines/default-hash-upload-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-hash-upload-pipeline.js +170 -0
- package/dist/infrastructure/pipelines/default-url-download-pipeline.d.ts +30 -0
- package/dist/infrastructure/pipelines/default-url-download-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-url-download-pipeline.js +198 -0
- package/dist/infrastructure/pipelines/default-url-upload-pipeline.d.ts +20 -0
- package/dist/infrastructure/pipelines/default-url-upload-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-url-upload-pipeline.js +126 -0
- package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts +13 -0
- package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts.map +1 -0
- package/dist/infrastructure/services/hash-wasm-hasher-service.js +113 -0
- package/dist/infrastructure/services/memory-delta-service.d.ts +17 -0
- package/dist/infrastructure/services/memory-delta-service.d.ts.map +1 -0
- package/dist/infrastructure/services/memory-delta-service.js +198 -0
- package/dist/infrastructure/services/memory-reconstruction-service.d.ts +25 -0
- package/dist/infrastructure/services/memory-reconstruction-service.d.ts.map +1 -0
- package/dist/infrastructure/services/memory-reconstruction-service.js +329 -0
- package/dist/infrastructure/services/memory-validation-service.d.ts +9 -0
- package/dist/infrastructure/services/memory-validation-service.d.ts.map +1 -0
- package/dist/infrastructure/services/memory-validation-service.js +33 -0
- package/package.json +43 -0
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.StorageAdapterFactory = void 0;
|
|
4
|
+
const azure_blob_storage_adapter_1 = require("../adapters/azure-blob-storage-adapter");
|
|
5
|
+
const url_storage_adapter_1 = require("../adapters/url-storage-adapter");
|
|
6
|
+
const local_storage_adapter_1 = require("../adapters/local-storage-adapter");
|
|
7
|
+
const http_storage_adapter_1 = require("../adapters/http-storage-adapter");
|
|
8
|
+
const gcs_storage_adapter_1 = require("../adapters/gcs-storage-adapter");
|
|
9
|
+
const ssh_storage_adapter_1 = require("../adapters/ssh-storage-adapter");
|
|
10
|
+
const s3_storage_adapter_1 = require("../adapters/s3-storage-adapter");
|
|
11
|
+
class StorageAdapterFactory {
|
|
12
|
+
static create(config) {
|
|
13
|
+
switch (config.type) {
|
|
14
|
+
case 'local':
|
|
15
|
+
return new local_storage_adapter_1.LocalStorageAdapter(config);
|
|
16
|
+
case 'http':
|
|
17
|
+
return new http_storage_adapter_1.HTTPStorageAdapter(config);
|
|
18
|
+
case 's3':
|
|
19
|
+
return new s3_storage_adapter_1.S3StorageAdapter(config);
|
|
20
|
+
case 'azure':
|
|
21
|
+
return new azure_blob_storage_adapter_1.AzureBlobStorageAdapter(config);
|
|
22
|
+
case 'gcs':
|
|
23
|
+
return new gcs_storage_adapter_1.GCSStorageAdapter(config);
|
|
24
|
+
case 'ssh':
|
|
25
|
+
return new ssh_storage_adapter_1.SSHStorageAdapter(config);
|
|
26
|
+
case 'url':
|
|
27
|
+
return new url_storage_adapter_1.DefaultUrlStorageAdapter();
|
|
28
|
+
default:
|
|
29
|
+
throw new Error(`Unsupported storage type: ${config.type}`);
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
exports.StorageAdapterFactory = StorageAdapterFactory;
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { DeltaService, ReconstructionService, ValidationService } from '../../core/services';
|
|
2
|
+
import { DownloadOptions, HashDownloadPipeline, UpdateStrategy } from '../../core/pipelines';
|
|
3
|
+
import { ChunkSource } from '../../core/services/reconstruction-service';
|
|
4
|
+
import { DeltaPlan, RDIndex } from '../../core/models';
|
|
5
|
+
import { HashStorageAdapter } from '../../core/adapters';
|
|
6
|
+
import { RacDeltaConfig } from '../../core/config';
|
|
7
|
+
import { Nullish } from '../../core/types';
|
|
8
|
+
export declare class DefaultHashDownloadPipeline extends HashDownloadPipeline {
|
|
9
|
+
protected readonly reconstruction: ReconstructionService;
|
|
10
|
+
protected readonly validation: ValidationService;
|
|
11
|
+
protected readonly storage: HashStorageAdapter;
|
|
12
|
+
protected readonly config: RacDeltaConfig;
|
|
13
|
+
protected readonly delta: DeltaService;
|
|
14
|
+
constructor(reconstruction: ReconstructionService, validation: ValidationService, storage: HashStorageAdapter, config: RacDeltaConfig, delta: DeltaService);
|
|
15
|
+
execute(localDir: string, strategy: UpdateStrategy, remoteIndex?: Nullish<RDIndex>, options?: Nullish<DownloadOptions>): Promise<void>;
|
|
16
|
+
loadLocalIndex(localDir: string): Promise<RDIndex>;
|
|
17
|
+
downloadAllMissingChunks(plan: DeltaPlan, target: 'memory' | 'disk', options?: Nullish<DownloadOptions>): Promise<ChunkSource>;
|
|
18
|
+
verifyAndDeleteObsoleteChunks(plan: DeltaPlan, localDir: string, remoteIndex: RDIndex, chunkSource: ChunkSource, options?: Nullish<DownloadOptions>): Promise<{
|
|
19
|
+
deletedFiles: string[];
|
|
20
|
+
verifiedFiles: string[];
|
|
21
|
+
rebuiltFiles: string[];
|
|
22
|
+
}>;
|
|
23
|
+
saveLocalIndex(localDir: string, index: RDIndex): Promise<void>;
|
|
24
|
+
findLocalIndex(localDir: string): Promise<RDIndex | null>;
|
|
25
|
+
private groupByFile;
|
|
26
|
+
}
|
|
27
|
+
//# sourceMappingURL=default-hash-download-pipeline.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"default-hash-download-pipeline.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/pipelines/default-hash-download-pipeline.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,YAAY,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAC7F,OAAO,EAAE,eAAe,EAAE,oBAAoB,EAAE,cAAc,EAAE,MAAM,sBAAsB,CAAC;AAC7F,OAAO,EAAE,WAAW,EAAE,MAAM,4CAA4C,CAAC;AACzE,OAAO,EAAc,SAAS,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAEnE,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAM3C,qBAAa,2BAA4B,SAAQ,oBAAoB;IAEjE,SAAS,CAAC,QAAQ,CAAC,cAAc,EAAE,qBAAqB;IACxD,SAAS,CAAC,QAAQ,CAAC,UAAU,EAAE,iBAAiB;IAChD,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,kBAAkB;IAC9C,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,cAAc;IACzC,SAAS,CAAC,QAAQ,CAAC,KAAK,EAAE,YAAY;gBAJnB,cAAc,EAAE,qBAAqB,EACrC,UAAU,EAAE,iBAAiB,EAC7B,OAAO,EAAE,kBAAkB,EAC3B,MAAM,EAAE,cAAc,EACtB,KAAK,EAAE,YAAY;IAKlC,OAAO,CACX,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,cAAc,EACxB,WAAW,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,EAC9B,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,GACjC,OAAO,CAAC,IAAI,CAAC;IA8FV,cAAc,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IASlD,wBAAwB,CAC5B,IAAI,EAAE,SAAS,EACf,MAAM,EAAE,QAAQ,GAAG,MAAM,EACzB,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,GACjC,OAAO,CAAC,WAAW,CAAC;IAmEjB,6BAA6B,CACjC,IAAI,EAAE,SAAS,EACf,QAAQ,EAAE,MAAM,EAChB,WAAW,EAAE,OAAO,EACpB,WAAW,EAAE,WAAW,EACxB,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC;;;;;IA8C9B,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO;IAO/C,cAAc,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC;IAyB/D,OAAO,CAAC,WAAW;CAapB"}
|
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.DefaultHashDownloadPipeline = void 0;
|
|
4
|
+
const promises_1 = require("fs/promises");
|
|
5
|
+
const path_1 = require("path");
|
|
6
|
+
const pipelines_1 = require("../../core/pipelines");
|
|
7
|
+
const stream_to_buffer_1 = require("../../core/utils/stream-to-buffer");
|
|
8
|
+
const storage_chunk_source_1 = require("../chunk-sources/storage-chunk-source");
|
|
9
|
+
const memory_chunk_source_1 = require("../chunk-sources/memory-chunk-source");
|
|
10
|
+
const disk_chunk_source_1 = require("../chunk-sources/disk-chunk-source");
|
|
11
|
+
class DefaultHashDownloadPipeline extends pipelines_1.HashDownloadPipeline {
|
|
12
|
+
reconstruction;
|
|
13
|
+
validation;
|
|
14
|
+
storage;
|
|
15
|
+
config;
|
|
16
|
+
delta;
|
|
17
|
+
constructor(reconstruction, validation, storage, config, delta) {
|
|
18
|
+
super(storage, delta, reconstruction, validation, config);
|
|
19
|
+
this.reconstruction = reconstruction;
|
|
20
|
+
this.validation = validation;
|
|
21
|
+
this.storage = storage;
|
|
22
|
+
this.config = config;
|
|
23
|
+
this.delta = delta;
|
|
24
|
+
}
|
|
25
|
+
async execute(localDir, strategy, remoteIndex, options) {
|
|
26
|
+
try {
|
|
27
|
+
this.changeState('scanning', options);
|
|
28
|
+
const localIndex = !options?.force
|
|
29
|
+
? options?.useExistingIndex
|
|
30
|
+
? ((await this.findLocalIndex(localDir)) ?? (await this.loadLocalIndex(localDir)))
|
|
31
|
+
: await this.loadLocalIndex(localDir)
|
|
32
|
+
: null;
|
|
33
|
+
const remoteIndexToUse = remoteIndex ?? (await this.storage.getRemoteIndex());
|
|
34
|
+
if (!remoteIndexToUse) {
|
|
35
|
+
throw new Error('Remote rd-index not provided and was not found in storage. Check if storage pathPrefix is correct or provide an index manually');
|
|
36
|
+
}
|
|
37
|
+
const plan = await this.delta.compareForDownload(localIndex, remoteIndexToUse);
|
|
38
|
+
if (!options?.force &&
|
|
39
|
+
!plan.missingChunks.length &&
|
|
40
|
+
!plan.obsoleteChunks.length &&
|
|
41
|
+
!plan.deletedFiles.length) {
|
|
42
|
+
console.info('No changes to download, you are up to date.');
|
|
43
|
+
return;
|
|
44
|
+
}
|
|
45
|
+
let chunkSource = null;
|
|
46
|
+
if (strategy === pipelines_1.UpdateStrategy.DownloadAllFirstToMemory) {
|
|
47
|
+
this.changeState('downloading', options);
|
|
48
|
+
chunkSource = await this.downloadAllMissingChunks(plan, 'memory', options);
|
|
49
|
+
}
|
|
50
|
+
// StorageChunkSource admits streaming, so it will be downloading chunks while reconstructing in service
|
|
51
|
+
if (strategy === pipelines_1.UpdateStrategy.StreamFromNetwork) {
|
|
52
|
+
chunkSource = new storage_chunk_source_1.StorageChunkSource(this.storage);
|
|
53
|
+
}
|
|
54
|
+
if (strategy === pipelines_1.UpdateStrategy.DownloadAllFirstToDisk) {
|
|
55
|
+
this.changeState('downloading', options);
|
|
56
|
+
chunkSource = await this.downloadAllMissingChunks(plan, 'disk', options);
|
|
57
|
+
}
|
|
58
|
+
if (!chunkSource) {
|
|
59
|
+
throw new Error('No chunkSource found');
|
|
60
|
+
}
|
|
61
|
+
if (plan.newAndModifiedFiles.length) {
|
|
62
|
+
this.changeState('reconstructing', options);
|
|
63
|
+
await this.reconstruction.reconstructAll(plan, localDir, chunkSource, {
|
|
64
|
+
forceRebuild: options?.force,
|
|
65
|
+
verifyAfterRebuild: true,
|
|
66
|
+
fileConcurrency: options?.fileReconstructionConcurrency,
|
|
67
|
+
inPlaceReconstructionThreshold: options?.inPlaceReconstructionThreshold,
|
|
68
|
+
onProgress: (reconstructProgress, diskSpeed, networkProgress, networkSpeed) => {
|
|
69
|
+
this.updateProgress(reconstructProgress, 'reconstructing', diskSpeed, undefined, options);
|
|
70
|
+
if (networkProgress) {
|
|
71
|
+
this.updateProgress(networkProgress, 'download', 0, networkSpeed, options);
|
|
72
|
+
}
|
|
73
|
+
},
|
|
74
|
+
});
|
|
75
|
+
}
|
|
76
|
+
if (plan.obsoleteChunks.length || plan.deletedFiles.length) {
|
|
77
|
+
this.changeState('cleaning', options);
|
|
78
|
+
await this.verifyAndDeleteObsoleteChunks(plan, localDir, remoteIndexToUse, chunkSource, options);
|
|
79
|
+
}
|
|
80
|
+
await this.saveLocalIndex(localDir, remoteIndexToUse);
|
|
81
|
+
if (chunkSource instanceof disk_chunk_source_1.DiskChunkSource) {
|
|
82
|
+
chunkSource.clear();
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
finally {
|
|
86
|
+
await this.storage.dispose();
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
async loadLocalIndex(localDir) {
|
|
90
|
+
const localIndex = await this.delta.createIndexFromDirectory(localDir, this.config.chunkSize, this.config.maxConcurrency);
|
|
91
|
+
return localIndex;
|
|
92
|
+
}
|
|
93
|
+
async downloadAllMissingChunks(plan, target, options) {
|
|
94
|
+
if (target === 'disk' && !options?.chunksSavePath) {
|
|
95
|
+
throw new Error('Error: chunksSavePath must be provided under options');
|
|
96
|
+
}
|
|
97
|
+
let chunksSavePath = undefined;
|
|
98
|
+
if (target === 'disk' && options?.chunksSavePath) {
|
|
99
|
+
chunksSavePath = (0, path_1.isAbsolute)(options?.chunksSavePath)
|
|
100
|
+
? options?.chunksSavePath
|
|
101
|
+
: (0, path_1.resolve)(process.cwd(), options?.chunksSavePath);
|
|
102
|
+
}
|
|
103
|
+
const chunkSource = target === 'memory' ? new memory_chunk_source_1.MemoryChunkSource() : new disk_chunk_source_1.DiskChunkSource(chunksSavePath);
|
|
104
|
+
const chunks = plan.missingChunks;
|
|
105
|
+
let completed = 0;
|
|
106
|
+
let totalBytes = 0;
|
|
107
|
+
const concurrency = this.config.maxConcurrency ?? 6;
|
|
108
|
+
const queue = [...chunks];
|
|
109
|
+
let lastUpdateTime = Date.now();
|
|
110
|
+
let lastBytes = 0;
|
|
111
|
+
let speed = 0;
|
|
112
|
+
const worker = async () => {
|
|
113
|
+
while (queue.length) {
|
|
114
|
+
const chunk = queue.pop();
|
|
115
|
+
const readable = await this.storage.getChunk(chunk.hash);
|
|
116
|
+
if (!readable) {
|
|
117
|
+
throw new Error(`Chunk missing: ${chunk.hash}`);
|
|
118
|
+
}
|
|
119
|
+
const buffer = await (0, stream_to_buffer_1.streamToBuffer)(readable);
|
|
120
|
+
chunkSource.setChunk(chunk.hash, buffer);
|
|
121
|
+
completed++;
|
|
122
|
+
totalBytes += buffer.length;
|
|
123
|
+
const now = Date.now();
|
|
124
|
+
const elapsed = now - lastUpdateTime;
|
|
125
|
+
// each 100ms
|
|
126
|
+
if (elapsed >= 100) {
|
|
127
|
+
const bytesDiff = totalBytes - lastBytes;
|
|
128
|
+
speed = bytesDiff / (elapsed / 1000);
|
|
129
|
+
lastUpdateTime = now;
|
|
130
|
+
lastBytes = totalBytes;
|
|
131
|
+
const percent = ((completed / chunks.length) * 100).toFixed(1);
|
|
132
|
+
this.updateProgress(Number(percent), 'download', 0, speed, options);
|
|
133
|
+
}
|
|
134
|
+
}
|
|
135
|
+
};
|
|
136
|
+
await Promise.all(Array.from({ length: concurrency }, () => worker()));
|
|
137
|
+
const percent = 100;
|
|
138
|
+
const totalTime = (Date.now() - lastUpdateTime) / 1000;
|
|
139
|
+
const avgSpeed = totalBytes / (totalTime || 1);
|
|
140
|
+
this.updateProgress(percent, 'download', 0, avgSpeed, options);
|
|
141
|
+
return chunkSource;
|
|
142
|
+
}
|
|
143
|
+
async verifyAndDeleteObsoleteChunks(plan, localDir, remoteIndex, chunkSource, options) {
|
|
144
|
+
const dir = (0, path_1.isAbsolute)(localDir) ? localDir : (0, path_1.resolve)(process.cwd(), localDir);
|
|
145
|
+
const obsoleteByFile = this.groupByFile(plan.obsoleteChunks);
|
|
146
|
+
const deletedFiles = [];
|
|
147
|
+
const verifiedFiles = [];
|
|
148
|
+
const rebuiltFiles = [];
|
|
149
|
+
const allFiles = new Set([...plan.deletedFiles, ...obsoleteByFile.keys()]);
|
|
150
|
+
const totalFiles = allFiles.size;
|
|
151
|
+
let completedFiles = 0;
|
|
152
|
+
for (const filePath of allFiles) {
|
|
153
|
+
const absPath = (0, path_1.join)(dir, filePath);
|
|
154
|
+
const remoteFile = remoteIndex.files.find((file) => file.path === filePath);
|
|
155
|
+
// Fully removed file
|
|
156
|
+
if (!remoteFile || plan.deletedFiles.includes(filePath)) {
|
|
157
|
+
await (0, promises_1.rm)(absPath, { force: true });
|
|
158
|
+
deletedFiles.push(filePath);
|
|
159
|
+
}
|
|
160
|
+
else {
|
|
161
|
+
const isValid = await this.validation.validateFile(remoteFile, absPath);
|
|
162
|
+
if (!isValid) {
|
|
163
|
+
await this.reconstruction.reconstructFile(remoteFile, absPath, chunkSource);
|
|
164
|
+
rebuiltFiles.push(filePath);
|
|
165
|
+
}
|
|
166
|
+
else {
|
|
167
|
+
verifiedFiles.push(filePath);
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
completedFiles++;
|
|
171
|
+
this.updateProgress((completedFiles / totalFiles) * 100, 'deleting', undefined, undefined, options);
|
|
172
|
+
}
|
|
173
|
+
return { deletedFiles, verifiedFiles, rebuiltFiles };
|
|
174
|
+
}
|
|
175
|
+
async saveLocalIndex(localDir, index) {
|
|
176
|
+
const dir = (0, path_1.isAbsolute)(localDir) ? localDir : (0, path_1.resolve)(process.cwd(), localDir);
|
|
177
|
+
const indexPath = (0, path_1.join)(dir, 'rd-index.json');
|
|
178
|
+
await (0, promises_1.writeFile)(indexPath, JSON.stringify(index, null, 2), 'utf-8');
|
|
179
|
+
}
|
|
180
|
+
async findLocalIndex(localDir) {
|
|
181
|
+
const dir = (0, path_1.isAbsolute)(localDir) ? localDir : (0, path_1.resolve)(process.cwd(), localDir);
|
|
182
|
+
const files = await (0, promises_1.readdir)(dir);
|
|
183
|
+
const indexFile = files.find((f) => f === 'rd-index.json');
|
|
184
|
+
if (!indexFile) {
|
|
185
|
+
return null;
|
|
186
|
+
}
|
|
187
|
+
const filePath = (0, path_1.join)(localDir, indexFile);
|
|
188
|
+
const data = await (0, promises_1.readFile)(filePath, 'utf-8');
|
|
189
|
+
try {
|
|
190
|
+
const parsed = JSON.parse(data);
|
|
191
|
+
return parsed;
|
|
192
|
+
}
|
|
193
|
+
catch (error) {
|
|
194
|
+
if (error instanceof Error) {
|
|
195
|
+
throw new Error(`Invalid RDIndex JSON at ${filePath}: ${error.message}`);
|
|
196
|
+
}
|
|
197
|
+
throw error;
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
groupByFile(chunks) {
|
|
201
|
+
const map = new Map();
|
|
202
|
+
for (const chunk of chunks) {
|
|
203
|
+
if (!map.has(chunk.filePath)) {
|
|
204
|
+
map.set(chunk.filePath, []);
|
|
205
|
+
}
|
|
206
|
+
map.get(chunk.filePath).push(chunk);
|
|
207
|
+
}
|
|
208
|
+
return map;
|
|
209
|
+
}
|
|
210
|
+
}
|
|
211
|
+
exports.DefaultHashDownloadPipeline = DefaultHashDownloadPipeline;
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { UploadOptions, HashUploadPipeline } from '../../core/pipelines';
|
|
2
|
+
import { DeltaPlan, RDIndex } from '../../core/models';
|
|
3
|
+
import { HashStorageAdapter } from '../../core/adapters';
|
|
4
|
+
import { RacDeltaConfig } from '../../core/config';
|
|
5
|
+
import { DeltaService } from '../../core/services';
|
|
6
|
+
import { Nullish } from '../../core/types';
|
|
7
|
+
export declare class DefaultHashUploadPipeline extends HashUploadPipeline {
|
|
8
|
+
protected readonly storage: HashStorageAdapter;
|
|
9
|
+
protected readonly delta: DeltaService;
|
|
10
|
+
protected readonly config: RacDeltaConfig;
|
|
11
|
+
constructor(storage: HashStorageAdapter, delta: DeltaService, config: RacDeltaConfig);
|
|
12
|
+
execute(directory: string, remoteIndex?: Nullish<RDIndex>, options?: Nullish<UploadOptions>): Promise<RDIndex>;
|
|
13
|
+
scanDirectory(localDir: string, ignorePatterns?: Nullish<string[]>): Promise<RDIndex>;
|
|
14
|
+
uploadMissingChunks(plan: DeltaPlan, baseDir: string, force: boolean, options?: Nullish<UploadOptions>): Promise<void>;
|
|
15
|
+
uploadIndex(index: RDIndex): Promise<void>;
|
|
16
|
+
deleteObsoleteChunks(plan: DeltaPlan, options?: Nullish<UploadOptions>): Promise<void>;
|
|
17
|
+
private groupChunksByFile;
|
|
18
|
+
}
|
|
19
|
+
//# sourceMappingURL=default-hash-upload-pipeline.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"default-hash-upload-pipeline.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/pipelines/default-hash-upload-pipeline.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AACzE,OAAO,EAAc,SAAS,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AACnE,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AACnD,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE3C,qBAAa,yBAA0B,SAAQ,kBAAkB;IAE7D,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,kBAAkB;IAC9C,SAAS,CAAC,QAAQ,CAAC,KAAK,EAAE,YAAY;IACtC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,cAAc;gBAFtB,OAAO,EAAE,kBAAkB,EAC3B,KAAK,EAAE,YAAY,EACnB,MAAM,EAAE,cAAc;IAKrC,OAAO,CACX,SAAS,EAAE,MAAM,EACjB,WAAW,CAAC,EAAE,OAAO,CAAC,OAAO,CAAC,EAC9B,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAC/B,OAAO,CAAC,OAAO,CAAC;IAyDb,aAAa,CAAC,QAAQ,EAAE,MAAM,EAAE,cAAc,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC;IAUrF,mBAAmB,CACvB,IAAI,EAAE,SAAS,EACf,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,OAAO,EACd,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAC/B,OAAO,CAAC,IAAI,CAAC;IA8DV,WAAW,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAI1C,oBAAoB,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAqD5F,OAAO,CAAC,iBAAiB;CAa1B"}
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.DefaultHashUploadPipeline = void 0;
|
|
4
|
+
const path_1 = require("path");
|
|
5
|
+
const promises_1 = require("fs/promises");
|
|
6
|
+
const stream_1 = require("stream");
|
|
7
|
+
const pipelines_1 = require("../../core/pipelines");
|
|
8
|
+
class DefaultHashUploadPipeline extends pipelines_1.HashUploadPipeline {
|
|
9
|
+
storage;
|
|
10
|
+
delta;
|
|
11
|
+
config;
|
|
12
|
+
constructor(storage, delta, config) {
|
|
13
|
+
super(storage, delta, config);
|
|
14
|
+
this.storage = storage;
|
|
15
|
+
this.delta = delta;
|
|
16
|
+
this.config = config;
|
|
17
|
+
}
|
|
18
|
+
async execute(directory, remoteIndex, options) {
|
|
19
|
+
try {
|
|
20
|
+
this.changeState('scanning', options);
|
|
21
|
+
const localIndex = await this.scanDirectory(directory, options?.ignorePatterns);
|
|
22
|
+
let remoteIndexToUse = remoteIndex ?? null;
|
|
23
|
+
// If no remote index provided and no force upload, we try to get remote index using storageAdapter
|
|
24
|
+
if (!remoteIndex && !options?.force) {
|
|
25
|
+
const result = await this.storage.getRemoteIndex();
|
|
26
|
+
if (!result && options?.requireRemoteIndex) {
|
|
27
|
+
throw new Error('Remote rd-index.json could not be found');
|
|
28
|
+
}
|
|
29
|
+
remoteIndexToUse = result;
|
|
30
|
+
}
|
|
31
|
+
// If force we always upload everything, so we just ignore remote index
|
|
32
|
+
if (options?.force) {
|
|
33
|
+
remoteIndexToUse = null;
|
|
34
|
+
}
|
|
35
|
+
// If no remoteIndex provided, everything will be uploaded
|
|
36
|
+
this.changeState('comparing', options);
|
|
37
|
+
const deltaPlan = await this.delta.compareForUpload(localIndex, remoteIndexToUse);
|
|
38
|
+
if (!options?.force &&
|
|
39
|
+
!deltaPlan.missingChunks.length &&
|
|
40
|
+
!deltaPlan.obsoleteChunks.length &&
|
|
41
|
+
!deltaPlan.deletedFiles.length) {
|
|
42
|
+
console.info('No changes to upload or delete, remote is up to date.');
|
|
43
|
+
return localIndex;
|
|
44
|
+
}
|
|
45
|
+
if (deltaPlan.missingChunks.length) {
|
|
46
|
+
this.changeState('uploading', options);
|
|
47
|
+
await this.uploadMissingChunks(deltaPlan, directory, !!options?.force, options);
|
|
48
|
+
}
|
|
49
|
+
if (remoteIndexToUse && deltaPlan.obsoleteChunks.length) {
|
|
50
|
+
this.changeState('cleaning', options);
|
|
51
|
+
await this.deleteObsoleteChunks(deltaPlan, options);
|
|
52
|
+
}
|
|
53
|
+
this.changeState('finalizing', options);
|
|
54
|
+
await this.uploadIndex(localIndex);
|
|
55
|
+
return localIndex;
|
|
56
|
+
}
|
|
57
|
+
finally {
|
|
58
|
+
await this.storage.dispose();
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
async scanDirectory(localDir, ignorePatterns) {
|
|
62
|
+
const localIndex = await this.delta.createIndexFromDirectory(localDir, this.config.chunkSize, this.config.maxConcurrency, ignorePatterns);
|
|
63
|
+
return localIndex;
|
|
64
|
+
}
|
|
65
|
+
async uploadMissingChunks(plan, baseDir, force, options) {
    // Upload every chunk the delta plan marked as missing on the remote,
    // reading chunk bytes from files under baseDir with a bounded pool of
    // concurrent workers and reporting upload progress via updateProgress.
    // NOTE(review): `force` is accepted but never read in this body —
    // presumably honored elsewhere or reserved; confirm with callers.
    if (!plan.missingChunks?.length) {
        return;
    }
    // Normalize baseDir to an absolute path.
    const dir = (0, path_1.isAbsolute)(baseDir) ? baseDir : (0, path_1.resolve)(process.cwd(), baseDir);
    const concurrency = this.config.maxConcurrency ?? 5;
    // Group chunks per source file so each file is opened once per worker visit.
    const files = this.groupChunksByFile(plan.missingChunks);
    const totalChunks = plan.missingChunks.length;
    let uploadedChunks = 0;
    let uploadedBytes = 0;
    const startTime = Date.now();
    // Shared work queue of [relativePath, chunks] entries; workers pop until drained.
    const queue = [...files];
    const worker = async () => {
        while (true) {
            const next = queue.pop();
            if (!next) {
                break;
            }
            const [relativePath, chunks] = next;
            // Sort by offset so reads advance sequentially through the file.
            chunks.sort((a, b) => a.offset - b.offset);
            const filePath = (0, path_1.join)(dir, relativePath);
            const fileHandle = await (0, promises_1.open)(filePath, 'r');
            try {
                // Single reusable read buffer per file; safe because each
                // putChunk is awaited before the buffer is overwritten.
                const buffer = Buffer.alloc(this.config.chunkSize);
                for (const { offset, size, hash } of chunks) {
                    const { bytesRead } = await fileHandle.read(buffer, 0, size, offset);
                    // subarray shares the buffer's memory — no copy is made.
                    const data = buffer.subarray(0, bytesRead);
                    const stream = stream_1.Readable.from(data);
                    await this.storage.putChunk(hash, stream, {
                        overwrite: true,
                        size,
                    });
                    uploadedChunks++;
                    uploadedBytes += bytesRead;
                    const percent = ((uploadedChunks / totalChunks) * 100).toFixed(1);
                    const elapsed = (Date.now() - startTime) / 1000; // seconds
                    // Average throughput (bytes/second) since the upload started.
                    const speed = uploadedBytes / elapsed;
                    this.updateProgress(Number(percent), 'upload', speed, options);
                }
            }
            finally {
                // Always release the file handle, even when an upload throws.
                await fileHandle.close();
            }
        }
    };
    // Launch `concurrency` workers over the shared queue and wait for all;
    // Promise.all rejects on the first worker failure.
    await Promise.all(Array.from({ length: concurrency }, () => worker()));
}
|
|
112
|
+
async uploadIndex(index) {
    // Publish the given index as the remote index via the storage adapter.
    await this.storage.putRemoteIndex(index);
}
|
|
115
|
+
async deleteObsoleteChunks(plan, options) {
|
|
116
|
+
if (!plan.obsoleteChunks?.length) {
|
|
117
|
+
return;
|
|
118
|
+
}
|
|
119
|
+
const concurrency = this.config.maxConcurrency ?? 5;
|
|
120
|
+
const queue = [...plan.obsoleteChunks];
|
|
121
|
+
const total = queue.length;
|
|
122
|
+
let deleted = 0;
|
|
123
|
+
if (total === 0) {
|
|
124
|
+
return;
|
|
125
|
+
}
|
|
126
|
+
const failed = [];
|
|
127
|
+
const maxRetries = 3;
|
|
128
|
+
const worker = async () => {
|
|
129
|
+
while (true) {
|
|
130
|
+
const chunk = queue.pop();
|
|
131
|
+
if (!chunk) {
|
|
132
|
+
break;
|
|
133
|
+
}
|
|
134
|
+
let success = false;
|
|
135
|
+
for (let attempt = 1; attempt <= maxRetries && !success; attempt++) {
|
|
136
|
+
try {
|
|
137
|
+
await this.storage.deleteChunk(chunk.hash);
|
|
138
|
+
success = true;
|
|
139
|
+
}
|
|
140
|
+
catch (err) {
|
|
141
|
+
if (attempt === maxRetries) {
|
|
142
|
+
failed.push(chunk.hash);
|
|
143
|
+
}
|
|
144
|
+
else {
|
|
145
|
+
await new Promise((res) => setTimeout(res, 100 * attempt));
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
deleted++;
|
|
150
|
+
const percent = (deleted / total) * 100;
|
|
151
|
+
this.updateProgress(percent, 'deleting', undefined, options);
|
|
152
|
+
}
|
|
153
|
+
};
|
|
154
|
+
await Promise.all(Array.from({ length: concurrency }, () => worker()));
|
|
155
|
+
if (failed.length > 0) {
|
|
156
|
+
throw new Error(`Failed to delete ${failed.length}/${total} chunks: ${failed.join(', ')}`);
|
|
157
|
+
}
|
|
158
|
+
}
|
|
159
|
+
groupChunksByFile(chunks) {
|
|
160
|
+
const groups = new Map();
|
|
161
|
+
for (const chunk of chunks) {
|
|
162
|
+
if (!groups.has(chunk.filePath)) {
|
|
163
|
+
groups.set(chunk.filePath, []);
|
|
164
|
+
}
|
|
165
|
+
groups.get(chunk.filePath).push(chunk);
|
|
166
|
+
}
|
|
167
|
+
return Array.from(groups.entries());
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
// CommonJS export of the pipeline implementation.
exports.DefaultHashUploadPipeline = DefaultHashUploadPipeline;
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { UrlDownloadPipeline, DownloadOptions, UpdateStrategy } from '../../core/pipelines';
|
|
2
|
+
import { ChunkUrlInfo, DeltaPlan, RDIndex } from '../../core/models';
|
|
3
|
+
import { DeltaService, ReconstructionService, ValidationService } from '../../core/services';
|
|
4
|
+
import { ChunkSource } from '../../core/services/reconstruction-service';
|
|
5
|
+
import { UrlStorageAdapter } from '../../core/adapters';
|
|
6
|
+
import { RacDeltaConfig } from '../../core/config';
|
|
7
|
+
import { Nullish } from '../../core/types';
|
|
8
|
+
/**
 * Default implementation of {@link UrlDownloadPipeline}: downloads chunks
 * and the remote index through a URL-based storage adapter and reconciles
 * the local directory against the remote state.
 */
export declare class DefaultUrlDownloadPipeline extends UrlDownloadPipeline {
    /** Adapter used to fetch chunks and the remote index by URL. */
    protected readonly storage: UrlStorageAdapter;
    /** Service that rebuilds files from chunk sources. */
    protected readonly reconstruction: ReconstructionService;
    /** Service used to verify file/chunk integrity. */
    protected readonly validation: ValidationService;
    /** Service that computes index/delta information. */
    protected readonly delta: DeltaService;
    protected readonly config: RacDeltaConfig;
    constructor(storage: UrlStorageAdapter, reconstruction: ReconstructionService, validation: ValidationService, delta: DeltaService, config: RacDeltaConfig);
    /**
     * Run the download flow for `localDir` given per-chunk download URLs and
     * the index URL, applying `strategy`; an optional precomputed `plan`
     * may be supplied, otherwise one is presumably derived internally —
     * confirm against the implementation.
     */
    execute(localDir: string, urls: {
        downloadUrls: Record<string, ChunkUrlInfo>;
        indexUrl: string;
    }, strategy: UpdateStrategy, plan?: Nullish<DeltaPlan>, options?: Nullish<DownloadOptions>): Promise<void>;
    /** Locate an existing local index, resolving to null when none exists. */
    findLocalIndex(localDir: string): Promise<RDIndex | null>;
    /** Load the local index for `localDir`. */
    loadLocalIndex(localDir: string): Promise<RDIndex>;
    /**
     * Download all missing chunks to either memory or disk and return a
     * ChunkSource the reconstruction step can read from.
     */
    downloadAllMissingChunks(downloadUrls: Record<string, ChunkUrlInfo>, target: 'memory' | 'disk', options?: Nullish<DownloadOptions>): Promise<ChunkSource>;
    /**
     * Apply the delta plan to the local tree and report which files were
     * deleted, verified unchanged, and rebuilt.
     */
    verifyAndDeleteObsoleteChunks(plan: DeltaPlan, localDir: string, remoteIndex: RDIndex, chunkSource: ChunkSource, options?: Nullish<DownloadOptions>): Promise<{
        deletedFiles: string[];
        verifiedFiles: string[];
        rebuiltFiles: string[];
    }>;
    /** Persist `index` as the local index for `localDir`. */
    saveLocalIndex(localDir: string, index: RDIndex): Promise<void>;
    private groupByFile;
}
|
|
30
|
+
//# sourceMappingURL=default-url-download-pipeline.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"default-url-download-pipeline.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/pipelines/default-url-download-pipeline.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,mBAAmB,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,sBAAsB,CAAC;AAC5F,OAAO,EAAc,YAAY,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AACjF,OAAO,EAAE,YAAY,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAC7F,OAAO,EAAE,WAAW,EAAE,MAAM,4CAA4C,CAAC;AAEzE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAM3C,qBAAa,0BAA2B,SAAQ,mBAAmB;IAE/D,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,iBAAiB;IAC7C,SAAS,CAAC,QAAQ,CAAC,cAAc,EAAE,qBAAqB;IACxD,SAAS,CAAC,QAAQ,CAAC,UAAU,EAAE,iBAAiB;IAChD,SAAS,CAAC,QAAQ,CAAC,KAAK,EAAE,YAAY;IACtC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,cAAc;gBAJtB,OAAO,EAAE,iBAAiB,EAC1B,cAAc,EAAE,qBAAqB,EACrC,UAAU,EAAE,iBAAiB,EAC7B,KAAK,EAAE,YAAY,EACnB,MAAM,EAAE,cAAc;IAKrC,OAAO,CACX,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE;QACJ,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;QAC3C,QAAQ,EAAE,MAAM,CAAC;KAClB,EACD,QAAQ,EAAE,cAAc,EACxB,IAAI,CAAC,EAAE,OAAO,CAAC,SAAS,CAAC,EACzB,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,GACjC,OAAO,CAAC,IAAI,CAAC;IA2EV,cAAc,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC;IAyBzD,cAAc,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IASlD,wBAAwB,CAC5B,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,EAC1C,MAAM,EAAE,QAAQ,GAAG,MAAM,EACzB,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,GACjC,OAAO,CAAC,WAAW,CAAC;IAmEjB,6BAA6B,CACjC,IAAI,EAAE,SAAS,EACf,QAAQ,EAAE,MAAM,EAChB,WAAW,EAAE,OAAO,EACpB,WAAW,EAAE,WAAW,EACxB,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC;;;;;IA8C9B,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAOrE,OAAO,CAAC,WAAW;CAapB"}
|