rac-delta 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +1 -0
- package/dist/core/adapters/index.d.ts +2 -0
- package/dist/core/adapters/index.d.ts.map +1 -0
- package/dist/core/adapters/index.js +17 -0
- package/dist/core/adapters/storage-adapter.d.ts +125 -0
- package/dist/core/adapters/storage-adapter.d.ts.map +1 -0
- package/dist/core/adapters/storage-adapter.js +14 -0
- package/dist/core/config/index.d.ts +2 -0
- package/dist/core/config/index.d.ts.map +1 -0
- package/dist/core/config/index.js +17 -0
- package/dist/core/config/rac-delta-config.d.ts +132 -0
- package/dist/core/config/rac-delta-config.d.ts.map +1 -0
- package/dist/core/config/rac-delta-config.js +2 -0
- package/dist/core/exceptions.d.ts +25 -0
- package/dist/core/exceptions.d.ts.map +1 -0
- package/dist/core/exceptions.js +51 -0
- package/dist/core/models/chunk.d.ts +12 -0
- package/dist/core/models/chunk.d.ts.map +1 -0
- package/dist/core/models/chunk.js +2 -0
- package/dist/core/models/delta-plan.d.ts +12 -0
- package/dist/core/models/delta-plan.d.ts.map +1 -0
- package/dist/core/models/delta-plan.js +2 -0
- package/dist/core/models/file-entry.d.ts +9 -0
- package/dist/core/models/file-entry.d.ts.map +1 -0
- package/dist/core/models/file-entry.js +2 -0
- package/dist/core/models/index.d.ts +5 -0
- package/dist/core/models/index.d.ts.map +1 -0
- package/dist/core/models/index.js +20 -0
- package/dist/core/models/rd-index.d.ts +8 -0
- package/dist/core/models/rd-index.d.ts.map +1 -0
- package/dist/core/models/rd-index.js +2 -0
- package/dist/core/pipelines/download-pipeline.d.ts +142 -0
- package/dist/core/pipelines/download-pipeline.d.ts.map +1 -0
- package/dist/core/pipelines/download-pipeline.js +64 -0
- package/dist/core/pipelines/index.d.ts +3 -0
- package/dist/core/pipelines/index.d.ts.map +1 -0
- package/dist/core/pipelines/index.js +18 -0
- package/dist/core/pipelines/upload-pipeline.d.ts +60 -0
- package/dist/core/pipelines/upload-pipeline.d.ts.map +1 -0
- package/dist/core/pipelines/upload-pipeline.js +34 -0
- package/dist/core/services/delta-service.d.ts +76 -0
- package/dist/core/services/delta-service.d.ts.map +1 -0
- package/dist/core/services/delta-service.js +2 -0
- package/dist/core/services/hasher-service.d.ts +47 -0
- package/dist/core/services/hasher-service.d.ts.map +1 -0
- package/dist/core/services/hasher-service.js +2 -0
- package/dist/core/services/index.d.ts +5 -0
- package/dist/core/services/index.d.ts.map +1 -0
- package/dist/core/services/index.js +20 -0
- package/dist/core/services/reconstruction-service.d.ts +99 -0
- package/dist/core/services/reconstruction-service.d.ts.map +1 -0
- package/dist/core/services/reconstruction-service.js +4 -0
- package/dist/core/services/validation-service.d.ts +18 -0
- package/dist/core/services/validation-service.d.ts.map +1 -0
- package/dist/core/services/validation-service.js +2 -0
- package/dist/core/types/index.d.ts +2 -0
- package/dist/core/types/index.d.ts.map +1 -0
- package/dist/core/types/index.js +17 -0
- package/dist/core/types/types.d.ts +3 -0
- package/dist/core/types/types.d.ts.map +1 -0
- package/dist/core/types/types.js +2 -0
- package/dist/core/utils/index.d.ts +3 -0
- package/dist/core/utils/index.d.ts.map +1 -0
- package/dist/core/utils/index.js +18 -0
- package/dist/core/utils/invariant.d.ts +2 -0
- package/dist/core/utils/invariant.d.ts.map +1 -0
- package/dist/core/utils/invariant.js +11 -0
- package/dist/core/utils/stream-to-buffer.d.ts +3 -0
- package/dist/core/utils/stream-to-buffer.d.ts.map +1 -0
- package/dist/core/utils/stream-to-buffer.js +10 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +29 -0
- package/dist/infrastructure/adapters/azure-blob-storage-adapter.d.ts +24 -0
- package/dist/infrastructure/adapters/azure-blob-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/azure-blob-storage-adapter.js +149 -0
- package/dist/infrastructure/adapters/gcs-storage-adapter.d.ts +20 -0
- package/dist/infrastructure/adapters/gcs-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/gcs-storage-adapter.js +101 -0
- package/dist/infrastructure/adapters/http-storage-adapter.d.ts +23 -0
- package/dist/infrastructure/adapters/http-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/http-storage-adapter.js +154 -0
- package/dist/infrastructure/adapters/local-storage-adapter.d.ts +23 -0
- package/dist/infrastructure/adapters/local-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/local-storage-adapter.js +124 -0
- package/dist/infrastructure/adapters/s3-storage-adapter.d.ts +24 -0
- package/dist/infrastructure/adapters/s3-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/s3-storage-adapter.js +139 -0
- package/dist/infrastructure/adapters/ssh-storage-adapter.d.ts +28 -0
- package/dist/infrastructure/adapters/ssh-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/ssh-storage-adapter.js +237 -0
- package/dist/infrastructure/adapters/url-storage-adapter.d.ts +14 -0
- package/dist/infrastructure/adapters/url-storage-adapter.d.ts.map +1 -0
- package/dist/infrastructure/adapters/url-storage-adapter.js +92 -0
- package/dist/infrastructure/chunk-sources/disk-chunk-source.d.ts +12 -0
- package/dist/infrastructure/chunk-sources/disk-chunk-source.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/disk-chunk-source.js +61 -0
- package/dist/infrastructure/chunk-sources/index.d.ts +4 -0
- package/dist/infrastructure/chunk-sources/index.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/index.js +19 -0
- package/dist/infrastructure/chunk-sources/memory-chunk-source.d.ts +9 -0
- package/dist/infrastructure/chunk-sources/memory-chunk-source.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/memory-chunk-source.js +29 -0
- package/dist/infrastructure/chunk-sources/storage-chunk-source.d.ts +21 -0
- package/dist/infrastructure/chunk-sources/storage-chunk-source.d.ts.map +1 -0
- package/dist/infrastructure/chunk-sources/storage-chunk-source.js +150 -0
- package/dist/infrastructure/client.d.ts +45 -0
- package/dist/infrastructure/client.d.ts.map +1 -0
- package/dist/infrastructure/client.js +52 -0
- package/dist/infrastructure/factories/pipeline-factory.d.ts +15 -0
- package/dist/infrastructure/factories/pipeline-factory.d.ts.map +1 -0
- package/dist/infrastructure/factories/pipeline-factory.js +26 -0
- package/dist/infrastructure/factories/service-factory.d.ts +11 -0
- package/dist/infrastructure/factories/service-factory.d.ts.map +1 -0
- package/dist/infrastructure/factories/service-factory.js +17 -0
- package/dist/infrastructure/factories/storage-adpater-factory.d.ts +41 -0
- package/dist/infrastructure/factories/storage-adpater-factory.d.ts.map +1 -0
- package/dist/infrastructure/factories/storage-adpater-factory.js +33 -0
- package/dist/infrastructure/pipelines/default-hash-download-pipeline.d.ts +27 -0
- package/dist/infrastructure/pipelines/default-hash-download-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-hash-download-pipeline.js +211 -0
- package/dist/infrastructure/pipelines/default-hash-upload-pipeline.d.ts +19 -0
- package/dist/infrastructure/pipelines/default-hash-upload-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-hash-upload-pipeline.js +170 -0
- package/dist/infrastructure/pipelines/default-url-download-pipeline.d.ts +30 -0
- package/dist/infrastructure/pipelines/default-url-download-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-url-download-pipeline.js +198 -0
- package/dist/infrastructure/pipelines/default-url-upload-pipeline.d.ts +20 -0
- package/dist/infrastructure/pipelines/default-url-upload-pipeline.d.ts.map +1 -0
- package/dist/infrastructure/pipelines/default-url-upload-pipeline.js +126 -0
- package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts +13 -0
- package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts.map +1 -0
- package/dist/infrastructure/services/hash-wasm-hasher-service.js +113 -0
- package/dist/infrastructure/services/memory-delta-service.d.ts +17 -0
- package/dist/infrastructure/services/memory-delta-service.d.ts.map +1 -0
- package/dist/infrastructure/services/memory-delta-service.js +198 -0
- package/dist/infrastructure/services/memory-reconstruction-service.d.ts +25 -0
- package/dist/infrastructure/services/memory-reconstruction-service.d.ts.map +1 -0
- package/dist/infrastructure/services/memory-reconstruction-service.js +329 -0
- package/dist/infrastructure/services/memory-validation-service.d.ts +9 -0
- package/dist/infrastructure/services/memory-validation-service.d.ts.map +1 -0
- package/dist/infrastructure/services/memory-validation-service.js +33 -0
- package/package.json +43 -0
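The layout above follows a ports-and-adapters split: core/ defines the abstract storage-adapter, pipeline, and service contracts, and infrastructure/ supplies the concrete implementations (local disk, S3, GCS, Azure Blob, HTTP, SSH, and pre-signed-URL variants) plus the factories that assemble them. As orientation for the hunks below, a minimal wiring sketch; it assumes the package root re-exports the classes named in the dist files, and the declare placeholders are illustrative, not package API:

import {
    HashWasmHasherService,      // HasherService port, BLAKE3 via hash-wasm
    MemoryDeltaService,         // DeltaService port, in-memory index diffing
    DefaultUrlDownloadPipeline,
} from 'rac-delta';

// Placeholders for the remaining ports; concrete classes live under
// dist/infrastructure (reconstruction/validation services, storage adapters,
// and the factories that build them).
declare const storage: any;        // a UrlStorageAdapter implementation
declare const reconstruction: any; // a ReconstructionService implementation
declare const validation: any;     // a ValidationService implementation
declare const config: any;         // a RacDeltaConfig (chunkSize, maxConcurrency, ...)

// Constructor argument order matches the compiled pipeline below.
const delta = new MemoryDeltaService(new HashWasmHasherService());
const pipeline = new DefaultUrlDownloadPipeline(storage, reconstruction, validation, delta, config);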
package/dist/infrastructure/pipelines/default-url-download-pipeline.js
@@ -0,0 +1,198 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DefaultUrlDownloadPipeline = void 0;
+const promises_1 = require("fs/promises");
+const path_1 = require("path");
+const pipelines_1 = require("../../core/pipelines");
+const stream_to_buffer_1 = require("../../core/utils/stream-to-buffer");
+const storage_chunk_source_1 = require("../chunk-sources/storage-chunk-source");
+const memory_chunk_source_1 = require("../chunk-sources/memory-chunk-source");
+const disk_chunk_source_1 = require("../chunk-sources/disk-chunk-source");
+class DefaultUrlDownloadPipeline extends pipelines_1.UrlDownloadPipeline {
+    storage;
+    reconstruction;
+    validation;
+    delta;
+    config;
+    constructor(storage, reconstruction, validation, delta, config) {
+        super(storage, reconstruction, validation, delta, config);
+        this.storage = storage;
+        this.reconstruction = reconstruction;
+        this.validation = validation;
+        this.delta = delta;
+        this.config = config;
+    }
+    async execute(localDir, urls, strategy, plan, options) {
+        this.changeState('scanning', options);
+        const remoteIndex = await this.storage.getRemoteIndexByUrl(urls.indexUrl);
+        if (!remoteIndex) {
+            throw new Error(`No remote rd-index found with the url: ${urls.indexUrl}`);
+        }
+        const localIndex = plan
+            ? null
+            : options?.useExistingIndex
+                ? await this.findLocalIndex(localDir)
+                : await this.loadLocalIndex(localDir);
+        const planToUse = plan ? plan : await this.delta.compareForDownload(localIndex, remoteIndex);
+        let chunkSource = null;
+        if (strategy === pipelines_1.UpdateStrategy.DownloadAllFirstToMemory) {
+            this.changeState('downloading', options);
+            chunkSource = await this.downloadAllMissingChunks(urls.downloadUrls, 'memory', options);
+        }
+        if (strategy === pipelines_1.UpdateStrategy.DownloadAllFirstToDisk) {
+            this.changeState('downloading', options);
+            chunkSource = await this.downloadAllMissingChunks(urls.downloadUrls, 'disk', options);
+        }
+        if (strategy === pipelines_1.UpdateStrategy.StreamFromNetwork) {
+            const map = new Map(Object.entries(urls.downloadUrls).map(([key, value]) => [key, value.url]));
+            chunkSource = new storage_chunk_source_1.StorageChunkSource(this.storage, map);
+        }
+        if (!chunkSource) {
+            throw new Error('No chunkSource found');
+        }
+        if (planToUse.newAndModifiedFiles.length) {
+            this.changeState('reconstructing', options);
+            await this.reconstruction.reconstructAll(planToUse, localDir, chunkSource, {
+                forceRebuild: options?.force,
+                verifyAfterRebuild: true,
+                fileConcurrency: options?.fileReconstructionConcurrency,
+                inPlaceReconstructionThreshold: options?.inPlaceReconstructionThreshold,
+                onProgress: (reconstructProgress, diskSpeed, networkProgress, networkSpeed) => {
+                    this.updateProgress(reconstructProgress, 'reconstructing', diskSpeed, undefined, options);
+                    if (networkProgress) {
+                        this.updateProgress(networkProgress, 'download', 0, networkSpeed, options);
+                    }
+                },
+            });
+        }
+        if (planToUse.obsoleteChunks || planToUse.deletedFiles) {
+            this.changeState('cleaning', options);
+            await this.verifyAndDeleteObsoleteChunks(planToUse, localDir, remoteIndex, chunkSource, options);
+        }
+        await this.saveLocalIndex(localDir, remoteIndex);
+        if (chunkSource instanceof disk_chunk_source_1.DiskChunkSource) {
+            chunkSource.clear();
+        }
+    }
+    async findLocalIndex(localDir) {
+        const dir = (0, path_1.isAbsolute)(localDir) ? localDir : (0, path_1.resolve)(process.cwd(), localDir);
+        const files = await (0, promises_1.readdir)(dir);
+        const indexFile = files.find((f) => f === 'rd-index.json');
+        if (!indexFile) {
+            return null;
+        }
+        const filePath = (0, path_1.join)(localDir, indexFile);
+        const data = await (0, promises_1.readFile)(filePath, 'utf-8');
+        try {
+            const parsed = JSON.parse(data);
+            return parsed;
+        }
+        catch (error) {
+            if (error instanceof Error) {
+                throw new Error(`Invalid RDIndex JSON at ${filePath}: ${error.message}`);
+            }
+            throw error;
+        }
+    }
+    async loadLocalIndex(localDir) {
+        const localIndex = await this.delta.createIndexFromDirectory(localDir, this.config.chunkSize, this.config.maxConcurrency);
+        return localIndex;
+    }
+    async downloadAllMissingChunks(downloadUrls, target, options) {
+        if (target === 'disk' && !options?.chunksSavePath) {
+            throw new Error('Error: chunksSavePath must be provided under options');
+        }
+        let chunksSavePath = undefined;
+        if (target === 'disk' && options?.chunksSavePath) {
+            chunksSavePath = (0, path_1.isAbsolute)(options?.chunksSavePath)
+                ? options?.chunksSavePath
+                : (0, path_1.resolve)(process.cwd(), options?.chunksSavePath);
+        }
+        const chunkSource = target === 'memory' ? new memory_chunk_source_1.MemoryChunkSource() : new disk_chunk_source_1.DiskChunkSource(chunksSavePath);
+        const entries = Object.entries(downloadUrls);
+        let completed = 0;
+        let totalBytes = 0;
+        const concurrency = this.config.maxConcurrency ?? 6;
+        const queue = [...entries];
+        let lastUpdateTime = Date.now();
+        let lastBytes = 0;
+        const worker = async () => {
+            while (queue.length) {
+                const [hash, info] = queue.pop();
+                const readable = await this.storage.getChunkByUrl(info.url);
+                if (!readable) {
+                    throw new Error(`Chunk missing: ${hash}`);
+                }
+                const buffer = await (0, stream_to_buffer_1.streamToBuffer)(readable);
+                chunkSource.setChunk(hash, buffer);
+                completed++;
+                totalBytes += buffer.length;
+                const now = Date.now();
+                const elapsed = now - lastUpdateTime;
+                // each 100ms
+                if (elapsed >= 100) {
+                    const bytesDiff = totalBytes - lastBytes;
+                    const speed = bytesDiff / (elapsed / 1000);
+                    lastUpdateTime = now;
+                    lastBytes = totalBytes;
+                    const percent = ((completed / entries.length) * 100).toFixed(1);
+                    this.updateProgress(Number(percent), 'download', 0, speed, options);
+                }
+            }
+        };
+        await Promise.all(Array.from({ length: concurrency }, () => worker()));
+        const percent = 100;
+        const totalTime = (Date.now() - lastUpdateTime) / 1000;
+        const avgSpeed = totalBytes / (totalTime || 1);
+        this.updateProgress(percent, 'download', 0, avgSpeed, options);
+        return chunkSource;
+    }
+    async verifyAndDeleteObsoleteChunks(plan, localDir, remoteIndex, chunkSource, options) {
+        const dir = (0, path_1.isAbsolute)(localDir) ? localDir : (0, path_1.resolve)(process.cwd(), localDir);
+        const obsoleteByFile = this.groupByFile(plan.obsoleteChunks);
+        const deletedFiles = [];
+        const verifiedFiles = [];
+        const rebuiltFiles = [];
+        const allFiles = new Set([...plan.deletedFiles, ...obsoleteByFile.keys()]);
+        const totalFiles = allFiles.size;
+        let completedFiles = 0;
+        for (const filePath of allFiles) {
+            const absPath = (0, path_1.join)(dir, filePath);
+            const remoteFile = remoteIndex.files.find((file) => file.path === filePath);
+            // Fully removed file
+            if (!remoteFile || plan.deletedFiles.includes(filePath)) {
+                await (0, promises_1.rm)(absPath, { force: true });
+                deletedFiles.push(filePath);
+            }
+            else {
+                const isValid = await this.validation.validateFile(remoteFile, absPath);
+                if (!isValid) {
+                    await this.reconstruction.reconstructFile(remoteFile, absPath, chunkSource);
+                    rebuiltFiles.push(filePath);
+                }
+                else {
+                    verifiedFiles.push(filePath);
+                }
+            }
+            completedFiles++;
+            this.updateProgress((completedFiles / totalFiles) * 100, 'deleting', undefined, undefined, options);
+        }
+        return { deletedFiles, verifiedFiles, rebuiltFiles };
+    }
+    async saveLocalIndex(localDir, index) {
+        const dir = (0, path_1.isAbsolute)(localDir) ? localDir : (0, path_1.resolve)(process.cwd(), localDir);
+        const indexPath = (0, path_1.join)(dir, 'rd-index.json');
+        await (0, promises_1.writeFile)(indexPath, JSON.stringify(index, null, 2), 'utf-8');
+    }
+    groupByFile(chunks) {
+        const map = new Map();
+        for (const chunk of chunks) {
+            if (!map.has(chunk.filePath)) {
+                map.set(chunk.filePath, []);
+            }
+            map.get(chunk.filePath).push(chunk);
+        }
+        return map;
+    }
+}
+exports.DefaultUrlDownloadPipeline = DefaultUrlDownloadPipeline;
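For orientation, a hedged sketch of driving execute() above. The argument shapes (indexUrl plus a hash-to-ChunkUrlInfo map, an UpdateStrategy, an optional precomputed plan, and options) are read off the compiled JS; the re-export from the package root and the placeholder values are assumptions:

import { UpdateStrategy } from 'rac-delta';

declare const pipeline: any; // a DefaultUrlDownloadPipeline wired as in the sketch after the file list

// StreamFromNetwork fetches chunks lazily through a StorageChunkSource;
// DownloadAllFirstToMemory / DownloadAllFirstToDisk stage every missing chunk
// up front (in RAM, or under options.chunksSavePath) before reconstruction.
await pipeline.execute(
    './app',                                  // local directory to bring up to date
    {
        indexUrl: 'https://cdn.example.com/rd-index.json',
        downloadUrls: {},                     // '<chunk-hash>': { url: '...' } for each chunk the plan may need
    },
    UpdateStrategy.StreamFromNetwork,
    null,                                     // no precomputed plan: delta.compareForDownload builds one
    { useExistingIndex: true },               // reuse rd-index.json instead of re-hashing the directory
);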
package/dist/infrastructure/pipelines/default-url-upload-pipeline.d.ts
@@ -0,0 +1,20 @@
+import { UploadOptions, UrlUploadPipeline } from '../../core/pipelines';
+import { ChunkUrlInfo, RDIndex } from '../../core/models';
+import { UrlStorageAdapter } from '../../core/adapters';
+import { RacDeltaConfig } from '../../core/config';
+import { Nullish } from '../../core/types';
+export declare class DefaultUrlUploadPipeline extends UrlUploadPipeline {
+    protected readonly storage: UrlStorageAdapter;
+    protected readonly config: RacDeltaConfig;
+    constructor(storage: UrlStorageAdapter, config: RacDeltaConfig);
+    execute(localIndex: RDIndex, urls: {
+        uploadUrls: Record<string, ChunkUrlInfo>;
+        deleteUrls?: Nullish<string[]>;
+        indexUrl: string;
+    }, options?: Nullish<UploadOptions>): Promise<RDIndex>;
+    uploadMissingChunks(uploadUrls: Record<string, ChunkUrlInfo>, options?: Nullish<UploadOptions>): Promise<void>;
+    deleteObsoleteChunks(deleteUrls: string[], options?: UploadOptions): Promise<void>;
+    uploadIndex(index: RDIndex, indexUrl: string): Promise<void>;
+    private groupChunksByFile;
+}
+//# sourceMappingURL=default-url-upload-pipeline.d.ts.map
package/dist/infrastructure/pipelines/default-url-upload-pipeline.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"default-url-upload-pipeline.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/pipelines/default-url-upload-pipeline.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,aAAa,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AACxE,OAAO,EAAE,YAAY,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAC1D,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE3C,qBAAa,wBAAyB,SAAQ,iBAAiB;IAE3D,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,iBAAiB;IAC7C,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,cAAc;gBADtB,OAAO,EAAE,iBAAiB,EAC1B,MAAM,EAAE,cAAc;IAKrC,OAAO,CACX,UAAU,EAAE,OAAO,EACnB,IAAI,EAAE;QACJ,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;QACzC,UAAU,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;QAC/B,QAAQ,EAAE,MAAM,CAAC;KAClB,EACD,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAC/B,OAAO,CAAC,OAAO,CAAC;IAiBb,mBAAmB,CACvB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,EACxC,OAAO,CAAC,EAAE,OAAO,CAAC,aAAa,CAAC,GAC/B,OAAO,CAAC,IAAI,CAAC;IAmDV,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE,aAAa;IAoDlE,WAAW,CAAC,KAAK,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM;IAIlD,OAAO,CAAC,iBAAiB;CAa1B"}
package/dist/infrastructure/pipelines/default-url-upload-pipeline.js
@@ -0,0 +1,126 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DefaultUrlUploadPipeline = void 0;
+const path_1 = require("path");
+const promises_1 = require("fs/promises");
+const stream_1 = require("stream");
+const pipelines_1 = require("../../core/pipelines");
+class DefaultUrlUploadPipeline extends pipelines_1.UrlUploadPipeline {
+    storage;
+    config;
+    constructor(storage, config) {
+        super(storage, config);
+        this.storage = storage;
+        this.config = config;
+    }
+    async execute(localIndex, urls, options) {
+        if (urls.uploadUrls.length) {
+            this.changeState('uploading', options);
+            await this.uploadMissingChunks(urls.uploadUrls, options);
+        }
+        if (urls.deleteUrls?.length) {
+            this.changeState('cleaning', options);
+            await this.deleteObsoleteChunks(urls.deleteUrls, options);
+        }
+        this.changeState('finalizing', options);
+        await this.uploadIndex(localIndex, urls.indexUrl);
+        return localIndex;
+    }
+    async uploadMissingChunks(uploadUrls, options) {
+        const chunks = Object.values(uploadUrls);
+        if (!chunks.length) {
+            return;
+        }
+        const concurrency = this.config.maxConcurrency ?? 5;
+        const grouped = this.groupChunksByFile(chunks);
+        const totalChunks = chunks.length;
+        let uploadedChunks = 0;
+        let uploadedBytes = 0;
+        const startTime = Date.now();
+        const queue = [...grouped];
+        const worker = async () => {
+            while (queue.length) {
+                const [filePath, fileChunks] = queue.pop();
+                fileChunks.sort((a, b) => a.offset - b.offset);
+                const finalFilePath = (0, path_1.isAbsolute)(filePath) ? filePath : (0, path_1.resolve)(process.cwd(), filePath);
+                const fileHandle = await (0, promises_1.open)(finalFilePath, 'r');
+                const buffer = Buffer.alloc(this.config.chunkSize);
+                try {
+                    for (const chunk of fileChunks) {
+                        const { url, offset, size } = chunk;
+                        const { bytesRead } = await fileHandle.read(buffer, 0, size, offset);
+                        const data = buffer.subarray(0, bytesRead);
+                        await this.storage.putChunkByUrl(url, stream_1.Readable.from(data));
+                        uploadedChunks++;
+                        uploadedBytes += bytesRead;
+                        const percent = ((uploadedChunks / totalChunks) * 100).toFixed(1);
+                        const elapsed = (Date.now() - startTime) / 1000; // seconds
+                        const speed = uploadedBytes / elapsed;
+                        this.updateProgress(Number(percent), 'upload', speed, options);
+                    }
+                }
+                finally {
+                    await fileHandle.close();
+                }
+            }
+        };
+        await Promise.all(Array.from({ length: concurrency }, () => worker()));
+    }
+    async deleteObsoleteChunks(deleteUrls, options) {
+        if (!deleteUrls.length) {
+            return;
+        }
+        const concurrency = this.config.maxConcurrency ?? 5;
+        const queue = [...deleteUrls];
+        const total = queue.length;
+        let deleted = 0;
+        if (total === 0) {
+            return;
+        }
+        const failed = [];
+        const maxRetries = 3;
+        const worker = async () => {
+            while (queue.length) {
+                const url = queue.pop();
+                if (!url) {
+                    break;
+                }
+                let success = false;
+                for (let attempt = 1; attempt <= maxRetries && !success; attempt++) {
+                    try {
+                        await this.storage.deleteChunkByUrl(url);
+                        success = true;
+                    }
+                    catch (err) {
+                        if (attempt === maxRetries) {
+                            failed.push(url);
+                        }
+                        else {
+                            await new Promise((res) => setTimeout(res, 100 * attempt));
+                        }
+                    }
+                }
+                deleted++;
+                this.updateProgress(deleted / total, 'deleting', undefined, options);
+            }
+        };
+        await Promise.all(Array.from({ length: concurrency }, () => worker()));
+        if (failed.length > 0) {
+            throw new Error(`Failed to delete ${failed.length}/${total} chunks: ${failed.join(', ')}`);
+        }
+    }
+    async uploadIndex(index, indexUrl) {
+        await this.storage.putRemoteIndexByUrl(indexUrl, index);
+    }
+    groupChunksByFile(chunks) {
+        const groups = new Map();
+        for (const chunk of chunks) {
+            if (!groups.has(chunk.filePath)) {
+                groups.set(chunk.filePath, []);
+            }
+            groups.get(chunk.filePath).push(chunk);
+        }
+        return Array.from(groups.entries());
+    }
+}
+exports.DefaultUrlUploadPipeline = DefaultUrlUploadPipeline;
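A matching sketch for the upload side, using the execute() signature from the .d.ts hunk above. The ChunkUrlInfo fields (url, filePath, offset, size) are the ones the JS destructures; everything declared here is a placeholder rather than package API:

import { DefaultUrlUploadPipeline } from 'rac-delta';

declare const storage: any;    // UrlStorageAdapter: putChunkByUrl / deleteChunkByUrl / putRemoteIndexByUrl
declare const config: any;     // RacDeltaConfig with chunkSize and maxConcurrency
declare const localIndex: any; // RDIndex produced by DeltaService.createIndexFromDirectory

const uploader = new DefaultUrlUploadPipeline(storage, config);
await uploader.execute(localIndex, {
    // one pre-signed PUT target per missing chunk hash, e.g.
    // '<chunk-hash>': { url: '...', filePath: 'app.bin', offset: 0, size: 1048576 }
    uploadUrls: {},
    deleteUrls: [],                                    // pre-signed DELETEs for now-unreferenced chunks
    indexUrl: 'https://cdn.example.com/rd-index.json', // where the new manifest is PUT last
});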
package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts
@@ -0,0 +1,13 @@
+import { HasherService, StreamingHasher } from '../../core/services';
+import { AsyncChunkStream } from '../../core/services';
+import { FileEntry, Chunk } from '../../core/models';
+import { Nullish } from '../../core/types';
+export declare class HashWasmHasherService implements HasherService {
+    hashFile(filePath: string, rootDir: string, chunkSize: number): Promise<FileEntry>;
+    hashStream(stream: AsyncChunkStream, onChunk?: Nullish<(chunk: Uint8Array) => void>): Promise<Chunk[]>;
+    verifyChunk(data: Uint8Array, expectedHash: string): Promise<boolean>;
+    hashBuffer(data: Uint8Array): Promise<string>;
+    verifyFile(path: string, expectedHash: string): Promise<boolean>;
+    createStreamingHasher(): Promise<StreamingHasher>;
+}
+//# sourceMappingURL=hash-wasm-hasher-service.d.ts.map
package/dist/infrastructure/services/hash-wasm-hasher-service.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"hash-wasm-hasher-service.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/services/hash-wasm-hasher-service.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,aAAa,EAAE,eAAe,EAAE,MAAM,qBAAqB,CAAC;AAErE,OAAO,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AACvD,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,mBAAmB,CAAC;AACrD,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE3C,qBAAa,qBAAsB,YAAW,aAAa;IACnD,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC;IAiDlF,UAAU,CACd,MAAM,EAAE,gBAAgB,EACxB,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,KAAK,EAAE,UAAU,KAAK,IAAI,CAAC,GAC7C,OAAO,CAAC,KAAK,EAAE,CAAC;IAqCb,WAAW,CAAC,IAAI,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAQrE,UAAU,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC;IAM7C,UAAU,CAAC,IAAI,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAgBhE,qBAAqB,IAAI,OAAO,CAAC,eAAe,CAAC;CAYxD"}
package/dist/infrastructure/services/hash-wasm-hasher-service.js
@@ -0,0 +1,113 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HashWasmHasherService = void 0;
+const path_1 = require("path");
+const hash_wasm_1 = require("hash-wasm");
+const fs_1 = require("fs");
+const promises_1 = require("fs/promises");
+const invariant_1 = require("../../core/utils/invariant");
+class HashWasmHasherService {
+    async hashFile(filePath, rootDir, chunkSize) {
+        (0, invariant_1.invariant)('path must be a valid string', typeof filePath === 'string' && filePath !== '');
+        (0, invariant_1.invariant)('chunkSize must be a valid number > 0', chunkSize > 0);
+        const dir = (0, path_1.isAbsolute)(rootDir) ? rootDir : (0, path_1.resolve)(process.cwd(), rootDir);
+        const finalFilePath = (0, path_1.join)(dir, filePath);
+        try {
+            const stats = await (0, promises_1.stat)(finalFilePath);
+            const fileHasher = await (0, hash_wasm_1.createBLAKE3)();
+            const chunks = [];
+            let offset = 0;
+            const stream = (0, fs_1.createReadStream)(finalFilePath, { highWaterMark: chunkSize });
+            for await (const chunk of stream) {
+                const chunkHasher = await (0, hash_wasm_1.createBLAKE3)();
+                chunkHasher.update(chunk);
+                const chunkHash = chunkHasher.digest('hex');
+                fileHasher.update(chunk);
+                chunks.push({
+                    hash: chunkHash,
+                    offset,
+                    size: chunk.length,
+                });
+                offset += chunk.length;
+            }
+            const fileHash = fileHasher.digest('hex');
+            return {
+                path: filePath,
+                size: stats.size,
+                modifiedAt: stats.mtimeMs,
+                hash: fileHash,
+                chunks,
+            };
+        }
+        catch (err) {
+            if (err instanceof Error) {
+                throw new Error(`HasherService.hashFile failed: ${err.message}`);
+            }
+            throw err;
+        }
+    }
+    async hashStream(stream, onChunk) {
+        const chunks = [];
+        let offset = 0;
+        try {
+            for await (const chunk of stream) {
+                const chunkHasher = await (0, hash_wasm_1.createBLAKE3)();
+                chunkHasher.update(chunk);
+                const chunkHash = chunkHasher.digest('hex');
+                if (onChunk) {
+                    onChunk(chunk);
+                }
+                chunks.push({
+                    hash: chunkHash,
+                    offset,
+                    size: chunk.length,
+                });
+                offset += chunk.length;
+            }
+            if (stream.close) {
+                await stream.close();
+            }
+            return chunks;
+        }
+        catch (err) {
+            if (err instanceof Error) {
+                throw new Error(`HasherService.hashStream failed: ${err.message}`);
+            }
+            throw err;
+        }
+    }
+    async verifyChunk(data, expectedHash) {
+        const hasher = await (0, hash_wasm_1.createBLAKE3)();
+        hasher.update(data);
+        const actualHash = hasher.digest('hex');
+        return actualHash === expectedHash;
+    }
+    async hashBuffer(data) {
+        const hasher = await (0, hash_wasm_1.createBLAKE3)();
+        hasher.update(data);
+        return hasher.digest('hex');
+    }
+    async verifyFile(path, expectedHash) {
+        (0, invariant_1.invariant)('path must be a valid string', typeof path === 'string' && path !== '');
+        const finalPath = (0, path_1.isAbsolute)(path) ? path : (0, path_1.resolve)(process.cwd(), path);
+        const hasher = await (0, hash_wasm_1.createBLAKE3)();
+        const stream = (0, fs_1.createReadStream)(finalPath);
+        for await (const chunk of stream) {
+            hasher.update(chunk);
+        }
+        const actualHash = hasher.digest('hex');
+        return actualHash === expectedHash;
+    }
+    async createStreamingHasher() {
+        const native = await (0, hash_wasm_1.createBLAKE3)();
+        return {
+            update(data) {
+                native.update(data);
+            },
+            digest(encoding = 'hex') {
+                return native.digest(encoding);
+            },
+        };
+    }
+}
+exports.HashWasmHasherService = HashWasmHasherService;
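A minimal usage sketch for the hasher above, relying only on the method signatures in its .d.ts hunk; the root re-export is an assumption:

import { HashWasmHasherService } from 'rac-delta';

const hasher = new HashWasmHasherService();
const data = new TextEncoder().encode('hello rac-delta');

const digest = await hasher.hashBuffer(data);        // one-shot BLAKE3, hex string
console.log(await hasher.verifyChunk(data, digest)); // true: re-hash and compare

// Incremental hashing over split updates yields the same digest.
const streaming = await hasher.createStreamingHasher();
streaming.update(data.subarray(0, 5));
streaming.update(data.subarray(5));
console.log(streaming.digest('hex') === digest);     // true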
package/dist/infrastructure/services/memory-delta-service.d.ts
@@ -0,0 +1,17 @@
+import { AsyncChunkStream, DeltaService, HasherService } from '../../core/services';
+import { RDIndex, DeltaPlan, FileEntry } from '../../core/models';
+import { Nullish } from '../../core/types';
+export declare class MemoryDeltaService implements DeltaService {
+    private readonly hasher;
+    constructor(hasher: HasherService);
+    createIndexFromDirectory(rootPath: string, chunkSize: number, concurrency?: number, ignorePatterns?: Nullish<string[]>): Promise<RDIndex>;
+    createFileEntryFromStream(stream: AsyncChunkStream, path: string): Promise<FileEntry>;
+    private walkFiles;
+    private matchesAnyPattern;
+    private globToRegex;
+    compare(source: RDIndex, target: RDIndex | null): DeltaPlan;
+    mergePlans(base: DeltaPlan, updates: DeltaPlan): DeltaPlan;
+    compareForUpload(localIndex: RDIndex, remoteIndex: RDIndex | null): Promise<DeltaPlan>;
+    compareForDownload(localIndex: RDIndex | null, remoteIndex: RDIndex): Promise<DeltaPlan>;
+}
+//# sourceMappingURL=memory-delta-service.d.ts.map
package/dist/infrastructure/services/memory-delta-service.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"memory-delta-service.d.ts","sourceRoot":"","sources":["../../../src/infrastructure/services/memory-delta-service.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAC;AACpF,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,EAAc,MAAM,mBAAmB,CAAC;AAE9E,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE3C,qBAAa,kBAAmB,YAAW,YAAY;IACzC,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAN,MAAM,EAAE,aAAa;IAE5C,wBAAwB,CAC5B,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,WAAW,SAAI,EACf,cAAc,CAAC,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,GACjC,OAAO,CAAC,OAAO,CAAC;IA8Cb,yBAAyB,CAAC,MAAM,EAAE,gBAAgB,EAAE,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC;YAqB5E,SAAS;IAuBxB,OAAO,CAAC,iBAAiB;IAYzB,OAAO,CAAC,WAAW;IAanB,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAiE3D,UAAU,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,SAAS,GAAG,SAAS;IA0CpD,gBAAgB,CAAC,UAAU,EAAE,OAAO,EAAE,WAAW,EAAE,OAAO,GAAG,IAAI,GAAG,OAAO,CAAC,SAAS,CAAC;IAkBtF,kBAAkB,CAAC,UAAU,EAAE,OAAO,GAAG,IAAI,EAAE,WAAW,EAAE,OAAO,GAAG,OAAO,CAAC,SAAS,CAAC;CAG/F"}
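Finally, a sketch of the delta service's declared surface: build an index of a directory, then diff it against the previously published one (null on a first upload) to obtain the DeltaPlan the pipelines consume. Argument values are illustrative; the root re-export is assumed:

import { HashWasmHasherService, MemoryDeltaService } from 'rac-delta';

const delta = new MemoryDeltaService(new HashWasmHasherService());

// Hash every file under ./app into an RDIndex: 1 MiB chunks, 4 concurrent
// workers, skipping paths that match the glob patterns.
const localIndex = await delta.createIndexFromDirectory('./app', 1024 * 1024, 4, ['**/*.log']);

// DeltaPlan of new/modified files plus obsolete chunks and deleted files.
const plan = await delta.compareForUpload(localIndex, null);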