@milaboratories/pl-drivers 1.5.9 → 1.5.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/clients/constructors.d.ts.map +1 -1
- package/dist/clients/download.d.ts.map +1 -1
- package/dist/clients/logs.d.ts.map +1 -1
- package/dist/clients/ls_api.d.ts.map +1 -1
- package/dist/clients/progress.d.ts.map +1 -1
- package/dist/clients/upload.d.ts.map +1 -1
- package/dist/drivers/download_blob.d.ts +2 -2
- package/dist/drivers/download_blob.d.ts.map +1 -1
- package/dist/drivers/download_blob_task.d.ts +2 -2
- package/dist/drivers/download_blob_task.d.ts.map +1 -1
- package/dist/drivers/download_blob_url/driver.d.ts +46 -0
- package/dist/drivers/download_blob_url/driver.d.ts.map +1 -0
- package/dist/drivers/download_blob_url/driver_id.d.ts +6 -0
- package/dist/drivers/download_blob_url/driver_id.d.ts.map +1 -0
- package/dist/drivers/download_blob_url/snapshot.d.ts +7 -0
- package/dist/drivers/download_blob_url/snapshot.d.ts.map +1 -0
- package/dist/drivers/download_blob_url/task.d.ts +63 -0
- package/dist/drivers/download_blob_url/task.d.ts.map +1 -0
- package/dist/drivers/download_blob_url/url.d.ts +6 -0
- package/dist/drivers/download_blob_url/url.d.ts.map +1 -0
- package/dist/drivers/download_url.d.ts +2 -2
- package/dist/drivers/download_url.d.ts.map +1 -1
- package/dist/drivers/helpers/download_local_handle.d.ts.map +1 -1
- package/dist/drivers/helpers/download_remote_handle.d.ts.map +1 -1
- package/dist/drivers/helpers/files_cache.d.ts.map +1 -1
- package/dist/drivers/helpers/logs_handle.d.ts +1 -1
- package/dist/drivers/helpers/logs_handle.d.ts.map +1 -1
- package/dist/drivers/helpers/ls_remote_import_handle.d.ts +1 -1
- package/dist/drivers/helpers/ls_remote_import_handle.d.ts.map +1 -1
- package/dist/drivers/helpers/ls_storage_entry.d.ts +1 -1
- package/dist/drivers/helpers/ls_storage_entry.d.ts.map +1 -1
- package/dist/drivers/logs.d.ts +2 -2
- package/dist/drivers/logs.d.ts.map +1 -1
- package/dist/drivers/logs_stream.d.ts +2 -2
- package/dist/drivers/logs_stream.d.ts.map +1 -1
- package/dist/drivers/ls.d.ts +1 -1
- package/dist/drivers/ls.d.ts.map +1 -1
- package/dist/drivers/types.d.ts.map +1 -1
- package/dist/drivers/upload.d.ts +1 -1
- package/dist/drivers/upload.d.ts.map +1 -1
- package/dist/drivers/upload_task.d.ts +1 -1
- package/dist/drivers/upload_task.d.ts.map +1 -1
- package/dist/drivers/virtual_storages.d.ts.map +1 -1
- package/dist/helpers/download.d.ts.map +1 -1
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2310 -2067
- package/dist/index.mjs.map +1 -1
- package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.client.d.ts.map +1 -1
- package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.d.ts.map +1 -1
- package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.d.ts.map +1 -1
- package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.client.d.ts.map +1 -1
- package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.d.ts.map +1 -1
- package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.client.d.ts.map +1 -1
- package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.d.ts.map +1 -1
- package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.client.d.ts.map +1 -1
- package/dist/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.d.ts.map +1 -1
- package/dist/proto/google/api/http.d.ts +28 -28
- package/dist/proto/google/api/http.d.ts.map +1 -1
- package/dist/proto/google/protobuf/descriptor.d.ts.map +1 -1
- package/dist/proto/google/protobuf/duration.d.ts.map +1 -1
- package/dist/proto/google/protobuf/timestamp.d.ts.map +1 -1
- package/package.json +11 -6
- package/src/clients/constructors.ts +11 -11
- package/src/clients/download.test.ts +11 -10
- package/src/clients/download.ts +15 -14
- package/src/clients/logs.ts +13 -12
- package/src/clients/ls_api.ts +7 -7
- package/src/clients/progress.ts +15 -13
- package/src/clients/upload.test.ts +6 -5
- package/src/clients/upload.ts +28 -26
- package/src/drivers/download_blob.test.ts +21 -20
- package/src/drivers/download_blob.ts +47 -42
- package/src/drivers/download_blob_task.ts +25 -21
- package/src/drivers/download_blob_url/driver.ts +225 -0
- package/src/drivers/download_blob_url/driver_id.ts +11 -0
- package/src/drivers/download_blob_url/snapshot.ts +20 -0
- package/src/drivers/download_blob_url/task.ts +222 -0
- package/src/drivers/download_blob_url/url.test.ts +39 -0
- package/src/drivers/download_blob_url/url.ts +43 -0
- package/src/drivers/download_url.test.ts +3 -3
- package/src/drivers/download_url.ts +21 -20
- package/src/drivers/helpers/download_local_handle.ts +2 -2
- package/src/drivers/helpers/download_remote_handle.ts +8 -8
- package/src/drivers/helpers/files_cache.test.ts +7 -6
- package/src/drivers/helpers/files_cache.ts +2 -1
- package/src/drivers/helpers/helpers.ts +1 -1
- package/src/drivers/helpers/logs_handle.ts +7 -7
- package/src/drivers/helpers/ls_remote_import_handle.ts +7 -7
- package/src/drivers/helpers/ls_storage_entry.ts +6 -5
- package/src/drivers/logs.test.ts +23 -22
- package/src/drivers/logs.ts +13 -12
- package/src/drivers/logs_stream.ts +42 -37
- package/src/drivers/ls.test.ts +2 -2
- package/src/drivers/ls.ts +38 -35
- package/src/drivers/types.ts +12 -11
- package/src/drivers/upload.test.ts +19 -17
- package/src/drivers/upload.ts +30 -25
- package/src/drivers/upload_task.ts +23 -19
- package/src/drivers/virtual_storages.ts +6 -6
- package/src/helpers/download.ts +8 -8
- package/src/index.ts +2 -0
- package/src/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.client.ts +4 -4
- package/src/proto/github.com/milaboratory/pl/controllers/shared/grpc/downloadapi/protocol.ts +88 -73
- package/src/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.client.ts +2 -2
- package/src/proto/github.com/milaboratory/pl/controllers/shared/grpc/lsapi/protocol.ts +71 -56
- package/src/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.client.ts +6 -5
- package/src/proto/github.com/milaboratory/pl/controllers/shared/grpc/progressapi/protocol.ts +130 -106
- package/src/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.client.ts +14 -10
- package/src/proto/github.com/milaboratory/pl/controllers/shared/grpc/streamingapi/protocol.ts +142 -121
- package/src/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.client.ts +11 -8
- package/src/proto/github.com/milaboratory/pl/controllers/shared/grpc/uploadapi/protocol.ts +216 -174
- package/src/proto/google/api/http.ts +95 -86
- package/src/proto/google/protobuf/descriptor.ts +674 -593
- package/src/proto/google/protobuf/duration.ts +31 -26
- package/src/proto/google/protobuf/timestamp.ts +52 -44
package/src/drivers/download_blob_url/task.ts:

@@ -0,0 +1,222 @@
+import { Transform, Writable } from 'node:stream';
+import * as zlib from 'node:zlib';
+import * as tar from 'tar-fs';
+import path from 'path';
+import fs from 'fs';
+import * as fsp from 'fs/promises';
+import { NetworkError400 } from '../../helpers/download';
+import type { Watcher } from '@milaboratories/computable';
+import { ChangeSource } from '@milaboratories/computable';
+import type { MiLogger, Signer } from '@milaboratories/ts-helpers';
+import { CallersCounter, createPathAtomically, ensureDirExists, fileExists, notEmpty } from '@milaboratories/ts-helpers';
+import type { DownloadableBlobSnapshot } from './snapshot';
+import { UnknownStorageError, WrongLocalFileUrl, type ClientDownload } from '../../clients/download';
+import type { ArchiveFormat, FolderURL } from '@milaboratories/pl-model-common';
+import { newFolderURL } from './url';
+import decompress from 'decompress';
+import { assertNever } from '@protobuf-ts/runtime';
+import { stringifyWithResourceId } from '@milaboratories/pl-client';
+
+export type URLResult = {
+  url?: FolderURL;
+  error?: string;
+};
+
+/** Downloads and extracts an archive to a directory. */
+export class DownloadAndUnarchiveTask {
+  readonly counter = new CallersCounter();
+  readonly change = new ChangeSource();
+  private readonly signalCtl = new AbortController();
+  error: string | undefined;
+  done = false;
+  size = 0;
+  private url: FolderURL | undefined;
+  private state: DownloadCtx | undefined;
+
+  constructor(
+    private readonly logger: MiLogger,
+    private readonly signer: Signer,
+    readonly saveDir: string,
+    readonly path: string,
+    readonly rInfo: DownloadableBlobSnapshot,
+    readonly format: ArchiveFormat,
+    private readonly clientDownload: ClientDownload,
+  ) {}
+
+  /** A debug info of the task. */
+  public info() {
+    return {
+      rInfo: this.rInfo,
+      format: this.format,
+      path: this.path,
+      done: this.done,
+      size: this.size,
+      error: this.error,
+      taskHistory: this.state,
+    };
+  }
+
+  attach(w: Watcher, callerId: string) {
+    this.counter.inc(callerId);
+    if (!this.done) this.change.attachWatcher(w);
+  }
+
+  async download() {
+    try {
+      const size = await this.downloadAndDecompress(this.signalCtl.signal);
+      this.setDone(size);
+      this.change.markChanged();
+
+      this.logger.info(`blob to URL task is done: ${stringifyWithResourceId(this.info())}`);
+    } catch (e: any) {
+      this.logger.warn(`a error was produced: ${e} for blob to URL task: ${stringifyWithResourceId(this.info())}`);
+
+      if (nonRecoverableError(e)) {
+        this.setError(e);
+        this.change.markChanged();
+        // Just in case we were half-way extracting an archive.
+        await rmRFDir(this.path);
+        return;
+      }
+
+      throw e;
+    }
+  }
+
+  /** Does the download part and keeps a state of the process. */
+  private async downloadAndDecompress(signal: AbortSignal): Promise<number> {
+    this.state = {};
+
+    this.state.parentDir = path.dirname(this.path);
+    await ensureDirExists(this.state.parentDir);
+
+    this.state.fileExisted = await fileExists(this.path);
+    if (this.state.fileExisted) {
+      return await dirSize(this.path);
+    }
+
+    const { content, size } = await this.clientDownload.downloadBlob(
+      this.rInfo, {}, signal,
+    );
+    this.state.downloaded = true;
+
+    await createPathAtomically(this.logger, this.path, async (fPath: string) => {
+      this.state!.tempPath = fPath;
+      this.state!.archiveFormat = this.format;
+
+      switch (this.format) {
+        case 'tar':
+          await fsp.mkdir(fPath); // throws if a directory already exists.
+          const simpleUntar = Writable.toWeb(tar.extract(fPath));
+          await content.pipeTo(simpleUntar, { signal });
+          return;
+
+        case 'tgz':
+          await fsp.mkdir(fPath); // throws if a directory already exists.
+          const gunzip = Transform.toWeb(zlib.createGunzip());
+          const untar = Writable.toWeb(tar.extract(fPath));
+
+          await content
+            .pipeThrough(gunzip, { signal })
+            .pipeTo(untar, { signal });
+          return;
+
+        case 'zip':
+          this.state!.zipPath = this.path + '.zip';
+
+          const f = Writable.toWeb(fs.createWriteStream(this.state!.zipPath));
+          await content.pipeTo(f, { signal });
+          this.state!.zipPathCreated = true;
+
+          await decompress(this.path + '.zip', fPath);
+
+          await fs.promises.rm(this.state!.zipPath);
+          this.state!.zipPathDeleted = true;
+
+          return;
+
+        default:
+          assertNever(this.format);
+      }
+    });
+
+    this.state.pathCreated = true;
+
+    return size;
+  }
+
+  getURL(): URLResult | undefined {
+    if (this.done) return { url: notEmpty(this.url) };
+
+    if (this.error) return { error: this.error };
+
+    return undefined;
+  }
+
+  private setDone(size: number) {
+    this.done = true;
+    this.size = size;
+    this.url = newFolderURL(this.signer, this.saveDir, this.path);
+  }
+
+  private setError(e: any) {
+    this.error = String(e);
+  }
+
+  abort(reason: string) {
+    this.signalCtl.abort(new URLAborted(reason));
+  }
+}
+
+/** Gets a directory size by calculating sizes recursively. */
+async function dirSize(dir: string): Promise<number> {
+  const files = await fsp.readdir(dir, { withFileTypes: true });
+  const sizes = await Promise.all(
+    files.map(async (file) => {
+      const fPath = path.join(dir, file.name);
+
+      if (file.isDirectory()) return await dirSize(fPath);
+
+      const stat = await fsp.stat(fPath);
+      return stat.size;
+    }),
+  );
+
+  return sizes.reduce((sum, size) => sum + size, 0);
+}
+
+/** Do rm -rf on dir. */
+export async function rmRFDir(path: string) {
+  await fsp.rm(path, { recursive: true, force: true });
+}
+
+/** Just a type that adds lots of context when the error happens. */
+type DownloadCtx = {
+  parentDir?: string;
+  fileExisted?: boolean;
+  downloaded?: boolean;
+  archiveFormat?: ArchiveFormat;
+  tempPath?: string;
+  zipPath?: string;
+  zipPathCreated?: boolean;
+  zipPathDeleted?: boolean;
+  pathCreated?: boolean;
+};
+
+/** Throws when a downloading aborts. */
+class URLAborted extends Error {}
+
+export function nonRecoverableError(e: any) {
+  return (
+    e instanceof URLAborted
+    || e instanceof NetworkError400
+    || e instanceof UnknownStorageError
+    || e instanceof WrongLocalFileUrl
+    // file that we downloads from was moved or deleted.
+    || e?.code == 'ENOENT'
+    // A resource was deleted.
+    || (e.name == 'RpcError' && (e.code == 'NOT_FOUND' || e.code == 'ABORTED'))
+    // wrong archive format
+    || (String(e).includes('incorrect header check'))
+  );
+}

package/src/drivers/download_blob_url/url.test.ts:

@@ -0,0 +1,39 @@
+import path from 'path';
+import { describe, test, expect } from '@jest/globals';
+import { isFolderURL, getPathForFolderURL } from './url';
+import type { Signer } from '@milaboratories/ts-helpers';
+import { HmacSha256Signer } from '@milaboratories/ts-helpers';
+import { FolderURL } from '@milaboratories/pl-model-common';
+
+describe('isFolderURL', () => {
+  test('should return true for a valid URL', () => {
+    const folderUrl = 'plblob+folder://5976f110166cc5d8803c41181fbb708470b69075db19ad98d8971df350732028.31330_tgz.blob/path/to/folder';
+    expect(isFolderURL(folderUrl)).toBe(true);
+  });
+
+  test('should return false for URL with a different protocol', () => {
+    expect(isFolderURL('https://example.com/path/to/folder')).toBe(false);
+  });
+
+  test('should throw error for incorrect URLs', () => {
+    expect(() => {
+      isFolderURL('not_a_valid_url');
+    }).toThrow();
+  });
+});
+
+describe('getPathForFolderURL', () => {
+  const signer: Signer = {
+    sign: (data: string | Uint8Array) => 'signature',
+
+    verify: (data: string | Uint8Array, signature: string, validationErrorMessage?: string) => null,
+  };
+
+  test('should be ok', () => {
+    const folderURL = 'plblob+folder://signature.subfolder_tgz.blob/path/to/resource.html';
+
+    const result = getPathForFolderURL(signer, folderURL, '/test/dir');
+
+    expect(result).toBe(path.join('/test/dir/subfolder_tgz/path/to/resource.html'));
+  });
+});

package/src/drivers/download_blob_url/url.ts:

@@ -0,0 +1,43 @@
+import type { FolderURL } from '@milaboratories/pl-model-common';
+import { ArchiveFormat } from '@milaboratories/pl-model-common';
+import type { Signer } from '@milaboratories/ts-helpers';
+import path from 'path';
+
+export function newFolderURL(signer: Signer, saveDir: string, fPath: string): FolderURL {
+  const p = path.relative(saveDir, fPath);
+  const sign = signer.sign(p);
+
+  return `plblob+folder://${sign}.${p}.blob`;
+}
+
+export function isFolderURL(url: string): url is FolderURL {
+  const parsed = new URL(url);
+  return parsed.protocol == 'plblob+folder:';
+}
+
+export function getPathForFolderURL(signer: Signer, url: FolderURL, rootDir: string): string {
+  const parsed = new URL(url);
+  const [sign, subfolder, _] = parsed.host.split('.');
+
+  signer.verify(subfolder, sign, `signature verification failed for url: ${url}, subfolder: ${subfolder}`);
+
+  let fPath = parseValidPath(path.join(rootDir, `${subfolder}`), parsed.pathname.slice(1));
+
+  if (parsed.pathname == '' || parsed.pathname == '/')
+    fPath = path.join(fPath, 'index.html');
+
+  return path.resolve(fPath);
+}
+
+/** Checks that the userInputPath is in baseDir and returns an absolute path. */
+function parseValidPath(baseDir: string, userInputPath: string): string {
+  const absolutePath = path.resolve(baseDir, userInputPath);
+
+  const normalizedBase = path.resolve(baseDir);
+
+  if (!absolutePath.startsWith(normalizedBase)) {
+    throw new Error('Path validation failed.');
+  }
+
+  return absolutePath;
+}

package/src/drivers/download_url.test.ts:

@@ -16,7 +16,7 @@ test('should download a tar archive and extracts its content and then deleted',
   const driver = new DownloadUrlDriver(logger, client.httpDispatcher, dir);
 
   const url = new URL(
-    'https://block.registry.platforma.bio/releases/v1/milaboratory/enter-numbers/0.4.1/frontend.tgz'
+    'https://block.registry.platforma.bio/releases/v1/milaboratory/enter-numbers/0.4.1/frontend.tgz',
   );
 
   const c = driver.getPath(url);

@@ -48,7 +48,7 @@ test('should show a error when 404 status code', async () => {
   const driver = new DownloadUrlDriver(logger, client.httpDispatcher, dir);
 
   const url = new URL(
-    'https://block.registry.platforma.bio/releases/v1/milaboratory/NOT_FOUND'
+    'https://block.registry.platforma.bio/releases/v1/milaboratory/NOT_FOUND',
   );
 
   const c = driver.getPath(url);

@@ -74,7 +74,7 @@ test('should abort a downloading process when we reset a state of a computable',
   const driver = new DownloadUrlDriver(logger, client.httpDispatcher, dir);
 
   const url = new URL(
-    'https://block.registry.platforma.bio/releases/v1/milaboratory/enter-numbers/0.4.1/frontend.tgz'
+    'https://block.registry.platforma.bio/releases/v1/milaboratory/enter-numbers/0.4.1/frontend.tgz',
   );
 
   const c = driver.getPath(url);

package/src/drivers/download_url.ts:

@@ -1,12 +1,14 @@
-import {
+import type { ComputableCtx, Watcher } from '@milaboratories/computable';
+import { ChangeSource, Computable } from '@milaboratories/computable';
+import type {
+  MiLogger } from '@milaboratories/ts-helpers';
 import {
   CallersCounter,
-  MiLogger,
   TaskProcessor,
   createPathAtomically,
   ensureDirExists,
   fileExists,
-  notEmpty
+  notEmpty,
 } from '@milaboratories/ts-helpers';
 import { createHash, randomUUID } from 'node:crypto';
 import * as fsp from 'node:fs/promises';

@@ -14,11 +16,10 @@ import * as path from 'node:path';
 import { Transform, Writable } from 'node:stream';
 import * as zlib from 'node:zlib';
 import * as tar from 'tar-fs';
-import { Dispatcher } from 'undici';
+import type { Dispatcher } from 'undici';
 import { NetworkError400, RemoteFileDownloader } from '../helpers/download';
 import { FilesCache } from './helpers/files_cache';
 import { stringifyWithResourceId } from '@milaboratories/pl-client';
-import { DownloadBlobTask } from './download_blob_task';
 
 export interface DownloadUrlSyncReader {
   /** Returns a Computable that (when the time will come)

@@ -59,8 +60,8 @@ export class DownloadUrlDriver implements DownloadUrlSyncReader {
     private readonly opts: DownloadUrlDriverOps = {
       cacheSoftSizeBytes: 50 * 1024 * 1024,
       withGunzip: true,
-      nConcurrentDownloads: 50
-    }
+      nConcurrentDownloads: 50,
+    },
   ) {
     this.downloadQueue = new TaskProcessor(this.logger, this.opts.nConcurrentDownloads);
     this.cache = new FilesCache(this.opts.cacheSoftSizeBytes);

@@ -75,7 +76,7 @@ export class DownloadUrlDriver implements DownloadUrlSyncReader {
 
   getPath(
     url: URL,
-    ctx?: ComputableCtx
+    ctx?: ComputableCtx,
   ): Computable<PathResult | undefined> | PathResult | undefined {
     // wrap result as computable, if we were not given an existing computable context
     if (ctx === undefined) return Computable.make((c) => this.getPath(url, c));

@@ -88,7 +89,7 @@ export class DownloadUrlDriver implements DownloadUrlSyncReader {
     const result = this.getPathNoCtx(url, ctx.watcher, callerId);
     if (result?.path === undefined)
       ctx.markUnstable(
-        `a path to the downloaded and untared archive might be undefined. The current result: ${result}
+        `a path to the downloaded and untared archive might be undefined. The current result: ${result}`,
       );
 
     return result;

@@ -106,7 +107,7 @@ export class DownloadUrlDriver implements DownloadUrlSyncReader {
     const newTask = this.setNewTask(w, url, callerId);
     this.downloadQueue.push({
       fn: async () => this.downloadUrl(newTask, callerId),
-      recoverableErrorPredicate: (e) => true
+      recoverableErrorPredicate: (e) => true,
     });
 
     return newTask.getPath();

@@ -136,10 +137,10 @@
 
           this.removeTask(
             task,
-            `the task ${stringifyWithResourceId(task.info())} was removed`
-
+            `the task ${stringifyWithResourceId(task.info())} was removed`
+            + `from cache along with ${stringifyWithResourceId(toDelete.map((t) => t.info()))}`,
           );
         }),
       );
     } else {
       // The task is still in a downloading queue.

@@ -147,7 +148,7 @@
       if (deleted)
         this.removeTask(
           task,
-          `the task ${stringifyWithResourceId(task.info())} was removed from cache
+          `the task ${stringifyWithResourceId(task.info())} was removed from cache`,
         );
     }
   }

@@ -163,9 +164,9 @@
 
         this.removeTask(
           task,
-          `the task ${stringifyWithResourceId(task.info())} was released when the driver was closed
+          `the task ${stringifyWithResourceId(task.info())} was released when the driver was closed`,
         );
-      })
+      }),
     );
   }
 

@@ -201,7 +202,7 @@ class DownloadByUrlTask {
   constructor(
     private readonly logger: MiLogger,
     readonly path: string,
-    readonly url: URL
+    readonly url: URL,
   ) {}
 
   public info() {

@@ -210,7 +211,7 @@ class DownloadByUrlTask {
       path: this.path,
       done: this.done,
       size: this.size,
-      error: this.error
+      error: this.error,
     };
   }
 

@@ -240,7 +241,7 @@ class DownloadByUrlTask {
   private async downloadAndUntar(
     clientDownload: RemoteFileDownloader,
     withGunzip: boolean,
-    signal: AbortSignal
+    signal: AbortSignal,
   ): Promise<number> {
     await ensureDirExists(path.dirname(this.path));
 

@@ -301,7 +302,7 @@ async function dirSize(dir: string): Promise<number> {
 
       const stat = await fsp.stat(fPath);
       return stat.size;
-    })
+    }),
   );
 
   return sizes.reduce((sum, size) => sum + size, 0);

package/src/drivers/helpers/download_local_handle.ts:

@@ -1,8 +1,8 @@
 /** Handle of locally downloaded blob. This handle is issued only after the
  * blob's content is downloaded locally, and ready for quick access. */
 
-import { LocalBlobHandle } from '@milaboratories/pl-model-common';
-import { Signer } from '@milaboratories/ts-helpers';
+import type { LocalBlobHandle } from '@milaboratories/pl-model-common';
+import type { Signer } from '@milaboratories/ts-helpers';
 
 // https://regex101.com/r/kfnBVX/1
 const localHandleRegex = /^blob\+local:\/\/download\/(?<path>.*)#(?<signature>.*)$/;

package/src/drivers/helpers/download_remote_handle.ts:

@@ -1,19 +1,19 @@
 /** Handle of remote blob. This handle is issued as soon as the data becomes
  * available on the remote server. */
 
-import { Signer } from '@milaboratories/ts-helpers';
-import { OnDemandBlobResourceSnapshot } from '../types';
-import { RemoteBlobHandle } from '@milaboratories/pl-model-common';
-import { ResourceInfo } from '@milaboratories/pl-tree';
+import type { Signer } from '@milaboratories/ts-helpers';
+import type { OnDemandBlobResourceSnapshot } from '../types';
+import type { RemoteBlobHandle } from '@milaboratories/pl-model-common';
+import type { ResourceInfo } from '@milaboratories/pl-tree';
 import { bigintToResourceId } from '@milaboratories/pl-client';
 
 // https://regex101.com/r/rvbPZt/1
-const remoteHandleRegex
-  /^blob\+remote:\/\/download\/(?<content>(?<resourceType>.*)\/(?<resourceVersion>.*)\/(?<resourceId>.*))#(?<signature>.*)$/;
+const remoteHandleRegex
+  = /^blob\+remote:\/\/download\/(?<content>(?<resourceType>.*)\/(?<resourceVersion>.*)\/(?<resourceId>.*))#(?<signature>.*)$/;
 
 export function newRemoteHandle(
   rInfo: OnDemandBlobResourceSnapshot,
-  signer: Signer
+  signer: Signer,
 ): RemoteBlobHandle {
   const content = `${rInfo.type.name}/${rInfo.type.version}/${BigInt(rInfo.id)}`;
   return `blob+remote://download/${content}#${signer.sign(content)}` as RemoteBlobHandle;

@@ -35,6 +35,6 @@ export function parseRemoteHandle(handle: RemoteBlobHandle, signer: Signer): Res
 
   return {
     id: bigintToResourceId(BigInt(resourceId)),
-    type: { name: resourceType, version: resourceVersion }
+    type: { name: resourceType, version: resourceVersion },
   };
 }

package/src/drivers/helpers/files_cache.test.ts:

@@ -1,6 +1,7 @@
 import { expect, test } from '@jest/globals';
 import { CallersCounter } from '@milaboratories/ts-helpers';
-import { CachedFile
+import type { CachedFile } from './files_cache';
+import { FilesCache } from './files_cache';
 
 test('should delete blob3 when add 3 blobs, exceed a soft limit and nothing holds blob3', () => {
   const cache = new FilesCache(20);

@@ -8,17 +9,17 @@ test('should delete blob3 when add 3 blobs, exceed a soft limit and nothing hold
   const blob1: CachedFile = {
     path: 'path1',
     size: 5,
-    counter: new CallersCounter()
+    counter: new CallersCounter(),
   };
   const blob2: CachedFile = {
     path: 'path2',
     size: 10,
-    counter: new CallersCounter()
+    counter: new CallersCounter(),
   };
   const blob3: CachedFile = {
     path: 'path3',
     size: 10,
-    counter: new CallersCounter()
+    counter: new CallersCounter(),
   };
 
   // add blobs and check that we don't exceed the soft limit.

@@ -53,12 +54,12 @@ test('regression should allow to add empty files', () => {
   const blob1: CachedFile = {
     path: 'path1',
     size: 0,
-    counter: new CallersCounter()
+    counter: new CallersCounter(),
   };
   const blob2: CachedFile = {
     path: 'path2',
     size: 2,
-    counter: new CallersCounter()
+    counter: new CallersCounter(),
   };
 
   // add a blob with 0 size.

package/src/drivers/helpers/logs_handle.ts:

@@ -1,8 +1,8 @@
 /** Handle of logs. This handle should be passed
  * to the driver for retrieving logs. */
 
-import { ResourceInfo } from '@milaboratories/pl-tree';
-import * as sdk from '@milaboratories/pl-model-common';
+import type { ResourceInfo } from '@milaboratories/pl-tree';
+import type * as sdk from '@milaboratories/pl-model-common';
 import { bigintToResourceId } from '@milaboratories/pl-client';
 
 export function newLogHandle(live: boolean, rInfo: ResourceInfo): sdk.AnyLogHandle {

@@ -17,8 +17,8 @@ export function newLogHandle(live: boolean, rInfo: ResourceInfo): sdk.AnyLogHand
  * The resource that represents a log can be deleted,
  * in this case the handle should be refreshed. */
 
-export const liveHandleRegex
-  /^log\+live:\/\/log\/(?<resourceType>.*)\/(?<resourceVersion>.*)\/(?<resourceId>.*)$/;
+export const liveHandleRegex
+  = /^log\+live:\/\/log\/(?<resourceType>.*)\/(?<resourceVersion>.*)\/(?<resourceId>.*)$/;
 
 export function isLiveLogHandle(handle: string): handle is sdk.LiveLogHandle {
   return liveHandleRegex.test(handle);

@@ -26,8 +26,8 @@ export function isLiveLogHandle(handle: string): handle is sdk.LiveLogHandle {
 
 /** Handle of the ready logs of a program. */
 
-export const readyHandleRegex
-  /^log\+ready:\/\/log\/(?<resourceType>.*)\/(?<resourceVersion>.*)\/(?<resourceId>.*)$/;
+export const readyHandleRegex
+  = /^log\+ready:\/\/log\/(?<resourceType>.*)\/(?<resourceVersion>.*)\/(?<resourceId>.*)$/;
 
 export function isReadyLogHandle(handle: string): handle is sdk.ReadyLogHandle {
   return readyHandleRegex.test(handle);

@@ -47,6 +47,6 @@ export function getResourceInfoFromLogHandle(handle: sdk.AnyLogHandle): Resource
 
   return {
     id: bigintToResourceId(BigInt(resourceId)),
-    type: { name: resourceType, version: resourceVersion }
+    type: { name: resourceType, version: resourceVersion },
   };
 }

package/src/drivers/helpers/ls_remote_import_handle.ts:

@@ -1,14 +1,14 @@
-import * as sdk from '@milaboratories/pl-model-common';
-import { Signer } from '@milaboratories/ts-helpers';
+import type * as sdk from '@milaboratories/pl-model-common';
+import type { Signer } from '@milaboratories/ts-helpers';
 import { ImportFileHandleIndexData, ImportFileHandleUploadData } from '../types';
 
 export function createIndexImportHandle(
   storageName: string,
-  path: string
+  path: string,
 ): sdk.ImportFileHandleIndex {
   const data: ImportFileHandleIndexData = {
     storageId: storageName,
-    path: path
+    path: path,
   };
 
   return `index://index/${encodeURIComponent(JSON.stringify(data))}`;

@@ -18,13 +18,13 @@ export function createUploadImportHandle(
   localPath: string,
   signer: Signer,
   sizeBytes: bigint,
-  modificationTimeSeconds: bigint
+  modificationTimeSeconds: bigint,
 ): sdk.ImportFileHandleUpload {
   const data: ImportFileHandleUploadData = {
     localPath,
     pathSignature: signer.sign(localPath),
     sizeBytes: String(sizeBytes),
-    modificationTime: String(modificationTimeSeconds)
+    modificationTime: String(modificationTimeSeconds),
   };
 
   return `upload://upload/${encodeURIComponent(JSON.stringify(data))}`;

@@ -33,7 +33,7 @@ export function createUploadImportHandle(
 export function parseUploadHandle(handle: sdk.ImportFileHandleUpload): ImportFileHandleUploadData {
   const url = new URL(handle);
   return ImportFileHandleUploadData.parse(
-    JSON.parse(decodeURIComponent(url.pathname.substring(1)))
+    JSON.parse(decodeURIComponent(url.pathname.substring(1))),
   );
 }
 