cod-dicomweb-server 1.3.11 → 1.3.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/main.js +1764 -1724
- package/dist/esm/classes/CodDicomWebServer.d.ts +2 -1
- package/dist/esm/classes/CodDicomWebServer.js +69 -27
- package/dist/esm/classes/customClasses.d.ts +1 -0
- package/dist/esm/dataRetrieval/dataRetrievalManager.d.ts +1 -1
- package/dist/esm/dataRetrieval/dataRetrievalManager.js +1 -0
- package/dist/esm/dataRetrieval/register.d.ts +1 -1
- package/dist/esm/dataRetrieval/register.js +1 -2
- package/dist/esm/dataRetrieval/requestManager.d.ts +1 -1
- package/dist/esm/dataRetrieval/scripts/filePartial.d.ts +1 -1
- package/dist/esm/dataRetrieval/scripts/filePartial.js +7 -4
- package/dist/esm/dataRetrieval/scripts/fileStreaming.d.ts +1 -4
- package/dist/esm/dataRetrieval/scripts/fileStreaming.js +8 -28
- package/dist/esm/dataRetrieval/workerManager.d.ts +1 -1
- package/dist/esm/fileManager.d.ts +3 -7
- package/dist/esm/fileManager.js +29 -17
- package/dist/esm/types/codDicomWebServerOptions.d.ts +1 -1
- package/dist/esm/types/fileManagerFile.d.ts +6 -0
- package/dist/esm/types/index.d.ts +1 -1
- package/dist/esm/types/index.js +1 -1
- package/dist/umd/563.js +2 -2
- package/dist/umd/563.js.map +1 -1
- package/dist/umd/846.js +2 -2
- package/dist/umd/846.js.map +1 -1
- package/dist/umd/main.js +3 -3
- package/dist/umd/main.js.map +1 -1
- package/package.json +1 -1
- package/dist/esm/types/fileManagerOptions.d.ts +0 -4
- /package/dist/esm/types/{fileManagerOptions.js → fileManagerFile.js} +0 -0
package/dist/esm/classes/CodDicomWebServer.d.ts
CHANGED

@@ -2,12 +2,13 @@ import { Enums } from '../constants';
  import type { CodDicomWebServerOptions, CODRequestOptions, FileRequestOptions, InstanceMetadata, JsonMetadata, SeriesMetadata } from '../types';
  declare class CodDicomWebServer {
  private filePromises;
+ private files;
  private options;
  private fileManager;
  private metadataManager;
  private seriesUidFileUrls;
  constructor(args?: {
-
+ maxCacheSize?: number;
  domain?: string;
  disableWorker?: boolean;
  enableLocalCache?: boolean;

package/dist/esm/classes/CodDicomWebServer.js
CHANGED

@@ -10,8 +10,9 @@ import { CustomErrorEvent } from './customClasses';
  import { download, getDirectoryHandle } from '../fileAccessSystemUtils';
  class CodDicomWebServer {
  filePromises = {};
+ files = {};
  options = {
-
+ maxCacheSize: 4 * 1024 * 1024 * 1024,
  domain: constants.url.DOMAIN,
  enableLocalCache: false
  };

@@ -19,19 +20,19 @@ class CodDicomWebServer {
  metadataManager;
  seriesUidFileUrls = {};
  constructor(args = {}) {
- const {
- this.options.
+ const { maxCacheSize, domain, disableWorker, enableLocalCache } = args;
+ this.options.maxCacheSize = maxCacheSize || this.options.maxCacheSize;
  this.options.domain = domain || this.options.domain;
  this.options.enableLocalCache = !!enableLocalCache;
  const fileStreamingScriptName = constants.dataRetrieval.FILE_STREAMING_WORKER_NAME;
  const filePartialScriptName = constants.dataRetrieval.FILE_PARTIAL_WORKER_NAME;
- this.fileManager = new FileManager(
+ this.fileManager = new FileManager();
  this.metadataManager = new MetadataManager();
  if (disableWorker) {
  const dataRetrievalManager = getDataRetrievalManager();
  dataRetrievalManager.setDataRetrieverMode(Enums.DataRetrieveMode.REQUEST);
  }
- register({ fileStreamingScriptName, filePartialScriptName }
+ register({ fileStreamingScriptName, filePartialScriptName });
  }
  setOptions = (newOptions) => {
  Object.keys(newOptions).forEach((key) => {
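The constructor hunk above appears to replace the old worker-level fetch limit with a server-level maxCacheSize option (defaulting to 4 * 1024 * 1024 * 1024 bytes, i.e. 4 GiB) alongside the existing domain, disableWorker, and enableLocalCache options. A minimal usage sketch based only on the options visible in this diff; the import specifier and option values are assumptions, not taken from the package documentation:

```ts
// Hypothetical usage sketch of the constructor options shown in the diff above.
// The import path is an assumption; check the package's actual entry point.
import { CodDicomWebServer } from 'cod-dicomweb-server';

const server = new CodDicomWebServer({
  maxCacheSize: 2 * 1024 * 1024 * 1024, // cap the in-memory file cache at 2 GiB
  domain: 'https://example-dicomweb.test', // hypothetical DICOMweb origin
  disableWorker: false, // keep worker-based retrieval enabled
  enableLocalCache: true, // opt in to the local-cache code path shown in the diff
});
```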
@@ -163,15 +164,11 @@ class CodDicomWebServer {
  });
  }
  const directoryHandle = this.options.enableLocalCache && (await getDirectoryHandle());
- const { maxWorkerFetchSize } = this.getOptions();
  const dataRetrievalManager = getDataRetrievalManager();
- const { FILE_STREAMING_WORKER_NAME, FILE_PARTIAL_WORKER_NAME
+ const { FILE_STREAMING_WORKER_NAME, FILE_PARTIAL_WORKER_NAME } = constants.dataRetrieval;
  let tarPromise;
  if (!this.filePromises[fileUrl]) {
  tarPromise = new Promise((resolveFile, rejectFile) => {
- if (this.fileManager.getTotalSize() + THRESHOLD > maxWorkerFetchSize) {
- throw new CustomError(`CodDicomWebServer.ts: Maximum size(${maxWorkerFetchSize}) for fetching files reached`);
- }
  const FetchTypeEnum = constants.Enums.FetchType;
  if (fetchType === FetchTypeEnum.API_OPTIMIZED) {
  const handleFirstChunk = (evt) => {

@@ -181,7 +178,12 @@ class CodDicomWebServer {
  }
  const { url, position, fileArraybuffer } = evt.data;
  if (url === fileUrl && fileArraybuffer) {
- this.
+ if (this.options.enableLocalCache) {
+ this.files[fileUrl] = fileArraybuffer;
+ }
+ else {
+ this.fileManager.set(url, { data: fileArraybuffer, position });
+ }
  dataRetrievalManager.removeEventListener(FILE_STREAMING_WORKER_NAME, 'message', handleFirstChunk);
  }
  };

@@ -201,7 +203,6 @@ class CodDicomWebServer {
  })
  .then(() => {
  dataRetrievalManager.removeEventListener(FILE_STREAMING_WORKER_NAME, 'message', handleFirstChunk);
- delete this.filePromises[fileUrl];
  });
  }
  else if (fetchType === FetchTypeEnum.BYTES_OPTIMIZED && offsets) {

@@ -214,7 +215,12 @@ class CodDicomWebServer {
  }
  const { url, fileArraybuffer, offsets } = evt.data;
  if (url === bytesRemovedUrl && offsets.startByte === startByte && offsets.endByte === endByte) {
- this.
+ if (this.options.enableLocalCache) {
+ this.files[fileUrl] = fileArraybuffer;
+ }
+ else {
+ this.fileManager.set(fileUrl, { data: fileArraybuffer, position: fileArraybuffer.length });
+ }
  dataRetrievalManager.removeEventListener(FILE_PARTIAL_WORKER_NAME, 'message', handleSlice);
  resolveFile();
  }

@@ -232,43 +238,69 @@ class CodDicomWebServer {
  })
  .then(() => {
  dataRetrievalManager.removeEventListener(FILE_PARTIAL_WORKER_NAME, 'message', handleSlice);
- delete this.filePromises[fileUrl];
  });
  }
  else {
  rejectFile(new CustomError('CodDicomWebServer.ts: Offsets is needed in bytes optimized fetching'));
  }
  });
- this.filePromises[fileUrl] = tarPromise;
+ this.filePromises[fileUrl] = { promise: tarPromise, requestCount: 1 };
  }
  else {
- tarPromise = this.filePromises[fileUrl];
+ tarPromise = this.filePromises[fileUrl].promise;
+ this.filePromises[fileUrl].requestCount++;
  }
  return new Promise((resolveRequest, rejectRequest) => {
- let requestResolved = false;
+ let requestResolved = false, fileFetchingCompleted = false;
  const handleChunkAppend = (evt) => {
  if (evt instanceof CustomErrorEvent) {
  rejectRequest(evt.message);
  throw evt.error;
  }
- const { url, position, chunk, isAppending } = evt.data;
+ const { url, position, chunk, totalLength, isAppending } = evt.data;
  if (isAppending) {
  if (chunk) {
- this.
+ if (this.options.enableLocalCache) {
+ this.files[url].set(chunk, position - chunk.length);
+ }
+ else {
+ this.fileManager.append(url, chunk, position);
+ }
  }
  else {
  this.fileManager.setPosition(url, position);
  }
  }
-
+ else {
+ // The full empty file including with first chunk have been stored to fileManager
+ // by the worker listener in the file promise.
+ // So, we check whether the cache exceeded the limit here.
+ if (this.fileManager.getTotalSize() > this.options.maxCacheSize) {
+ this.fileManager.decacheNecessaryBytes(url, totalLength);
+ }
+ }
+ if (!requestResolved && url === fileUrl && position > offsets.endByte) {
  try {
- const file = this.
-
+ const file = this.options.enableLocalCache
+ ? this.files[url].slice(offsets.startByte, offsets.endByte)
+ : this.fileManager.get(url, offsets);
  resolveRequest(file?.buffer);
  }
  catch (error) {
  rejectRequest(error);
  }
+ finally {
+ completeRequest(url);
+ }
+ }
+ };
+ const completeRequest = (url) => {
+ requestResolved = true;
+ this.filePromises[url].requestCount--;
+ dataRetrievalManager.removeEventListener(FILE_STREAMING_WORKER_NAME, 'message', handleChunkAppend);
+ if (fileFetchingCompleted && this.filePromises[url] && !this.filePromises[url]?.requestCount) {
+ delete this.filePromises[url];
+ delete this.files[url];
  }
  };
  if (offsets && !isBytesOptimized) {

@@ -276,10 +308,19 @@ class CodDicomWebServer {
  }
  tarPromise
  .then(() => {
+ fileFetchingCompleted = true;
  if (!requestResolved) {
- if (this.fileManager.getPosition(fileUrl)) {
-
-
+ if (this.fileManager.getPosition(fileUrl) || this.files[fileUrl]) {
+ let file;
+ if (this.options.enableLocalCache) {
+ file =
+ isBytesOptimized || !offsets
+ ? this.files[fileUrl]
+ : this.files[fileUrl].slice(offsets.startByte, offsets.endByte);
+ }
+ else {
+ file = this.fileManager.get(fileUrl, isBytesOptimized ? undefined : offsets);
+ }
  resolveRequest(file?.buffer);
  }
  else {

@@ -288,10 +329,11 @@ class CodDicomWebServer {
  }
  })
  .catch((error) => {
+ fileFetchingCompleted = true;
  rejectRequest(error);
  })
- .
-
+ .finally(() => {
+ completeRequest(fileUrl);
  });
  });
  }
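The largest behavioral change in this file is that filePromises now stores { promise, requestCount } instead of a bare promise: each caller sharing an in-flight fetch increments requestCount, completeRequest decrements it, and the entry (plus any locally cached buffer) is only deleted once the fetch has finished and the last outstanding request has released its reference. A condensed sketch of that reference-counting pattern, detached from the worker plumbing; the names (SharedFetch, fetchOnce) are illustrative, not part of the package API:

```ts
// Sketch of a reference-counted promise cache, assuming one underlying fetch per URL.
type Entry = { promise: Promise<Uint8Array>; requestCount: number };

class SharedFetch {
  private entries: Record<string, Entry> = {};

  async request(url: string, fetchOnce: (url: string) => Promise<Uint8Array>): Promise<Uint8Array> {
    let entry = this.entries[url];
    if (!entry) {
      // First caller starts the underlying fetch and holds one reference.
      entry = { promise: fetchOnce(url), requestCount: 1 };
      this.entries[url] = entry;
    } else {
      // Later callers share the in-flight promise and add a reference.
      entry.requestCount++;
    }
    try {
      return await entry.promise;
    } finally {
      // Release the reference; drop the entry once the fetch has settled
      // and no caller still holds a reference.
      entry.requestCount--;
      if (entry.requestCount === 0) {
        delete this.entries[url];
      }
    }
  }
}
```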
package/dist/esm/dataRetrieval/dataRetrievalManager.d.ts
CHANGED

@@ -8,7 +8,7 @@ declare class DataRetrievalManager {
  getDataRetrieverMode(): Enums.DataRetrieveMode;
  setDataRetrieverMode(mode: Enums.DataRetrieveMode): void;
  register(name: string, arg: (() => Worker) | ScriptObject): void;
- executeTask(loaderName: string, taskName: string, options: Record<string, unknown> | unknown): Promise<
+ executeTask(loaderName: string, taskName: string, options: Record<string, unknown> | unknown): Promise<any>;
  addEventListener(workerName: string, eventType: keyof WorkerEventMap, listener: (evt: CustomMessageEvent | CustomErrorEvent) => unknown): void;
  removeEventListener(workerName: string, eventType: keyof WorkerEventMap, listener: (evt: CustomMessageEvent | CustomErrorEvent) => unknown): void;
  reset(): void;

package/dist/esm/dataRetrieval/register.js
CHANGED

@@ -2,7 +2,7 @@ import { Enums } from '../constants';
  import { getDataRetrievalManager } from './dataRetrievalManager';
  import filePartial from './scripts/filePartial';
  import fileStreaming from './scripts/fileStreaming';
- export function register(workerNames
+ export function register(workerNames) {
  const { fileStreamingScriptName, filePartialScriptName } = workerNames;
  const dataRetrievalManager = getDataRetrievalManager();
  if (dataRetrievalManager.getDataRetrieverMode() === Enums.DataRetrieveMode.REQUEST) {

@@ -21,5 +21,4 @@ export function register(workerNames, maxFetchSize) {
  });
  dataRetrievalManager.register(filePartialScriptName, partialWorkerFn);
  }
- dataRetrievalManager.executeTask(fileStreamingScriptName, 'setMaxFetchSize', maxFetchSize);
  }

package/dist/esm/dataRetrieval/requestManager.d.ts
CHANGED

@@ -4,7 +4,7 @@ declare class RequestManager {
  private loaderRegistry;
  register(loaderName: string, loaderObject: ScriptObject): void;
  private listenerCallback;
- executeTask(loaderName: string, taskName: string, options: Record<string, unknown> | unknown): Promise<
+ executeTask(loaderName: string, taskName: string, options: Record<string, unknown> | unknown): Promise<any>;
  addEventListener(workerName: string, eventType: keyof WorkerEventMap, listener: (evt: CustomMessageEvent | CustomErrorEvent) => unknown): void;
  removeEventListener(workerName: string, eventType: keyof WorkerEventMap, listener: (evt: CustomMessageEvent | CustomErrorEvent) => unknown): void;
  reset(): void;

package/dist/esm/dataRetrieval/scripts/filePartial.js
CHANGED

@@ -10,17 +10,20 @@ const filePartial = {
  if (directoryHandle) {
  const file = (await readFile(directoryHandle, storageName, { offsets, isJson: false }));
  if (file) {
-
-
+ const fileBuffer = new Uint8Array(file);
+ callBack({ url, fileArraybuffer: fileBuffer, offsets });
+ return fileBuffer;
  }
  }
- await fetch(url, { headers })
+ return await fetch(url, { headers })
  .then((response) => response.arrayBuffer())
  .then((data) => {
-
+ const fileBuffer = new Uint8Array(data);
+ callBack({ url, fileArraybuffer: fileBuffer, offsets });
  if (directoryHandle) {
  writeFile(directoryHandle, storageName, data);
  }
+ return fileBuffer;
  })
  .catch((error) => {
  throw new CustomError('filePartial.ts: Error when fetching file: ' + error?.message);

package/dist/esm/dataRetrieval/scripts/fileStreaming.d.ts
CHANGED

@@ -1,8 +1,4 @@
  declare const fileStreaming: {
- maxFetchSize: number;
- fetchedSize: number;
- setMaxFetchSize(size: number): void;
- decreaseFetchedSize(size: number): void;
  stream(args: {
  url: string;
  headers?: Record<string, string>;

@@ -14,6 +10,7 @@ declare const fileStreaming: {
  isAppending?: boolean;
  fileArraybuffer?: Uint8Array;
  chunk?: Uint8Array;
+ totalLength: number;
  }) => void): Promise<Uint8Array | void>;
  };
  export default fileStreaming;

package/dist/esm/dataRetrieval/scripts/fileStreaming.js
CHANGED

@@ -1,18 +1,6 @@
  import { CustomError } from '../../classes/customClasses';
  import { createStreamingFileName, readFile, writeFile } from '../../fileAccessSystemUtils';
  const fileStreaming = {
- maxFetchSize: 4 * 1024 * 1024 * 1024,
- fetchedSize: 0,
- setMaxFetchSize(size) {
- if (size > 0) {
- this.maxFetchSize = size;
- }
- },
- decreaseFetchedSize(size) {
- if (size > 0 && size <= this.fetchedSize) {
- this.fetchedSize -= size;
- }
- },
  async stream(args, callBack) {
  const { url, headers, useSharedArrayBuffer, directoryHandle } = args;
  const controller = new AbortController();

@@ -23,8 +11,10 @@ const fileStreaming = {
  if (directoryHandle) {
  const file = (await readFile(directoryHandle, fileName, { isJson: false }));
  if (file) {
-
-
+ const totalLength = file.byteLength;
+ const fileBuffer = new Uint8Array(file);
+ callBack({ url, position: totalLength, fileArraybuffer: fileBuffer, totalLength });
+ return fileBuffer;
  }
  }
  const response = await fetch(url, {

@@ -48,11 +38,6 @@ const fileStreaming = {
  }
  if (!completed) {
  let position = firstChunk.value.length;
- if (this.fetchedSize + position > this.maxFetchSize) {
- controller.abort();
- throw new CustomError(`Maximum size(${this.maxFetchSize}) for fetching files reached`);
- }
- this.fetchedSize += position;
  if (useSharedArrayBuffer) {
  sharedArraybuffer = new SharedArrayBuffer(totalLength);
  fileArraybuffer = new Uint8Array(sharedArraybuffer);

@@ -61,7 +46,7 @@ const fileStreaming = {
  fileArraybuffer = new Uint8Array(totalLength);
  }
  fileArraybuffer.set(firstChunk.value);
- callBack({ url, position, fileArraybuffer });
+ callBack({ url, position, fileArraybuffer, totalLength });
  while (!completed) {
  result = await reader.read();
  if (result.done) {

@@ -69,26 +54,21 @@ const fileStreaming = {
  continue;
  }
  const chunk = result.value;
- if (this.fetchedSize + chunk.length > this.maxFetchSize) {
- sharedArraybuffer = null;
- fileArraybuffer = null;
- controller.abort();
- throw new CustomError(`Maximum size(${this.maxFetchSize}) for fetching files reached`);
- }
- this.fetchedSize += chunk.length;
  fileArraybuffer.set(chunk, position);
  position += chunk.length;
  callBack({
  isAppending: true,
  url,
  position: position,
- chunk: !useSharedArrayBuffer ? chunk : undefined
+ chunk: !useSharedArrayBuffer ? chunk : undefined,
+ totalLength
  });
  }
  if (directoryHandle) {
  writeFile(directoryHandle, fileName, fileArraybuffer.slice().buffer);
  }
  }
+ return fileArraybuffer;
  }
  catch (error) {
  const streamingError = new CustomError('fileStreaming.ts: ' + error.message || 'An error occured when streaming');
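With the maxFetchSize/fetchedSize bookkeeping removed, fileStreaming.stream now simply streams the response into a preallocated buffer, reports totalLength through the callback, and leaves cache-size enforcement to the CodDicomWebServer/FileManager layer. A self-contained sketch of that streaming pattern, with the worker callback reduced to a plain function; taking totalLength from the Content-Length header is an assumption here, and the SharedArrayBuffer and local-cache branches are omitted:

```ts
// Minimal sketch: stream a response into one preallocated Uint8Array while
// reporting progress the way the diff's callBack({ url, position, chunk, totalLength }) does.
type ProgressCb = (info: { url: string; position: number; chunk?: Uint8Array; totalLength: number }) => void;

async function streamToBuffer(url: string, onProgress: ProgressCb): Promise<Uint8Array> {
  const response = await fetch(url);
  if (!response.ok || !response.body) {
    throw new Error(`Failed to stream ${url}: ${response.status}`);
  }
  // Assumes the server sends Content-Length so the buffer can be preallocated.
  const totalLength = Number(response.headers.get('Content-Length') ?? 0);
  const buffer = new Uint8Array(totalLength);
  const reader = response.body.getReader();
  let position = 0;
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer.set(value, position);
    position += value.length;
    onProgress({ url, position, chunk: value, totalLength });
  }
  return buffer;
}
```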
package/dist/esm/dataRetrieval/workerManager.d.ts
CHANGED

@@ -2,7 +2,7 @@ import { CustomErrorEvent, CustomMessageEvent } from '../classes/customClasses';
  declare class WebWorkerManager {
  private workerRegistry;
  register(name: string, workerFn: () => Worker): void;
- executeTask(workerName: string, taskName: string, options: Record<string, unknown> | unknown): Promise<
+ executeTask(workerName: string, taskName: string, options: Record<string, unknown> | unknown): Promise<any>;
  addEventListener(workerName: string, eventType: keyof WorkerEventMap, listener: (evt: CustomMessageEvent | CustomErrorEvent) => unknown): void;
  removeEventListener(workerName: string, eventType: keyof WorkerEventMap, listener: (evt: CustomMessageEvent | CustomErrorEvent) => unknown): void;
  reset(): void;

package/dist/esm/fileManager.d.ts
CHANGED

@@ -1,12 +1,7 @@
- import type {
+ import type { FileManagerFile } from './types';
  declare class FileManager {
  private files;
-
- constructor({ fileStreamingScriptName }: FileManagerOptions);
- set(url: string, file: {
- data: Uint8Array;
- position: number;
- }): void;
+ set(url: string, file: Omit<FileManagerFile, 'lastModified'>): void;
  get(url: string, offsets?: {
  startByte: number;
  endByte: number;

@@ -17,5 +12,6 @@ declare class FileManager {
  getTotalSize(): number;
  remove(url: string): void;
  purge(): void;
+ decacheNecessaryBytes(url: string, bytesNeeded: number): number;
  }
  export default FileManager;
package/dist/esm/fileManager.js
CHANGED

@@ -1,12 +1,7 @@
- import { getDataRetrievalManager } from './dataRetrieval/dataRetrievalManager';
  class FileManager {
  files = {};
- fileStreamingScriptName;
- constructor({ fileStreamingScriptName }) {
- this.fileStreamingScriptName = fileStreamingScriptName;
- }
  set(url, file) {
- this.files[url] = file;
+ this.files[url] = { ...file, lastModified: Date.now() };
  }
  get(url, offsets) {
  if (!this.files[url] || (offsets && this.files[url].position <= offsets.endByte)) {

@@ -17,6 +12,7 @@ class FileManager {
  setPosition(url, position) {
  if (this.files[url]) {
  this.files[url].position = position;
+ this.files[url].lastModified = Date.now();
  }
  }
  getPosition(url) {

@@ -29,24 +25,40 @@ class FileManager {
  }
  }
  getTotalSize() {
- return Object.
- return
+ return Object.values(this.files).reduce((total, { data }) => {
+ return total + data.byteLength;
  }, 0);
  }
  remove(url) {
-
-
-
-
+ try {
+ delete this.files[url];
+ console.log(`Removed ${url} from CodDicomwebServer cache`);
+ }
+ catch (error) {
+ console.warn(`Error removing ${url} from CodDicomwebServer cache:`, error);
  }
- const retrievalManager = getDataRetrievalManager();
- retrievalManager.executeTask(this.fileStreamingScriptName, 'decreaseFetchedSize', removedSize);
  }
  purge() {
+ const fileURLs = Object.keys(this.files);
+ const totalSize = this.getTotalSize();
+ fileURLs.forEach((url) => this.remove(url));
+ console.log(`Purged ${totalSize - this.getTotalSize()} bytes from CodDicomwebServer cache`);
+ }
+ decacheNecessaryBytes(url, bytesNeeded) {
  const totalSize = this.getTotalSize();
-
-
-
+ const filesToDelete = [];
+ let collectiveSize = 0;
+ Object.entries(this.files)
+ .sort(([, a], [, b]) => a.lastModified - b.lastModified)
+ .forEach(([key, file]) => {
+ if (collectiveSize < bytesNeeded && key !== url) {
+ filesToDelete.push(key);
+ collectiveSize += file.data.byteLength;
+ }
+ });
+ filesToDelete.forEach((key) => this.remove(key));
+ console.log(`Decached ${totalSize - this.getTotalSize()} bytes`);
+ return collectiveSize;
  }
  }
  export default FileManager;
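FileManager now stamps every cached entry with lastModified (on set and setPosition), and when the cache grows past maxCacheSize, decacheNecessaryBytes evicts the least-recently-touched entries, skipping the file currently being fetched, until enough bytes are freed. A standalone sketch of that eviction policy; CachedFile and the Map-based store are illustrative, not the package's types, and the logging is omitted:

```ts
// Sketch of the lastModified-based eviction used by decacheNecessaryBytes.
interface CachedFile {
  data: Uint8Array;
  position: number;
  lastModified: number;
}

function decacheNecessaryBytes(
  files: Map<string, CachedFile>,
  currentUrl: string,
  bytesNeeded: number
): number {
  let freed = 0;
  // Oldest entries first, never evicting the file that is still being fetched.
  const candidates = [...files.entries()]
    .filter(([url]) => url !== currentUrl)
    .sort(([, a], [, b]) => a.lastModified - b.lastModified);
  for (const [url, file] of candidates) {
    if (freed >= bytesNeeded) break;
    freed += file.data.byteLength;
    files.delete(url);
  }
  return freed;
}
```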
package/dist/esm/types/index.js
CHANGED