cod-dicomweb-server 1.2.3 → 1.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{types → esm}/classes/CodDicomWebServer.d.ts +1 -1
- package/dist/esm/classes/CodDicomWebServer.js +306 -0
- package/dist/esm/classes/utils.js +101 -0
- package/dist/esm/constants/enums.js +19 -0
- package/dist/esm/constants/index.js +6 -0
- package/dist/esm/constants/url.js +3 -0
- package/dist/esm/constants/worker.js +3 -0
- package/dist/esm/fileManager.js +52 -0
- package/dist/esm/index.js +4 -0
- package/dist/{types → esm}/metadataManager.d.ts +1 -0
- package/dist/esm/metadataManager.js +46 -0
- package/dist/esm/types/index.js +7 -0
- package/dist/{types → esm}/types/metadata.d.ts +2 -2
- package/dist/esm/types/metadata.js +1 -0
- package/dist/esm/types/metadataUrlCreationParams.js +1 -0
- package/dist/esm/types/parsedWadoRsUrlDetails.js +1 -0
- package/dist/esm/types/requestOptions.js +1 -0
- package/dist/esm/types/workerCustomMessageEvents.js +1 -0
- package/dist/esm/webWorker/registerWorkers.js +16 -0
- package/dist/esm/webWorker/scripts/filePartial.js +11 -0
- package/dist/esm/webWorker/scripts/fileStreaming.js +92 -0
- package/dist/esm/webWorker/workerManager.js +54 -0
- package/dist/esm/webWorker/workers/filePartialWorker.d.ts +1 -0
- package/dist/esm/webWorker/workers/filePartialWorker.js +3 -0
- package/dist/esm/webWorker/workers/fileStreamingWorker.d.ts +1 -0
- package/dist/esm/webWorker/workers/fileStreamingWorker.js +3 -0
- package/dist/{16.js → umd/16.js} +1 -1
- package/dist/{170.js → umd/170.js} +1 -1
- package/dist/{main.js → umd/main.js} +3 -3
- package/dist/umd/main.js.map +1 -0
- package/package.json +8 -5
- package/dist/main.js.map +0 -1
- /package/dist/{types → esm}/classes/utils.d.ts +0 -0
- /package/dist/{types → esm}/constants/enums.d.ts +0 -0
- /package/dist/{types → esm}/constants/index.d.ts +0 -0
- /package/dist/{types → esm}/constants/url.d.ts +0 -0
- /package/dist/{types → esm}/constants/worker.d.ts +0 -0
- /package/dist/{types → esm}/fileManager.d.ts +0 -0
- /package/dist/{types → esm}/index.d.ts +0 -0
- /package/dist/{types → esm}/types/codDicomWebServerOptions.d.ts +0 -0
- /package/dist/{types/webWorker/workers/filePartialWorker.d.ts → esm/types/codDicomWebServerOptions.js} +0 -0
- /package/dist/{types → esm}/types/fileManagerOptions.d.ts +0 -0
- /package/dist/{types/webWorker/workers/fileStreamingWorker.d.ts → esm/types/fileManagerOptions.js} +0 -0
- /package/dist/{types → esm}/types/index.d.ts +0 -0
- /package/dist/{types → esm}/types/metadataUrlCreationParams.d.ts +0 -0
- /package/dist/{types → esm}/types/parsedWadoRsUrlDetails.d.ts +0 -0
- /package/dist/{types → esm}/types/requestOptions.d.ts +0 -0
- /package/dist/{types → esm}/types/workerCustomMessageEvents.d.ts +0 -0
- /package/dist/{types → esm}/webWorker/registerWorkers.d.ts +0 -0
- /package/dist/{types → esm}/webWorker/scripts/filePartial.d.ts +0 -0
- /package/dist/{types → esm}/webWorker/scripts/fileStreaming.d.ts +0 -0
- /package/dist/{types → esm}/webWorker/workerManager.d.ts +0 -0
- /package/dist/{16.js.map → umd/16.js.map} +0 -0
- /package/dist/{170.js.map → umd/170.js.map} +0 -0
@@ -1,5 +1,5 @@
 import { Enums } from '../constants';
-import type { CodDicomWebServerOptions,
+import type { CodDicomWebServerOptions, CODRequestOptions, FileRequestOptions, InstanceMetadata, JsonMetadata, SeriesMetadata } from '../types';
 declare class CodDicomWebServer {
     private filePromises;
     private options;
@@ -0,0 +1,306 @@
+import { parseDicom } from 'dicom-parser';
+import FileManager from '../fileManager';
+import MetadataManager from '../metadataManager';
+import { getFrameDetailsFromMetadata, parseWadorsURL } from './utils';
+import { getWebWorkerManager } from '../webWorker/workerManager';
+import { registerWorkers } from '../webWorker/registerWorkers';
+import constants, { Enums } from '../constants';
+class CodDicomWebServer {
+    filePromises = {};
+    options = {
+        maxWorkerFetchSize: Infinity,
+        domain: constants.url.DOMAIN
+    };
+    fileManager;
+    metadataManager;
+    seriesUidFileUrls = {};
+    constructor(args = {}) {
+        const { maxWorkerFetchSize, domain } = args;
+        this.options.maxWorkerFetchSize = maxWorkerFetchSize || this.options.maxWorkerFetchSize;
+        this.options.domain = domain || this.options.domain;
+        const fileStreamingWorkerName = constants.worker.FILE_STREAMING_WORKER_NAME;
+        const filePartialWorkerName = constants.worker.FILE_PARTIAL_WORKER_NAME;
+        this.fileManager = new FileManager({ fileStreamingWorkerName });
+        this.metadataManager = new MetadataManager();
+        registerWorkers({ fileStreamingWorkerName, filePartialWorkerName }, this.options.maxWorkerFetchSize);
+    }
+    setOptions = (newOptions) => {
+        Object.keys(newOptions).forEach((key) => {
+            if (newOptions[key] !== undefined) {
+                this.options[key] = newOptions[key];
+            }
+        });
+    };
+    getOptions = () => {
+        return this.options;
+    };
+    addFileUrl(seriesInstanceUID, url) {
+        if (this.seriesUidFileUrls[seriesInstanceUID]) {
+            this.seriesUidFileUrls[seriesInstanceUID].push(url);
+        }
+        else {
+            this.seriesUidFileUrls[seriesInstanceUID] = [url];
+        }
+    }
+    async fetchCod(wadorsUrl, headers = {}, { useSharedArrayBuffer = false, fetchType = constants.Enums.FetchType.API_OPTIMIZED } = {}) {
+        try {
+            if (!wadorsUrl) {
+                throw new Error('Url not provided');
+            }
+            const parsedDetails = parseWadorsURL(wadorsUrl, this.options.domain);
+            if (parsedDetails) {
+                const { type, bucketName, bucketPrefix, studyInstanceUID, seriesInstanceUID, sopInstanceUID, frameNumber } = parsedDetails;
+                const metadataJson = await this.metadataManager.getMetadata({
+                    domain: this.options.domain,
+                    bucketName,
+                    bucketPrefix,
+                    studyInstanceUID,
+                    seriesInstanceUID
+                }, headers);
+                if (!metadataJson) {
+                    throw new Error(`Metadata not found for ${wadorsUrl}`);
+                }
+                const { url: fileUrl, startByte, endByte, thumbnailUrl, isMultiframe } = getFrameDetailsFromMetadata(metadataJson, sopInstanceUID, frameNumber - 1, {
+                    domain: this.options.domain,
+                    bucketName,
+                    bucketPrefix
+                });
+                switch (type) {
+                    case Enums.RequestType.THUMBNAIL:
+                        if (!thumbnailUrl) {
+                            throw new Error(`Thumbnail not found for ${wadorsUrl}`);
+                        }
+                        this.addFileUrl(seriesInstanceUID, thumbnailUrl);
+                        return this.fetchFile(thumbnailUrl, headers, {
+                            useSharedArrayBuffer
+                        });
+                    case Enums.RequestType.FRAME: {
+                        if (!fileUrl) {
+                            throw new Error('Url not found for frame');
+                        }
+                        let urlWithBytes = fileUrl;
+                        if (fetchType === Enums.FetchType.BYTES_OPTIMIZED) {
+                            urlWithBytes = `${fileUrl}?bytes=${startByte}-${endByte}`;
+                        }
+                        this.addFileUrl(seriesInstanceUID, fileUrl);
+                        return this.fetchFile(urlWithBytes, headers, {
+                            offsets: { startByte, endByte },
+                            useSharedArrayBuffer,
+                            fetchType
+                        }).then((arraybuffer) => {
+                            if (!arraybuffer?.byteLength) {
+                                throw new Error('File Arraybuffer is not found');
+                            }
+                            if (isMultiframe) {
+                                return arraybuffer;
+                            }
+                            else {
+                                const dataSet = parseDicom(new Uint8Array(arraybuffer));
+                                const pixelDataElement = dataSet.elements.x7fe00010;
+                                let { dataOffset, length } = pixelDataElement;
+                                if (pixelDataElement.hadUndefinedLength && pixelDataElement.fragments) {
+                                    ({ position: dataOffset, length } = pixelDataElement.fragments[0]);
+                                }
+                                else {
+                                    // Adding 8 bytes for 4 bytes tag + 4 bytes length for uncomppressed pixelData
+                                    dataOffset += 8;
+                                }
+                                return arraybuffer.slice(dataOffset, dataOffset + length);
+                            }
+                        });
+                    }
+                    case Enums.RequestType.SERIES_METADATA:
+                    case Enums.RequestType.INSTANCE_METADATA:
+                        return this.parseMetadata(metadataJson, type, sopInstanceUID);
+                    default:
+                        throw new Error(`Unsupported request type: ${type}`);
+                }
+            }
+            else {
+                return new Promise((resolve, reject) => {
+                    return this.fetchFile(wadorsUrl, headers, { useSharedArrayBuffer })
+                        .then((result) => {
+                            if (result instanceof ArrayBuffer) {
+                                try {
+                                    const dataSet = parseDicom(new Uint8Array(result));
+                                    const seriesInstanceUID = dataSet.string('0020000e');
+                                    if (seriesInstanceUID) {
+                                        this.addFileUrl(seriesInstanceUID, wadorsUrl);
+                                    }
+                                }
+                                catch (error) {
+                                    console.warn('CodDicomWebServer.ts: There is some issue parsing the file.', error);
+                                }
+                            }
+                            resolve(result);
+                        })
+                        .catch((error) => reject(error));
+                });
+            }
+        }
+        catch (error) {
+            const newError = new Error(`CodDicomWebServer.ts: ${error.message || 'An error occured when fetching the COD'}`);
+            console.error(newError);
+            throw newError;
+        }
+    }
+    async fetchFile(fileUrl, headers, { offsets, useSharedArrayBuffer = false, fetchType = constants.Enums.FetchType.API_OPTIMIZED } = {}) {
+        const isBytesOptimized = fetchType === Enums.FetchType.BYTES_OPTIMIZED;
+        const extractedFile = this.fileManager.get(fileUrl, isBytesOptimized ? undefined : offsets);
+        if (extractedFile) {
+            return new Promise((resolveRequest, rejectRequest) => {
+                try {
+                    resolveRequest(extractedFile.buffer);
+                }
+                catch (error) {
+                    rejectRequest(error);
+                }
+            });
+        }
+        const { maxWorkerFetchSize } = this.getOptions();
+        const webWorkerManager = getWebWorkerManager();
+        const { FILE_STREAMING_WORKER_NAME, FILE_PARTIAL_WORKER_NAME, THRESHOLD } = constants.worker;
+        let tarPromise;
+        if (!this.filePromises[fileUrl]) {
+            tarPromise = new Promise((resolveFile, rejectFile) => {
+                if (this.fileManager.getTotalSize() + THRESHOLD > maxWorkerFetchSize) {
+                    throw new Error(`CodDicomWebServer.ts: Maximum size(${maxWorkerFetchSize}) for fetching files reached`);
+                }
+                const FetchTypeEnum = constants.Enums.FetchType;
+                if (fetchType === FetchTypeEnum.API_OPTIMIZED) {
+                    const handleFirstChunk = (evt) => {
+                        if (evt instanceof ErrorEvent) {
+                            rejectFile(evt.error);
+                            throw evt.error;
+                        }
+                        const { url, position, fileArraybuffer } = evt.data;
+                        if (url === fileUrl && fileArraybuffer) {
+                            this.fileManager.set(url, { data: fileArraybuffer, position });
+                            webWorkerManager.removeEventListener(FILE_STREAMING_WORKER_NAME, 'message', handleFirstChunk);
+                        }
+                    };
+                    webWorkerManager.addEventListener(FILE_STREAMING_WORKER_NAME, 'message', handleFirstChunk);
+                    webWorkerManager
+                        .executeTask(FILE_STREAMING_WORKER_NAME, 'stream', {
+                            url: fileUrl,
+                            headers: headers,
+                            useSharedArrayBuffer
+                        })
+                        .then(() => {
+                            resolveFile();
+                        })
+                        .catch((error) => {
+                            webWorkerManager.removeEventListener(FILE_STREAMING_WORKER_NAME, 'message', handleFirstChunk);
+                            rejectFile(error);
+                        })
+                        .then(() => delete this.filePromises[fileUrl]);
+                }
+                else if (fetchType === FetchTypeEnum.BYTES_OPTIMIZED && offsets) {
+                    const { startByte, endByte } = offsets;
+                    headers['Range'] = `bytes=${startByte}-${endByte - 1}`;
+                    const url = fileUrl.split('?bytes=')[0];
+                    webWorkerManager
+                        .executeTask(FILE_PARTIAL_WORKER_NAME, 'partial', {
+                            url: url,
+                            headers: headers,
+                            useSharedArrayBuffer
+                        })
+                        .then((data) => {
+                            if (data) {
+                                this.fileManager.set(fileUrl, {
+                                    data: new Uint8Array(data),
+                                    position: data.byteLength
+                                });
+                                resolveFile();
+                            }
+                            else {
+                                rejectFile(new Error(`File - ${url} not found`));
+                            }
+                        })
+                        .catch((error) => {
+                            rejectFile(error);
+                        })
+                        .then(() => delete this.filePromises[fileUrl]);
+                }
+                else {
+                    rejectFile(new Error('CodDicomWebServer.ts: Offsets is needed in bytes optimized fetching'));
+                }
+            });
+            this.filePromises[fileUrl] = tarPromise;
+        }
+        else {
+            tarPromise = this.filePromises[fileUrl];
+        }
+        return new Promise((resolveRequest, rejectRequest) => {
+            let requestResolved = false;
+            const handleChunkAppend = (evt) => {
+                if (evt instanceof ErrorEvent) {
+                    rejectRequest(evt.message);
+                    throw evt.error;
+                }
+                const { url, position, chunk, isAppending } = evt.data;
+                if (isAppending) {
+                    if (chunk) {
+                        this.fileManager.append(url, chunk, position);
+                    }
+                    else {
+                        this.fileManager.setPosition(url, position);
+                    }
+                }
+                if (!requestResolved && url === fileUrl && offsets && position > offsets.endByte) {
+                    try {
+                        const file = this.fileManager.get(url, offsets);
+                        requestResolved = true;
+                        resolveRequest(file?.buffer);
+                    }
+                    catch (error) {
+                        rejectRequest(error);
+                    }
+                }
+            };
+            if (offsets && !isBytesOptimized) {
+                webWorkerManager.addEventListener(FILE_STREAMING_WORKER_NAME, 'message', handleChunkAppend);
+            }
+            tarPromise
+                .then(() => {
+                    if (!requestResolved) {
+                        const file = this.fileManager.get(fileUrl, isBytesOptimized ? undefined : offsets);
+                        requestResolved = true;
+                        resolveRequest(file?.buffer);
+                    }
+                })
+                .catch((error) => {
+                    rejectRequest(error);
+                })
+                .then(() => {
+                    webWorkerManager.removeEventListener(FILE_STREAMING_WORKER_NAME, 'message', handleChunkAppend);
+                });
+        });
+    }
+    delete(seriesInstanceUID) {
+        const fileUrls = this.seriesUidFileUrls[seriesInstanceUID];
+        if (fileUrls) {
+            fileUrls.forEach((fileUrl) => {
+                this.fileManager.remove(fileUrl);
+            });
+        }
+        delete this.seriesUidFileUrls[seriesInstanceUID];
+    }
+    deleteAll() {
+        Object.values(this.seriesUidFileUrls).forEach((fileUrls) => {
+            fileUrls.forEach((fileUrl) => {
+                this.fileManager.remove(fileUrl);
+            });
+        });
+        this.seriesUidFileUrls = {};
+    }
+    parseMetadata(metadata, type, sopInstanceUID) {
+        if (type === Enums.RequestType.INSTANCE_METADATA) {
+            return Object.values(metadata.cod.instances).find((instance) => instance.metadata['00080018']?.Value?.[0] === sopInstanceUID)?.metadata;
+        }
+        else {
+            return Object.values(metadata.cod.instances).map((instance) => instance.metadata);
+        }
+    }
+}
+export default CodDicomWebServer;
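
For orientation, a minimal usage sketch of the new CodDicomWebServer class (not part of the package contents): the domain, UIDs, and bearer token below are placeholders, the import assumes the package entry point re-exports the class, and the URL is assumed to pass the package's URL_VALIDATION_STRING check.

    import CodDicomWebServer from 'cod-dicomweb-server';

    // Optionally cap how many bytes the streaming worker may keep in memory.
    const server = new CodDicomWebServer({
      domain: 'https://example-domain',
      maxWorkerFetchSize: 500 * 1024 * 1024
    });

    // WADO-RS style frame URL under the configured domain (all UIDs are placeholders).
    const frameUrl =
      'https://example-domain/my-bucket/team-a/studies/1.2/series/3.4/instances/5.6/frames/1';

    // Resolves with the frame's pixel-data bytes; API_OPTIMIZED streaming is the default path.
    const pixelData = await server.fetchCod(frameUrl, { Authorization: 'Bearer <token>' });
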
@@ -0,0 +1,101 @@
+import constants, { Enums } from '../constants';
+export function parseWadorsURL(url, domain) {
+    if (!url.includes(constants.url.URL_VALIDATION_STRING)) {
+        return;
+    }
+    const filePath = url.split(domain + '/')[1];
+    const prefix = filePath.split('/studies')[0];
+    const prefixParts = prefix.split('/');
+    const bucketName = prefixParts[0];
+    const bucketPrefix = prefixParts.slice(1).join('/');
+    const imagePath = filePath.split(prefix + '/')[1];
+    const imageParts = imagePath.split('/');
+    const studyInstanceUID = imageParts[1];
+    const seriesInstanceUID = imageParts[3];
+    let sopInstanceUID = '', frameNumber = 1, type;
+    switch (true) {
+        case imageParts.includes('thumbnail'):
+            type = Enums.RequestType.THUMBNAIL;
+            break;
+        case imageParts.includes('metadata'):
+            if (imageParts.includes('instances')) {
+                sopInstanceUID = imageParts[5];
+                type = Enums.RequestType.INSTANCE_METADATA;
+            }
+            else {
+                type = Enums.RequestType.SERIES_METADATA;
+            }
+            break;
+        case imageParts.includes('frames'):
+            sopInstanceUID = imageParts[5];
+            frameNumber = +imageParts[7];
+            type = Enums.RequestType.FRAME;
+            break;
+        default:
+            throw new Error('Invalid type of request');
+    }
+    return {
+        type,
+        bucketName,
+        bucketPrefix,
+        studyInstanceUID,
+        seriesInstanceUID,
+        sopInstanceUID,
+        frameNumber
+    };
+}
+export function getFrameDetailsFromMetadata(seriesMetadata, sopInstanceUID, frameIndex, bucketDetails) {
+    if (!seriesMetadata || !seriesMetadata.cod?.instances) {
+        throw new Error('Invalid seriesMetadata provided.');
+    }
+    if (frameIndex === null || frameIndex === undefined) {
+        throw new Error('Frame index is required.');
+    }
+    const { domain, bucketName, bucketPrefix } = bucketDetails;
+    let thumbnailUrl;
+    if (seriesMetadata.thumbnail) {
+        const thumbnailGsUtilUri = seriesMetadata.thumbnail.uri;
+        thumbnailUrl = `${domain}/${thumbnailGsUtilUri.split('gs://')[1]}`;
+    }
+    const instanceFound = Object.values(seriesMetadata.cod.instances).find((instance) => instance.metadata['00080018']?.Value?.[0] === sopInstanceUID);
+    if (!instanceFound) {
+        return { thumbnailUrl };
+    }
+    const { url, uri, headers: offsetHeaders, offset_tables } = instanceFound;
+    const modifiedUrl = handleUrl(url || uri, domain, bucketName, bucketPrefix);
+    const { CustomOffsetTable, CustomOffsetTableLengths } = offset_tables;
+    let sliceStart, sliceEnd, isMultiframe = false;
+    if (CustomOffsetTable?.length && CustomOffsetTableLengths?.length) {
+        sliceStart = CustomOffsetTable[frameIndex];
+        sliceEnd = sliceStart + CustomOffsetTableLengths[frameIndex];
+        isMultiframe = true;
+    }
+    const { start_byte: fileStartByte, end_byte: fileEndByte } = offsetHeaders;
+    const startByte = sliceStart !== undefined ? fileStartByte + sliceStart : fileStartByte;
+    const endByte = sliceEnd !== undefined ? fileStartByte + sliceEnd : fileEndByte;
+    return {
+        url: modifiedUrl,
+        startByte,
+        endByte,
+        thumbnailUrl,
+        isMultiframe
+    };
+}
+export function handleUrl(url, domain, bucketName, bucketPrefix) {
+    let modifiedUrl = url;
+    const matchingExtension = constants.url.FILE_EXTENSIONS.find((extension) => url.includes(extension));
+    if (matchingExtension) {
+        const fileParts = url.split(matchingExtension);
+        modifiedUrl = fileParts[0] + matchingExtension;
+    }
+    const filePath = modifiedUrl.split('studies/')[1];
+    modifiedUrl = `${domain}/${bucketName}/${bucketPrefix ? bucketPrefix + '/' : ''}studies/${filePath}`;
+    return modifiedUrl;
+}
+export function createMetadataJsonUrl(params) {
+    const { domain = constants.url.DOMAIN, bucketName, bucketPrefix, studyInstanceUID, seriesInstanceUID } = params;
+    if (!bucketName || !bucketPrefix || !studyInstanceUID || !seriesInstanceUID) {
+        return;
+    }
+    return `${domain}/${bucketName}/${bucketPrefix}/studies/${studyInstanceUID}/series/${seriesInstanceUID}/metadata.json`;
+}
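
A sketch of what these URL helpers produce for a placeholder frame URL (illustrative values only; the URL is assumed to pass the URL_VALIDATION_STRING check, and the import path is a placeholder):

    import { parseWadorsURL, createMetadataJsonUrl } from './classes/utils';

    const details = parseWadorsURL(
      'https://example-domain/my-bucket/team-a/studies/1.2/series/3.4/instances/5.6/frames/2',
      'https://example-domain'
    );
    // details => { type: RequestType.FRAME, bucketName: 'my-bucket', bucketPrefix: 'team-a',
    //              studyInstanceUID: '1.2', seriesInstanceUID: '3.4', sopInstanceUID: '5.6',
    //              frameNumber: 2 }

    const metadataUrl = createMetadataJsonUrl({
      domain: 'https://example-domain',
      bucketName: 'my-bucket',
      bucketPrefix: 'team-a',
      studyInstanceUID: '1.2',
      seriesInstanceUID: '3.4'
    });
    // => 'https://example-domain/my-bucket/team-a/studies/1.2/series/3.4/metadata.json'
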
@@ -0,0 +1,19 @@
+export var FetchType;
+(function (FetchType) {
+    /**
+     * Fetch only the part of the file according to the offsets provided.
+     */
+    FetchType[FetchType["BYTES_OPTIMIZED"] = 0] = "BYTES_OPTIMIZED";
+    /**
+     * Stream the file and returns the part of the file if offsets are provided.
+     * Or returns the whole file.
+     */
+    FetchType[FetchType["API_OPTIMIZED"] = 1] = "API_OPTIMIZED";
+})(FetchType || (FetchType = {}));
+export var RequestType;
+(function (RequestType) {
+    RequestType[RequestType["FRAME"] = 0] = "FRAME";
+    RequestType[RequestType["THUMBNAIL"] = 1] = "THUMBNAIL";
+    RequestType[RequestType["SERIES_METADATA"] = 2] = "SERIES_METADATA";
+    RequestType[RequestType["INSTANCE_METADATA"] = 3] = "INSTANCE_METADATA";
+})(RequestType || (RequestType = {}));
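
The two FetchType values select the two fetch paths in CodDicomWebServer.fetchFile: BYTES_OPTIMIZED issues a single Range request through the filePartial worker, while API_OPTIMIZED streams the whole file through the fileStreaming worker. A sketch of opting into the byte-range path (server and frameUrl as in the earlier sketch; the Enums import path is a placeholder):

    import { Enums } from './constants';

    const frameBytes = await server.fetchCod(frameUrl, {}, {
      fetchType: Enums.FetchType.BYTES_OPTIMIZED
    });
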
@@ -0,0 +1,52 @@
+import { getWebWorkerManager } from './webWorker/workerManager';
+class FileManager {
+    files = {};
+    fileStreamingWorkerName;
+    constructor({ fileStreamingWorkerName }) {
+        this.fileStreamingWorkerName = fileStreamingWorkerName;
+    }
+    set(url, file) {
+        this.files[url] = file;
+    }
+    get(url, offsets) {
+        if (!this.files[url] || (offsets && this.files[url].position <= offsets.endByte)) {
+            return null;
+        }
+        return offsets ? this.files[url].data.slice(offsets.startByte, offsets.endByte) : this.files[url].data;
+    }
+    setPosition(url, position) {
+        if (this.files[url]) {
+            this.files[url].position = position;
+        }
+    }
+    getPosition(url) {
+        return this.files[url]?.position;
+    }
+    append(url, chunk, position) {
+        if (this.files[url] && position) {
+            this.files[url].data.set(chunk, position - chunk.length);
+            this.setPosition(url, position);
+        }
+    }
+    getTotalSize() {
+        return Object.entries(this.files).reduce((total, [url, { position }]) => {
+            return url.includes('?bytes=') ? total : total + position;
+        }, 0);
+    }
+    remove(url) {
+        const removedSize = this.getPosition(url);
+        delete this.files[url];
+        if (url.includes('?bytes=')) {
+            return;
+        }
+        const workerManager = getWebWorkerManager();
+        workerManager.executeTask(this.fileStreamingWorkerName, 'decreaseFetchedSize', removedSize);
+    }
+    purge() {
+        const totalSize = this.getTotalSize();
+        this.files = {};
+        const workerManager = getWebWorkerManager();
+        workerManager.executeTask(this.fileStreamingWorkerName, 'decreaseFetchedSize', totalSize);
+    }
+}
+export default FileManager;
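
A standalone sketch of the cache contract FileManager implements (worker name, URL, and sizes are placeholders): the streaming path first set()s a whole-file buffer, append() copies chunks in and advances position, and get() with offsets only returns once position has passed endByte.

    import FileManager from './fileManager';

    const fileManager = new FileManager({ fileStreamingWorkerName: 'fileStreaming' });
    const url = 'https://example-domain/my-bucket/team-a/studies/1.2/file.dcm';

    fileManager.set(url, { data: new Uint8Array(1024), position: 0 });
    fileManager.append(url, new Uint8Array(256), 256);    // bytes 0-255 are now valid
    fileManager.get(url, { startByte: 0, endByte: 128 }); // => 128-byte Uint8Array slice
    fileManager.get(url, { startByte: 0, endByte: 512 }); // => null, not enough data yet
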
@@ -2,6 +2,7 @@ import type { JsonMetadata, MetadataUrlCreationParams } from './types';
 declare class MetadataManager {
     private metadata;
     constructor();
+    addDeidMetadata(jsonMetadata: JsonMetadata): void;
     getMetadataFromCache(url: string): JsonMetadata;
     getMetadata(params: MetadataUrlCreationParams, headers: Record<string, string>): Promise<JsonMetadata | null>;
 }
@@ -0,0 +1,46 @@
+import { createMetadataJsonUrl } from './classes/utils';
+class MetadataManager {
+    metadata = {};
+    constructor() { }
+    addDeidMetadata(jsonMetadata) {
+        const { deid_study_uid, deid_series_uid, cod } = jsonMetadata;
+        if (!cod || !deid_study_uid || !deid_series_uid) {
+            console.warn('Missing required metadata properties: cod, deid_study_uid, or deid_series_uid');
+            return;
+        }
+        for (const deid_sop_uid in cod.instances) {
+            const instance = cod.instances[deid_sop_uid];
+            instance.metadata.DeidStudyInstanceUID = { Value: [deid_study_uid] };
+            instance.metadata.DeidSeriesInstanceUID = { Value: [deid_series_uid] };
+            instance.metadata.DeidSopInstanceUID = { Value: [deid_sop_uid] };
+        }
+    }
+    getMetadataFromCache(url) {
+        return this.metadata[url];
+    }
+    async getMetadata(params, headers) {
+        const url = createMetadataJsonUrl(params);
+        if (!url) {
+            throw new Error('Error creating metadata json url');
+        }
+        const cachedMetadata = this.getMetadataFromCache(url);
+        if (cachedMetadata) {
+            return cachedMetadata;
+        }
+        try {
+            const response = await fetch(url, { headers });
+            if (!response.ok) {
+                throw new Error(`Failed to fetch metadata: ${response.statusText}`);
+            }
+            const data = await response.json();
+            this.addDeidMetadata(data);
+            this.metadata[url] = data;
+            return data;
+        }
+        catch (error) {
+            console.error(error);
+            throw error;
+        }
+    }
+}
+export default MetadataManager;
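
A sketch of the metadata flow (placeholder bucket, UIDs, and auth header): getMetadata builds the metadata.json URL, fetches and caches it per URL, and runs addDeidMetadata so that, when the JSON carries the deid_* fields, every instance's metadata also exposes the de-identified UIDs.

    import MetadataManager from './metadataManager';

    const metadataManager = new MetadataManager();
    const seriesMetadata = await metadataManager.getMetadata(
      {
        domain: 'https://example-domain',
        bucketName: 'my-bucket',
        bucketPrefix: 'team-a',
        studyInstanceUID: '1.2',
        seriesInstanceUID: '3.4'
      },
      { Authorization: 'Bearer <token>' }
    );
    // Each seriesMetadata.cod.instances[<deid_sop_uid>].metadata entry now also carries
    // DeidStudyInstanceUID / DeidSeriesInstanceUID / DeidSopInstanceUID values.
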
@@ -0,0 +1,7 @@
+export * from './codDicomWebServerOptions';
+export * from './fileManagerOptions';
+export * from './metadata';
+export * from './metadataUrlCreationParams';
+export * from './parsedWadoRsUrlDetails';
+export * from './requestOptions';
+export * from './workerCustomMessageEvents';
@@ -0,0 +1 @@
+export {};
@@ -0,0 +1 @@
+export {};
@@ -0,0 +1 @@
+export {};
@@ -0,0 +1 @@
+export {};
@@ -0,0 +1 @@
+export {};
@@ -0,0 +1,16 @@
+import { getWebWorkerManager } from './workerManager';
+export function registerWorkers(workerNames, maxFetchSize) {
+    const { fileStreamingWorkerName, filePartialWorkerName } = workerNames;
+    const workerManager = getWebWorkerManager();
+    // fileStreaming worker
+    const streamingWorkerFn = () => new Worker(new URL('./workers/fileStreamingWorker', import.meta.url), {
+        name: fileStreamingWorkerName
+    });
+    workerManager.registerWorker(fileStreamingWorkerName, streamingWorkerFn);
+    workerManager.executeTask(fileStreamingWorkerName, 'setMaxFetchSize', maxFetchSize);
+    // filePartial worker
+    const partialWorkerFn = () => new Worker(new URL('./workers/filePartialWorker', import.meta.url), {
+        name: filePartialWorkerName
+    });
+    workerManager.registerWorker(filePartialWorkerName, partialWorkerFn);
+}
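
registerWorkers wires both workers into the shared worker manager; the new URL('./workers/...', import.meta.url) form assumes a bundler that resolves such worker references. A sketch with placeholder names and a 500 MB fetch cap:

    import { registerWorkers } from './webWorker/registerWorkers';

    registerWorkers(
      { fileStreamingWorkerName: 'fileStreaming', filePartialWorkerName: 'filePartial' },
      500 * 1024 * 1024 // maxFetchSize, forwarded to the streaming worker's setMaxFetchSize task
    );
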
@@ -0,0 +1,11 @@
+const filePartial = {
+    async partial(args) {
+        const { url, headers } = args;
+        return fetch(url, { headers })
+            .then((response) => response.arrayBuffer())
+            .catch((error) => {
+                throw new Error('filePartial.ts: Error when fetching file: ' + error?.message);
+            });
+    }
+};
+export default filePartial;
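
For context, the arguments the 'partial' task receives come from fetchFile, which sets a Range header from the frame offsets and strips any '?bytes=' suffix before delegating. A sketch with a placeholder URL and byte range:

    import filePartial from './filePartial';

    const arrayBuffer = await filePartial.partial({
      url: 'https://example-domain/my-bucket/team-a/studies/1.2/series/3.4/file.dcm',
      headers: { Range: 'bytes=1000-1999' } // inclusive end, hence the endByte - 1 upstream
    });
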