cod-dicomweb-server 1.0.0 → 1.0.2
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registry.
- package/dist/430.min.js +7 -0
- package/dist/663.min.js +7 -0
- package/dist/index.html +1 -1
- package/dist/main.min.js +9 -0
- package/package.json +3 -3
- package/dist/assets/js/430.ae979bb9f7321087b4cd.js +0 -2
- package/dist/assets/js/430.ae979bb9f7321087b4cd.js.LICENSE.txt +0 -5
- package/dist/assets/js/663.f8ac8210581651c53c7e.js +0 -2
- package/dist/assets/js/663.f8ac8210581651c53c7e.js.LICENSE.txt +0 -5
- package/dist/assets/js/main.bd27b3d8a119b2e0661f.js +0 -2
- package/dist/assets/js/main.bd27b3d8a119b2e0661f.js.LICENSE.txt +0 -7
- package/dist/classes/CodDicomWebServer.ts +0 -423
- package/dist/classes/utils.ts +0 -176
- package/dist/constants/enums.ts +0 -18
- package/dist/constants/index.ts +0 -8
- package/dist/constants/url.ts +0 -5
- package/dist/constants/worker.ts +0 -4
- package/dist/fileManager.ts +0 -84
- package/dist/index.ts +0 -5
- package/dist/metadataManager.ts +0 -32
- package/dist/types/codDicomWebServerOptions.ts +0 -7
- package/dist/types/fileManagerOptions.ts +0 -3
- package/dist/types/index.ts +0 -7
- package/dist/types/metadata.ts +0 -57
- package/dist/types/metadataUrlCreationParams.ts +0 -9
- package/dist/types/parsedWadoRsUrlDetails.ts +0 -13
- package/dist/types/requestOptions.ts +0 -12
- package/dist/types/workerCustomMessageEvents.ts +0 -11
- package/dist/webWorker/registerWorker.ts +0 -33
- package/dist/webWorker/workerManager.ts +0 -83
- package/dist/webWorker/workers/filePartial.ts +0 -20
- package/dist/webWorker/workers/fileStreaming.ts +0 -130
package/dist/fileManager.ts
DELETED
@@ -1,84 +0,0 @@
-import type { FileManagerOptions } from './types';
-import { getWebWorkerManager } from './webWorker/workerManager';
-
-class FileManager {
-  private files: Record<string, { data: Uint8Array; position: number }> = {};
-  private fileStreamingWorkerName: string;
-
-  constructor({ fileStreamingWorkerName }: FileManagerOptions) {
-    this.fileStreamingWorkerName = fileStreamingWorkerName;
-  }
-
-  set(url: string, file: { data: Uint8Array; position: number }): void {
-    this.files[url] = file;
-  }
-
-  get(
-    url: string,
-    offsets?: { startByte: number; endByte: number },
-  ): Uint8Array | null {
-    if (
-      !this.files[url] ||
-      (offsets && this.files[url].position <= offsets.endByte)
-    ) {
-      return null;
-    }
-
-    return offsets
-      ? this.files[url].data.slice(offsets.startByte, offsets.endByte)
-      : this.files[url].data;
-  }
-
-  setPosition(url: string, position: number): void {
-    if (this.files[url]) {
-      this.files[url].position = position;
-    }
-  }
-
-  getPosition(url: string): number {
-    return this.files[url]?.position;
-  }
-
-  append(url: string, chunk: Uint8Array, position: number): void {
-    if (this.files[url] && position) {
-      this.files[url].data.set(chunk, position - chunk.length);
-      this.setPosition(url, position);
-    }
-  }
-
-  getTotalSize(): number {
-    return Object.entries(this.files).reduce((total, [url, { position }]) => {
-      return url.includes('?bytes=') ? total : total + position;
-    }, 0);
-  }
-
-  remove(url: string): void {
-    const removedSize = this.getPosition(url);
-    delete this.files[url];
-
-    if (url.includes('?bytes=')) {
-      return;
-    }
-
-    const workerManager = getWebWorkerManager();
-    workerManager.executeTask(
-      this.fileStreamingWorkerName,
-      'decreaseFetchedSize',
-      removedSize,
-    );
-  }
-
-  purge(): void {
-    const totalSize = this.getTotalSize();
-    this.files = {};
-
-    const workerManager = getWebWorkerManager();
-    workerManager.executeTask(
-      this.fileStreamingWorkerName,
-      'decreaseFetchedSize',
-      totalSize,
-    );
-  }
-}
-
-export default FileManager;
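
For reference, a minimal sketch of how the removed FileManager cache could be driven from the main thread in 1.0.0. The worker name, URL, and buffer sizes below are illustrative placeholders, not values taken from the package.

    import FileManager from './fileManager';

    // 'fileStreamingWorker' is a hypothetical name; the real one lives in constants/worker.ts.
    const fileManager = new FileManager({ fileStreamingWorkerName: 'fileStreamingWorker' });

    // Seed an entry sized for the whole file, then record streamed bytes as they arrive.
    const url = 'https://example.com/studies/1.2.3/series/4.5.6/instance.dcm';
    fileManager.set(url, { data: new Uint8Array(1024), position: 0 });
    fileManager.append(url, new Uint8Array(256), 256);

    // get() returns null until the requested byte range has actually been fetched.
    const bytes = fileManager.get(url, { startByte: 0, endByte: 128 });
    console.log(bytes?.length, fileManager.getTotalSize());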
package/dist/index.ts
DELETED
package/dist/metadataManager.ts
DELETED
@@ -1,32 +0,0 @@
-import { createMetadataJsonUrl } from './classes/utils';
-import type { JsonMetadata, MetadataUrlCreationParams } from './types';
-
-const metadata: Record<string, JsonMetadata> = {};
-
-export async function getMetadata(
-  params: MetadataUrlCreationParams,
-  headers: Record<string, string>,
-): Promise<JsonMetadata | null> {
-  const url = createMetadataJsonUrl(params);
-
-  if (!url) {
-    throw new Error('Error creating metadata json url');
-  }
-
-  if (metadata[url]) {
-    return metadata[url];
-  }
-
-  try {
-    const response = await fetch(url, { headers });
-    if (!response.ok) {
-      throw new Error(`Failed to fetch metadata: ${response.statusText}`);
-    }
-    const data = await response.json();
-    metadata[url] = data;
-    return data;
-  } catch (error) {
-    console.error(error);
-    throw error;
-  }
-}
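
A hedged sketch of calling the removed getMetadata helper. The exact MetadataUrlCreationParams fields are defined in the (also deleted) types/metadataUrlCreationParams.ts and are not reproduced in this diff, so the params object below is only a placeholder; the Authorization header is likewise an assumption.

    import { getMetadata } from './metadataManager';
    import type { MetadataUrlCreationParams } from './types';

    // Placeholder params; the real field names come from metadataUrlCreationParams.ts.
    const params = {} as MetadataUrlCreationParams;

    // Responses are memoized per metadata.json URL, so repeat calls return the cached object.
    getMetadata(params, { Authorization: 'Bearer <token>' })
      .then((json) => console.log(Object.keys(json?.cod.instances ?? {})))
      .catch((err) => console.error('metadata fetch failed', err));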
package/dist/types/index.ts
DELETED
@@ -1,7 +0,0 @@
-export type * from './codDicomWebServerOptions';
-export type * from './fileManagerOptions';
-export type * from './metadata';
-export type * from './metadataUrlCreationParams';
-export type * from './parsedWadoRsUrlDetails';
-export type * from './requestOptions';
-export type * from './workerCustomMessageEvents';
package/dist/types/metadata.ts
DELETED
@@ -1,57 +0,0 @@
-/**
- * Metadata format stored in the metadata.json
- */
-type JsonMetadata = {
-  deid_study_uid: string;
-  deid_series_uid: string;
-  cod: {
-    instances: Record<
-      string,
-      {
-        metadata: InstanceMetadata;
-        // The metadata will either have url or uri
-        uri: string;
-        url: string;
-        headers: { start_byte: number; end_byte: number };
-        offset_tables: {
-          CustomOffsetTable?: number[];
-          CustomOffsetTableLengths?: number[];
-        };
-        crc32c: string;
-        size: number;
-        original_path: string;
-        dependencies: string[];
-        diff_hash_dupe_paths: [string];
-        version: string;
-        modified_datetime: string;
-      }
-    >;
-  };
-  thumbnail: {
-    version: string;
-    uri: string;
-    thumbnail_index_to_instance_frame: [string, number][];
-    instances: Record<
-      string,
-      {
-        frames: {
-          thumbnail_index: number;
-          anchors: {
-            original_size: { width: number; height: number };
-            thumbnail_upper_left: { row: number; col: number };
-            thumbnail_bottom_right: { row: number; col: number };
-          };
-        }[];
-      }
-    >;
-  };
-};
-
-type InstanceMetadata = Record<
-  string,
-  { vr: string; Value?: unknown[]; BulkDataURI?: string; InlineBinary?: string }
->;
-
-type SeriesMetadata = InstanceMetadata[];
-
-export type { InstanceMetadata, SeriesMetadata, JsonMetadata };
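
As an illustration of how the JsonMetadata shape above is navigated, a short sketch; the function and variable names are arbitrary, not part of the package.

    import type { JsonMetadata } from './types';

    // Walk the per-instance entries of a parsed metadata.json document.
    function describeInstances(meta: JsonMetadata): void {
      for (const [sopInstanceUID, instance] of Object.entries(meta.cod.instances)) {
        const { headers, size, offset_tables } = instance;
        console.log(
          sopInstanceUID,
          `bytes ${headers.start_byte}-${headers.end_byte} of ${size}`,
          `offset table entries: ${offset_tables.CustomOffsetTable?.length ?? 0}`,
        );
      }
    }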
package/dist/types/parsedWadoRsUrlDetails.ts
DELETED
@@ -1,13 +0,0 @@
-import type { Enums } from '../constants';
-
-type ParsedWadoRsUrlDetails = {
-  type: Enums.RequestType;
-  bucketName: string;
-  bucketPrefix: string;
-  studyInstanceUID: string;
-  seriesInstanceUID: string;
-  sopInstanceUID: string;
-  frameNumber: number;
-};
-
-export type { ParsedWadoRsUrlDetails };
package/dist/types/requestOptions.ts
DELETED
@@ -1,12 +0,0 @@
-import { Enums } from '../constants';
-
-type CODRequestOptions = {
-  useSharedArrayBuffer?: boolean;
-  fetchType?: Enums.FetchType;
-};
-
-type FileRequestOptions = CODRequestOptions & {
-  offsets?: { startByte: number; endByte: number };
-};
-
-export type { CODRequestOptions, FileRequestOptions };
package/dist/webWorker/registerWorker.ts
DELETED
@@ -1,33 +0,0 @@
-import { getWebWorkerManager } from './workerManager';
-
-export function registerWorkers(
-  workerNames: {
-    fileStreamingWorkerName: string;
-    filePartialWorkerName: string;
-  },
-  maxFetchSize: number,
-): void {
-  const { fileStreamingWorkerName, filePartialWorkerName } = workerNames;
-  const workerManager = getWebWorkerManager();
-
-  // fileStreaming worker
-  const streamingWorkerFn = (): Worker =>
-    new Worker(new URL('./workers/fileStreaming.ts', import.meta.url), {
-      name: fileStreamingWorkerName,
-    });
-
-  workerManager.registerWorker(fileStreamingWorkerName, streamingWorkerFn);
-  workerManager.executeTask(
-    fileStreamingWorkerName,
-    'setMaxFetchSize',
-    maxFetchSize,
-  );
-
-  // filePartial worker
-  const partialWorkerFn = (): Worker =>
-    new Worker(new URL('./workers/filePartial.ts', import.meta.url), {
-      name: filePartialWorkerName,
-    });
-
-  workerManager.registerWorker(filePartialWorkerName, partialWorkerFn);
-}
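
A sketch of how registerWorkers was presumably called at startup; the worker names and the fetch-size budget are assumptions, not the package's own defaults.

    import { registerWorkers } from './webWorker/registerWorker';

    registerWorkers(
      {
        fileStreamingWorkerName: 'fileStreamingWorker', // hypothetical names; the package's
        filePartialWorkerName: 'filePartialWorker',     // own defaults live in constants/worker.ts
      },
      512 * 1024 * 1024, // illustrative cap on streamed bytes before fetches are rejected
    );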
package/dist/webWorker/workerManager.ts
DELETED
@@ -1,83 +0,0 @@
-import { type Remote, wrap } from 'comlink';
-
-import type { FileStreamingMessageEvent } from '../types';
-
-class WebWorkerManager {
-  private workerRegistry: Record<
-    string,
-    { instance: Remote<Worker>; nativeWorker: Worker }
-  > = {};
-
-  public registerWorker(name: string, workerFn: () => Worker): void {
-    try {
-      const worker: Worker = workerFn();
-      if (!worker) {
-        return;
-      }
-
-      this.workerRegistry[name] = {
-        instance: wrap(worker),
-        nativeWorker: worker,
-      };
-    } catch (error) {
-      console.warn(error);
-    }
-  }
-
-  public async executeTask(
-    workerName: string,
-    taskName: string,
-    options: Record<string, unknown> | unknown,
-  ): Promise<void | ArrayBufferLike> {
-    const worker = this.workerRegistry[workerName]?.instance;
-    if (!worker) {
-      throw new Error(`Worker ${workerName} not registered`);
-    }
-
-    try {
-      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-      // @ts-ignore
-      return await worker[taskName](options);
-    } catch (error) {
-      console.error(
-        `Error executing task "${taskName}" on worker "${workerName}":`,
-        error,
-      );
-      throw new Error(`Task "${taskName}" failed: ${(error as Error).message}`);
-    }
-  }
-
-  public addEventListener(
-    workerName: string,
-    eventType: keyof WorkerEventMap,
-    listener: (evt: FileStreamingMessageEvent | ErrorEvent) => unknown,
-  ): void {
-    const worker = this.workerRegistry[workerName];
-    if (!worker) {
-      console.error(`Worker type '${workerName}' is not registered.`);
-      return;
-    }
-
-    worker.nativeWorker.addEventListener(eventType, listener);
-  }
-
-  public removeEventListener(
-    workerName: string,
-    eventType: keyof WorkerEventMap,
-    listener: (evt: FileStreamingMessageEvent | ErrorEvent) => unknown,
-  ): void {
-    const worker = this.workerRegistry[workerName];
-    if (!worker) {
-      console.error(`Worker type '${workerName}' is not registered.`);
-      return;
-    }
-
-    worker.nativeWorker.removeEventListener(eventType, listener);
-  }
-}
-
-const webWorkerManager = new WebWorkerManager();
-
-export function getWebWorkerManager(): WebWorkerManager {
-  return webWorkerManager;
-}
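
A short sketch of calling into a registered worker through the manager; the worker name and the size value are assumed, and the task name refers to the method exposed by the streaming worker shown further below.

    import { getWebWorkerManager } from './webWorker/workerManager';

    const workerManager = getWebWorkerManager();

    // executeTask proxies the call onto the comlink-wrapped worker and rejects if the
    // worker was never registered or the task itself throws.
    workerManager
      .executeTask('fileStreamingWorker', 'setMaxFetchSize', 256 * 1024 * 1024)
      .catch((err) => console.error('worker call failed:', err));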
package/dist/webWorker/workers/filePartial.ts
DELETED
@@ -1,20 +0,0 @@
-import { expose } from 'comlink';
-
-const filePartial = {
-  async partial(args: {
-    url: string;
-    headers?: Record<string, string>;
-  }): Promise<ArrayBufferLike | Error> {
-    const { url, headers } = args;
-
-    return fetch(url, { headers })
-      .then((response) => response.arrayBuffer())
-      .catch((error) => {
-        throw new Error(
-          'filePartial.ts: Error when fetching file: ' + error?.message,
-        );
-      });
-  },
-};
-
-expose(filePartial);
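
A hedged sketch of requesting a byte range through the filePartial worker; the worker name, URL, and Range header are illustrative assumptions rather than values from the package.

    import { getWebWorkerManager } from './webWorker/workerManager';

    getWebWorkerManager()
      .executeTask('filePartialWorker', 'partial', {
        url: 'https://example.com/instance.dcm', // placeholder URL
        headers: { Range: 'bytes=0-65535' },     // assumed range request
      })
      .then((buffer) => {
        const bytes = buffer as ArrayBufferLike;
        console.log('fetched', bytes ? bytes.byteLength : 0, 'bytes');
      });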
package/dist/webWorker/workers/fileStreaming.ts
DELETED
@@ -1,130 +0,0 @@
-import { expose } from 'comlink';
-
-const fileStreaming = {
-  maxFetchSize: 0,
-  fetchedSize: 0,
-
-  setMaxFetchSize(size: number): void {
-    if (size > 0) {
-      this.maxFetchSize = size;
-    }
-  },
-
-  decreaseFetchedSize(size: number): void {
-    if (size > 0 && size <= this.fetchedSize) {
-      this.fetchedSize -= size;
-    }
-  },
-
-  async stream(args: {
-    url: string;
-    headers?: Record<string, string>;
-    useSharedArrayBuffer?: boolean;
-  }): Promise<void> {
-    const { url, headers, useSharedArrayBuffer } = args;
-    const controller = new AbortController();
-    let sharedArraybuffer: SharedArrayBuffer | null = null;
-    let fileArraybuffer: Uint8Array | null = null;
-
-    try {
-      const response = await fetch(url, {
-        headers: { ...headers },
-        signal: controller.signal,
-      });
-
-      if (!response.ok) {
-        throw new Error(`HTTP error! status: ${response.status}`);
-      }
-
-      const reader = response.body?.getReader();
-      if (!reader) {
-        throw new Error('Failed to get reader from response body');
-      }
-
-      let result: ReadableStreamReadResult<Uint8Array>;
-      let completed = false;
-      const totalLength = parseInt(
-        response.headers.get('Content-Length') || '0',
-        10,
-      );
-      const firstChunk = await reader.read();
-      completed = firstChunk.done;
-
-      if (!firstChunk.value) {
-        throw new Error('The fetched chunks does not have value');
-      }
-
-      if (!completed) {
-        let position = firstChunk.value.length;
-
-        if (
-          this.maxFetchSize &&
-          this.fetchedSize + position > this.maxFetchSize
-        ) {
-          controller.abort();
-          throw new Error(
-            `Maximum size(${this.maxFetchSize}) for fetching files reached`,
-          );
-        }
-
-        this.fetchedSize += position;
-
-        if (useSharedArrayBuffer) {
-          sharedArraybuffer = new SharedArrayBuffer(totalLength);
-          fileArraybuffer = new Uint8Array(sharedArraybuffer);
-        } else {
-          fileArraybuffer = new Uint8Array(totalLength);
-        }
-        fileArraybuffer.set(firstChunk.value);
-        postMessage({ url, position, fileArraybuffer });
-
-        while (!completed) {
-          result = await reader.read();
-
-          if (result.done) {
-            completed = true;
-            continue;
-          }
-
-          const chunk = result.value;
-
-          if (
-            this.maxFetchSize &&
-            this.fetchedSize + chunk.length > this.maxFetchSize
-          ) {
-            sharedArraybuffer = null;
-            fileArraybuffer = null;
-            controller.abort();
-            throw new Error(
-              `Maximum size(${this.maxFetchSize}) for fetching files reached`,
-            );
-          }
-
-          this.fetchedSize += chunk.length;
-          fileArraybuffer.set(chunk, position);
-          position += chunk.length;
-
-          postMessage({
-            isAppending: true,
-            url,
-            position: position,
-            chunk: !useSharedArrayBuffer ? chunk : null,
-          });
-        }
-      }
-    } catch (error) {
-      const streamingError = new Error(
-        'fileStreaming.ts: ' + (error as Error).message ||
-          'An error occured when streaming',
-      );
-      console.error(streamingError.message, error);
-      throw streamingError;
-    } finally {
-      sharedArraybuffer = null;
-      fileArraybuffer = null;
-      controller.abort();
-    }
-  },
-};
-
-expose(fileStreaming);
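
Finally, a sketch of how the streaming worker's postMessage payloads were plausibly wired back into FileManager on the main thread, assuming the message shapes match the postMessage calls above; the worker name and URL are placeholders.

    import FileManager from './fileManager';
    import { getWebWorkerManager } from './webWorker/workerManager';

    const fileStreamingWorkerName = 'fileStreamingWorker'; // hypothetical name
    const fileManager = new FileManager({ fileStreamingWorkerName });
    const workerManager = getWebWorkerManager();

    // The first message carries the pre-sized buffer; later messages append chunks.
    workerManager.addEventListener(fileStreamingWorkerName, 'message', (evt) => {
      const { url, position, fileArraybuffer, isAppending, chunk } =
        (evt as unknown as MessageEvent).data ?? {};

      if (isAppending) {
        if (chunk) {
          fileManager.append(url, chunk, position);
        } else {
          // SharedArrayBuffer mode: the buffer is shared, only the write position advances.
          fileManager.setPosition(url, position);
        }
      } else if (fileArraybuffer) {
        fileManager.set(url, { data: fileArraybuffer, position });
      }
    });

    // Kick off the stream; the promise rejects on abort or when the size cap is hit.
    workerManager
      .executeTask(fileStreamingWorkerName, 'stream', { url: 'https://example.com/instance.dcm' })
      .catch((err) => console.error(err));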