dcmjs 0.49.3 → 0.50.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +50 -0
- package/build/dcmjs.es.js +1071 -112
- package/build/dcmjs.es.js.map +1 -1
- package/build/dcmjs.js +1071 -112
- package/build/dcmjs.js.map +1 -1
- package/build/dcmjs.min.js +2 -2
- package/build/dcmjs.min.js.map +1 -1
- package/generate/dictionary.mjs +56029 -0
- package/package.json +18 -2
- package/.babelrc +0 -9
- package/.github/workflows/lint-and-format.yml +0 -27
- package/.github/workflows/publish-package.yml +0 -45
- package/.github/workflows/tests.yml +0 -24
- package/.prettierrc +0 -5
- package/.vscode/extensions.json +0 -7
- package/.vscode/settings.json +0 -8
- package/changelog.md +0 -31
- package/docs/ArrayBufferExpanderListener.md +0 -303
- package/docs/AsyncDicomReader-skill.md +0 -730
- package/eslint.config.mjs +0 -30
- package/generate-dictionary.js +0 -145
- package/jest.setup.js +0 -39
- package/netlify.toml +0 -22
- package/rollup.config.mjs +0 -57
- package/test/ArrayBufferExpanderListener.test.js +0 -365
- package/test/DICOMWEB.test.js +0 -1
- package/test/DicomMetaDictionary.test.js +0 -73
- package/test/SequenceOfItems.test.js +0 -86
- package/test/adapters.test.js +0 -43
- package/test/anonymizer.test.js +0 -176
- package/test/arrayItem.json +0 -351
- package/test/async-data.test.js +0 -575
- package/test/data-encoding.test.js +0 -59
- package/test/data-options.test.js +0 -199
- package/test/data.test.js +0 -1776
- package/test/derivations.test.js +0 -1
- package/test/helper/DicomDataReadBufferStreamBuilder.js +0 -89
- package/test/information-filter.test.js +0 -165
- package/test/integration/DicomMessage.readFile.test.js +0 -50
- package/test/lossless-read-write.test.js +0 -1407
- package/test/mocks/minimal_fields_dataset.json +0 -17
- package/test/mocks/null_number_vrs_dataset.json +0 -102
- package/test/normalizers.test.js +0 -38
- package/test/odd-frame-bit-data.js +0 -138
- package/test/rawTags.js +0 -170
- package/test/readBufferStream.test.js +0 -158
- package/test/sample-dicom.json +0 -904
- package/test/sample-op.lei +0 -0
- package/test/sample-sr.json +0 -997
- package/test/sr-tid.test.js +0 -251
- package/test/testUtils.js +0 -85
- package/test/utilities/deepEqual.test.js +0 -87
- package/test/utilities.test.js +0 -205
- package/test/video-test-dict.js +0 -40
- package/test/writeBufferStream.test.js +0 -149
package/test/data.test.js
DELETED
|
@@ -1,1776 +0,0 @@
|
|
|
1
|
-
import { jest } from "@jest/globals";
|
|
2
|
-
import fs from "fs";
|
|
3
|
-
import fsPromises from "fs/promises";
|
|
4
|
-
import path from "path";
|
|
5
|
-
import { WriteBufferStream } from "../src/BufferStream";
|
|
6
|
-
import dcmjs from "../src/index.js";
|
|
7
|
-
import { log } from "./../src/log.js";
|
|
8
|
-
import { getTestDataset, getZippedTestDataset } from "./testUtils.js";
|
|
9
|
-
|
|
10
|
-
import { promisify } from "util";
|
|
11
|
-
import arrayItem from "./arrayItem.json";
|
|
12
|
-
import minimalDataset from "./mocks/minimal_fields_dataset.json";
|
|
13
|
-
import datasetWithNullNumberVRs from "./mocks/null_number_vrs_dataset.json";
|
|
14
|
-
import { rawTags } from "./rawTags";
|
|
15
|
-
import sampleDicomSR from "./sample-sr.json";
|
|
16
|
-
|
|
17
|
-
import { ValueRepresentation } from "../src/ValueRepresentation";
|
|
18
|
-
import {
|
|
19
|
-
EXPLICIT_LITTLE_ENDIAN,
|
|
20
|
-
IMPLICIT_LITTLE_ENDIAN,
|
|
21
|
-
PADDING_SPACE
|
|
22
|
-
} from "./../src/constants/dicom.js";
|
|
23
|
-
|
|
24
|
-
const { DicomMetaDictionary, DicomDict, DicomMessage, ReadBufferStream } =
|
|
25
|
-
dcmjs.data;
|
|
26
|
-
|
|
27
|
-
const fileMetaInformationVersionArray = new Uint8Array(2);
|
|
28
|
-
fileMetaInformationVersionArray[1] = 1;
|
|
29
|
-
|
|
30
|
-
// The asset downloads in this file might take some time on a slower connection
|
|
31
|
-
jest.setTimeout(60000);
|
|
32
|
-
|
|
33
|
-
const metadata = {
|
|
34
|
-
"00020001": {
|
|
35
|
-
Value: [fileMetaInformationVersionArray.buffer],
|
|
36
|
-
vr: "OB"
|
|
37
|
-
},
|
|
38
|
-
"00020012": {
|
|
39
|
-
Value: ["1.2.840.113819.7.1.1997.1.0"],
|
|
40
|
-
vr: "UI"
|
|
41
|
-
},
|
|
42
|
-
"00020002": {
|
|
43
|
-
Value: ["1.2.840.10008.5.1.4.1.1.4"],
|
|
44
|
-
vr: "UI"
|
|
45
|
-
},
|
|
46
|
-
"00020003": {
|
|
47
|
-
Value: [DicomMetaDictionary.uid()],
|
|
48
|
-
vr: "UI"
|
|
49
|
-
},
|
|
50
|
-
"00020010": {
|
|
51
|
-
Value: ["1.2.840.10008.1.2"],
|
|
52
|
-
vr: "UI"
|
|
53
|
-
}
|
|
54
|
-
};
|
|
55
|
-
|
|
56
|
-
const sequenceMetadata = {
|
|
57
|
-
"00080081": { vr: "ST", Value: [null] },
|
|
58
|
-
"00081032": {
|
|
59
|
-
vr: "SQ",
|
|
60
|
-
Value: [
|
|
61
|
-
{
|
|
62
|
-
"00080100": {
|
|
63
|
-
vr: "SH",
|
|
64
|
-
Value: ["IMG1332"]
|
|
65
|
-
},
|
|
66
|
-
"00080102": {
|
|
67
|
-
vr: "SH",
|
|
68
|
-
Value: ["L"]
|
|
69
|
-
},
|
|
70
|
-
"00080104": {
|
|
71
|
-
vr: "LO",
|
|
72
|
-
Value: ["MRI SHOULDER WITHOUT IV CONTRAST LEFT"]
|
|
73
|
-
}
|
|
74
|
-
}
|
|
75
|
-
]
|
|
76
|
-
},
|
|
77
|
-
|
|
78
|
-
52009229: {
|
|
79
|
-
vr: "SQ",
|
|
80
|
-
Value: [
|
|
81
|
-
{
|
|
82
|
-
"00289110": {
|
|
83
|
-
vr: "SQ",
|
|
84
|
-
Value: [
|
|
85
|
-
{
|
|
86
|
-
"00180088": {
|
|
87
|
-
vr: "DS",
|
|
88
|
-
Value: [0.12]
|
|
89
|
-
}
|
|
90
|
-
}
|
|
91
|
-
]
|
|
92
|
-
}
|
|
93
|
-
}
|
|
94
|
-
]
|
|
95
|
-
}
|
|
96
|
-
};
|
|
97
|
-
|
|
98
|
-
function makeOverlayBitmap({ width, height }) {
|
|
99
|
-
const topBottom = new Array(width).fill(1, 0, width);
|
|
100
|
-
const middle = new Array(width).fill(0, 0, width);
|
|
101
|
-
const bitmap = [];
|
|
102
|
-
|
|
103
|
-
middle[0] = 1;
|
|
104
|
-
middle[width - 1] = 1;
|
|
105
|
-
|
|
106
|
-
bitmap.push(topBottom);
|
|
107
|
-
|
|
108
|
-
for (let i = 0; i < height - 2; i++) {
|
|
109
|
-
bitmap.push(middle);
|
|
110
|
-
}
|
|
111
|
-
|
|
112
|
-
bitmap.push(topBottom);
|
|
113
|
-
|
|
114
|
-
return bitmap.flat();
|
|
115
|
-
}
|
|
116
|
-
|
|
117
|
-
it("test_array_items", () => {
|
|
118
|
-
const dicomJSON = JSON.stringify(arrayItem);
|
|
119
|
-
const datasets = JSON.parse(dicomJSON);
|
|
120
|
-
const natural0 = DicomMetaDictionary.naturalizeDataset(datasets[0]);
|
|
121
|
-
// Shouldn't throw an exception
|
|
122
|
-
const natural0b = DicomMetaDictionary.naturalizeDataset(datasets[0]);
|
|
123
|
-
// And should be identical to the previous version
|
|
124
|
-
expect(natural0b).toEqual(natural0);
|
|
125
|
-
});
|
|
126
|
-
|
|
127
|
-
it("test_json_1", () => {
|
|
128
|
-
//
|
|
129
|
-
// multiple results example
|
|
130
|
-
// from http://dicom.nema.org/medical/dicom/current/output/html/part18.html#chapter_F
|
|
131
|
-
//
|
|
132
|
-
const dicomJSON = `
|
|
133
|
-
[
|
|
134
|
-
{
|
|
135
|
-
"0020000D": {
|
|
136
|
-
"vr": "UI",
|
|
137
|
-
"Value": [ "1.2.392.200036.9116.2.2.2.1762893313.1029997326.945873" ]
|
|
138
|
-
}
|
|
139
|
-
},
|
|
140
|
-
{
|
|
141
|
-
"0020000D" : {
|
|
142
|
-
"vr": "UI",
|
|
143
|
-
"Value": [ "1.2.392.200036.9116.2.2.2.2162893313.1029997326.945876" ]
|
|
144
|
-
}
|
|
145
|
-
}
|
|
146
|
-
]
|
|
147
|
-
`;
|
|
148
|
-
const datasets = JSON.parse(dicomJSON);
|
|
149
|
-
const firstUID = datasets[0]["0020000D"].Value[0];
|
|
150
|
-
const secondUID = datasets[1]["0020000D"].Value[0];
|
|
151
|
-
|
|
152
|
-
//
|
|
153
|
-
// make a natural version of the first study and confirm it has correct value
|
|
154
|
-
//
|
|
155
|
-
const naturalDICOM = DicomMetaDictionary.naturalizeDataset(datasets[0]);
|
|
156
|
-
|
|
157
|
-
expect(naturalDICOM.StudyInstanceUID).toEqual(firstUID);
|
|
158
|
-
|
|
159
|
-
//
|
|
160
|
-
// make a natural version of a dataset with sequence tags and confirm it has correct values
|
|
161
|
-
//
|
|
162
|
-
const naturalSequence =
|
|
163
|
-
DicomMetaDictionary.naturalizeDataset(sequenceMetadata);
|
|
164
|
-
|
|
165
|
-
// The match object needs to be done on the actual element, not the proxied value
|
|
166
|
-
expect(naturalSequence.ProcedureCodeSequence[0]).toMatchObject({
|
|
167
|
-
CodeValue: "IMG1332"
|
|
168
|
-
});
|
|
169
|
-
|
|
170
|
-
// tests that single element sequences have been converted
|
|
171
|
-
// from arrays to values.
|
|
172
|
-
// See discussion here for more details: https://github.com/dcmjs-org/dcmjs/commit/74571a4bd6c793af2a679a31cec7e197f93e28cc
|
|
173
|
-
const spacing =
|
|
174
|
-
naturalSequence.SharedFunctionalGroupsSequence.PixelMeasuresSequence
|
|
175
|
-
.SpacingBetweenSlices;
|
|
176
|
-
expect(spacing).toEqual(0.12);
|
|
177
|
-
expect(
|
|
178
|
-
Array.isArray(naturalSequence.SharedFunctionalGroupsSequence)
|
|
179
|
-
).toEqual(true);
|
|
180
|
-
|
|
181
|
-
expect(naturalSequence.ProcedureCodeSequence[0]).toMatchObject({
|
|
182
|
-
CodingSchemeDesignator: "L",
|
|
183
|
-
CodeMeaning: "MRI SHOULDER WITHOUT IV CONTRAST LEFT"
|
|
184
|
-
});
|
|
185
|
-
|
|
186
|
-
// expect original data to remain unnaturalized
|
|
187
|
-
expect(sequenceMetadata["00081032"].Value[0]).toHaveProperty("00080100");
|
|
188
|
-
expect(sequenceMetadata["00081032"].Value[0]).toHaveProperty("00080102");
|
|
189
|
-
expect(sequenceMetadata["00081032"].Value[0]).toHaveProperty("00080104");
|
|
190
|
-
|
|
191
|
-
//
|
|
192
|
-
// convert to part10 and back
|
|
193
|
-
//
|
|
194
|
-
const dicomDict = new DicomDict(metadata);
|
|
195
|
-
dicomDict.dict = datasets[1];
|
|
196
|
-
const part10Buffer = dicomDict.write();
|
|
197
|
-
|
|
198
|
-
const dicomData = dcmjs.data.DicomMessage.readFile(part10Buffer);
|
|
199
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
200
|
-
dicomData.dict
|
|
201
|
-
);
|
|
202
|
-
|
|
203
|
-
expect(dataset.StudyInstanceUID).toEqual(secondUID);
|
|
204
|
-
});
|
|
205
|
-
|
|
206
|
-
it("test_multiframe_1", async () => {
|
|
207
|
-
const url =
|
|
208
|
-
"https://github.com/dcmjs-org/data/releases/download/MRHead/MRHead.zip";
|
|
209
|
-
const unzipPath = await getZippedTestDataset(
|
|
210
|
-
url,
|
|
211
|
-
"MRHead.zip",
|
|
212
|
-
"test_multiframe_1"
|
|
213
|
-
);
|
|
214
|
-
const mrHeadPath = path.join(unzipPath, "MRHead");
|
|
215
|
-
const fileNames = await fsPromises.readdir(mrHeadPath);
|
|
216
|
-
|
|
217
|
-
const datasets = [];
|
|
218
|
-
fileNames.forEach(fileName => {
|
|
219
|
-
const arrayBuffer = fs.readFileSync(
|
|
220
|
-
path.join(mrHeadPath, fileName)
|
|
221
|
-
).buffer;
|
|
222
|
-
const dicomDict = DicomMessage.readFile(arrayBuffer);
|
|
223
|
-
const dataset = DicomMetaDictionary.naturalizeDataset(dicomDict.dict);
|
|
224
|
-
|
|
225
|
-
datasets.push(dataset);
|
|
226
|
-
});
|
|
227
|
-
|
|
228
|
-
const multiframe =
|
|
229
|
-
dcmjs.normalizers.Normalizer.normalizeToDataset(datasets);
|
|
230
|
-
const spacing =
|
|
231
|
-
multiframe.SharedFunctionalGroupsSequence.PixelMeasuresSequence
|
|
232
|
-
.SpacingBetweenSlices;
|
|
233
|
-
const roundedSpacing = Math.round(100 * spacing) / 100;
|
|
234
|
-
|
|
235
|
-
expect(multiframe.NumberOfFrames).toEqual(130);
|
|
236
|
-
expect(roundedSpacing).toEqual(1.3);
|
|
237
|
-
});
|
|
238
|
-
|
|
239
|
-
it("test_labelmapseg", async () => {
|
|
240
|
-
const segURL =
|
|
241
|
-
"https://github.com/dcmjs-org/data/releases/download/labelmap-seg/totalSegmentator.dcm";
|
|
242
|
-
var segFilePath = await getTestDataset(segURL, "LabelmapSeg.dcm");
|
|
243
|
-
const arrayBuffer = fs.readFileSync(segFilePath).buffer;
|
|
244
|
-
|
|
245
|
-
const datasets = [];
|
|
246
|
-
const dicomDict = DicomMessage.readFile(arrayBuffer);
|
|
247
|
-
const dataset = DicomMetaDictionary.naturalizeDataset(dicomDict.dict);
|
|
248
|
-
|
|
249
|
-
datasets.push(dataset);
|
|
250
|
-
|
|
251
|
-
const multiframe =
|
|
252
|
-
dcmjs.normalizers.Normalizer.normalizeToDataset(datasets);
|
|
253
|
-
const spacing =
|
|
254
|
-
multiframe.SharedFunctionalGroupsSequence.PixelMeasuresSequence
|
|
255
|
-
.SpacingBetweenSlices;
|
|
256
|
-
const roundedSpacing = Math.round(100 * spacing) / 100;
|
|
257
|
-
|
|
258
|
-
expect(multiframe.NumberOfFrames).toEqual(295);
|
|
259
|
-
expect(roundedSpacing).toEqual(1.0);
|
|
260
|
-
});
|
|
261
|
-
|
|
262
|
-
it("test_oneslice_seg", async () => {
|
|
263
|
-
const ctPelvisURL =
|
|
264
|
-
"https://github.com/dcmjs-org/data/releases/download/CTPelvis/CTPelvis.zip";
|
|
265
|
-
const segURL =
|
|
266
|
-
"https://github.com/dcmjs-org/data/releases/download/CTPelvis/Lesion1_onesliceSEG.dcm";
|
|
267
|
-
const unzipPath = await getZippedTestDataset(
|
|
268
|
-
ctPelvisURL,
|
|
269
|
-
"CTPelvis.zip",
|
|
270
|
-
"test_oneslice_seg"
|
|
271
|
-
);
|
|
272
|
-
const segFileName = "Lesion1_onesliceSEG.dcm";
|
|
273
|
-
|
|
274
|
-
const ctPelvisPath = path.join(
|
|
275
|
-
unzipPath,
|
|
276
|
-
"Series-1.2.840.113704.1.111.1916.1223562191.15"
|
|
277
|
-
);
|
|
278
|
-
|
|
279
|
-
const fileNames = await fsPromises.readdir(ctPelvisPath);
|
|
280
|
-
|
|
281
|
-
const datasets = [];
|
|
282
|
-
fileNames.forEach(fileName => {
|
|
283
|
-
const arrayBuffer = fs.readFileSync(
|
|
284
|
-
path.join(ctPelvisPath, fileName)
|
|
285
|
-
).buffer;
|
|
286
|
-
const dicomDict = DicomMessage.readFile(arrayBuffer);
|
|
287
|
-
const dataset = DicomMetaDictionary.naturalizeDataset(dicomDict.dict);
|
|
288
|
-
datasets.push(dataset);
|
|
289
|
-
});
|
|
290
|
-
|
|
291
|
-
let multiframe = dcmjs.normalizers.Normalizer.normalizeToDataset(datasets);
|
|
292
|
-
const spacing =
|
|
293
|
-
multiframe.SharedFunctionalGroupsSequence.PixelMeasuresSequence
|
|
294
|
-
.SpacingBetweenSlices;
|
|
295
|
-
const roundedSpacing = Math.round(100 * spacing) / 100;
|
|
296
|
-
|
|
297
|
-
expect(multiframe.NumberOfFrames).toEqual(60);
|
|
298
|
-
expect(roundedSpacing).toEqual(5);
|
|
299
|
-
|
|
300
|
-
var segFilePath = await getTestDataset(segURL, segFileName);
|
|
301
|
-
const arrayBuffer = fs.readFileSync(segFilePath).buffer;
|
|
302
|
-
const dicomDict = DicomMessage.readFile(arrayBuffer);
|
|
303
|
-
const dataset = DicomMetaDictionary.naturalizeDataset(dicomDict.dict);
|
|
304
|
-
|
|
305
|
-
multiframe = dcmjs.normalizers.Normalizer.normalizeToDataset([dataset]);
|
|
306
|
-
expect(dataset.NumberOfFrames).toEqual(1);
|
|
307
|
-
expect(multiframe.NumberOfFrames).toEqual(1);
|
|
308
|
-
});
|
|
309
|
-
|
|
310
|
-
it("test_normalizer_smaller", () => {
|
|
311
|
-
const naturalizedTags =
|
|
312
|
-
dcmjs.data.DicomMetaDictionary.naturalizeDataset(rawTags);
|
|
313
|
-
|
|
314
|
-
const rawTagsLen = JSON.stringify(rawTags).length;
|
|
315
|
-
const naturalizedTagsLen = JSON.stringify(naturalizedTags).length;
|
|
316
|
-
expect(naturalizedTagsLen).toBeLessThan(rawTagsLen);
|
|
317
|
-
});
|
|
318
|
-
|
|
319
|
-
it("test_multiframe_us", () => {
|
|
320
|
-
const file = fs.readFileSync("test/cine-test.dcm");
|
|
321
|
-
const dicomData = dcmjs.data.DicomMessage.readFile(file.buffer, {
|
|
322
|
-
// ignoreErrors: true,
|
|
323
|
-
});
|
|
324
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
325
|
-
dicomData.dict
|
|
326
|
-
);
|
|
327
|
-
|
|
328
|
-
dataset._meta = dcmjs.data.DicomMetaDictionary.namifyDataset(
|
|
329
|
-
dicomData.meta
|
|
330
|
-
);
|
|
331
|
-
expect(dataset.NumberOfFrames).toEqual(8);
|
|
332
|
-
});
|
|
333
|
-
|
|
334
|
-
it("test_fragment_multiframe", async () => {
|
|
335
|
-
const url =
|
|
336
|
-
"https://github.com/dcmjs-org/data/releases/download/encapsulation/encapsulation-fragment-multiframe.dcm";
|
|
337
|
-
const dcmPath = await getTestDataset(
|
|
338
|
-
url,
|
|
339
|
-
"encapsulation-fragment-multiframe.dcm"
|
|
340
|
-
);
|
|
341
|
-
const file = fs.readFileSync(dcmPath);
|
|
342
|
-
const dicomData = dcmjs.data.DicomMessage.readFile(file.buffer, {
|
|
343
|
-
// ignoreErrors: true,
|
|
344
|
-
});
|
|
345
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
346
|
-
dicomData.dict
|
|
347
|
-
);
|
|
348
|
-
|
|
349
|
-
dataset._meta = dcmjs.data.DicomMetaDictionary.namifyDataset(
|
|
350
|
-
dicomData.meta
|
|
351
|
-
);
|
|
352
|
-
expect(dataset.NumberOfFrames).toEqual(2);
|
|
353
|
-
});
|
|
354
|
-
|
|
355
|
-
it("test_null_number_vrs", () => {
|
|
356
|
-
const dicomDict = new DicomDict({
|
|
357
|
-
TransferSynxtaxUID: "1.2.840.10008.1.2.1"
|
|
358
|
-
});
|
|
359
|
-
dicomDict.dict = DicomMetaDictionary.denaturalizeDataset(
|
|
360
|
-
datasetWithNullNumberVRs
|
|
361
|
-
);
|
|
362
|
-
const part10Buffer = dicomDict.write();
|
|
363
|
-
const dicomData = dcmjs.data.DicomMessage.readFile(part10Buffer);
|
|
364
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
365
|
-
dicomData.dict
|
|
366
|
-
);
|
|
367
|
-
|
|
368
|
-
expect(dataset.ImageAndFluoroscopyAreaDoseProduct).toEqual(null);
|
|
369
|
-
expect(dataset.InstanceNumber).toEqual(null);
|
|
370
|
-
});
|
|
371
|
-
|
|
372
|
-
it("test_exponential_notation", () => {
|
|
373
|
-
const file = fs.readFileSync("test/sample-dicom.dcm");
|
|
374
|
-
const data = dcmjs.data.DicomMessage.readFile(file.buffer, {
|
|
375
|
-
// ignoreErrors: true,
|
|
376
|
-
});
|
|
377
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(data.dict);
|
|
378
|
-
dataset.ImagePositionPatient[2] = 7.1945578383e-5;
|
|
379
|
-
const buffer = data.write();
|
|
380
|
-
const copy = dcmjs.data.DicomMessage.readFile(buffer);
|
|
381
|
-
const datasetCopy = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
382
|
-
copy.dict
|
|
383
|
-
);
|
|
384
|
-
|
|
385
|
-
expect(dataset.ImagePositionPatient).toEqual(
|
|
386
|
-
datasetCopy.ImagePositionPatient
|
|
387
|
-
);
|
|
388
|
-
});
|
|
389
|
-
|
|
390
|
-
it("test_output_equality", () => {
|
|
391
|
-
const file = fs.readFileSync("test/cine-test.dcm");
|
|
392
|
-
const dicomData1 = dcmjs.data.DicomMessage.readFile(file.buffer, {
|
|
393
|
-
// ignoreErrors: true,
|
|
394
|
-
});
|
|
395
|
-
|
|
396
|
-
const buffer = dicomData1.write();
|
|
397
|
-
const dicomData2 = dcmjs.data.DicomMessage.readFile(buffer, {
|
|
398
|
-
// ignoreErrors: true,
|
|
399
|
-
});
|
|
400
|
-
|
|
401
|
-
check_equality(dicomData1.meta, dicomData2.meta);
|
|
402
|
-
check_equality(dicomData1.dict, dicomData2.dict);
|
|
403
|
-
|
|
404
|
-
function check_equality(dict1, dict2) {
|
|
405
|
-
Object.keys(dict1).forEach(key => {
|
|
406
|
-
const elem1 = dict1[key];
|
|
407
|
-
const elem2 = dict2[key];
|
|
408
|
-
|
|
409
|
-
expect(JSON.stringify(elem1)).toEqual(JSON.stringify(elem2));
|
|
410
|
-
});
|
|
411
|
-
}
|
|
412
|
-
});
|
|
413
|
-
|
|
414
|
-
it("test_performance", async () => {
|
|
415
|
-
const file = fs.readFileSync("test/cine-test.dcm");
|
|
416
|
-
let buffer = file.buffer;
|
|
417
|
-
let json;
|
|
418
|
-
const start = Date.now();
|
|
419
|
-
|
|
420
|
-
for (let i = 0; i < 100; ++i) {
|
|
421
|
-
let old = json;
|
|
422
|
-
json = DicomMessage.readFile(buffer);
|
|
423
|
-
buffer = json.write();
|
|
424
|
-
|
|
425
|
-
if (i > 0) {
|
|
426
|
-
check_equality(old.meta, json.meta);
|
|
427
|
-
check_equality(old.dict, json.dict);
|
|
428
|
-
}
|
|
429
|
-
}
|
|
430
|
-
|
|
431
|
-
function check_equality(dict1, dict2) {
|
|
432
|
-
Object.keys(dict1).forEach(key => {
|
|
433
|
-
const elem1 = dict1[key];
|
|
434
|
-
const elem2 = dict2[key];
|
|
435
|
-
|
|
436
|
-
expect(JSON.stringify(elem1)).toEqual(JSON.stringify(elem2));
|
|
437
|
-
});
|
|
438
|
-
}
|
|
439
|
-
|
|
440
|
-
console.log(`Finished. Total Time elapsed: ${Date.now() - start} ms`);
|
|
441
|
-
});
|
|
442
|
-
|
|
443
|
-
it("test_invalid_vr_length", () => {
|
|
444
|
-
const file = fs.readFileSync("test/invalid-vr-length-test.dcm");
|
|
445
|
-
const dicomDict = dcmjs.data.DicomMessage.readFile(file.buffer);
|
|
446
|
-
|
|
447
|
-
expect(() =>
|
|
448
|
-
writeToBuffer(dicomDict, { allowInvalidVRLength: false })
|
|
449
|
-
).toThrow();
|
|
450
|
-
expect(() =>
|
|
451
|
-
writeToBuffer(dicomDict, { allowInvalidVRLength: true })
|
|
452
|
-
).not.toThrow();
|
|
453
|
-
|
|
454
|
-
function writeToBuffer(dicomDict, options) {
|
|
455
|
-
return dicomDict.write(options);
|
|
456
|
-
}
|
|
457
|
-
});
|
|
458
|
-
|
|
459
|
-
it("test_long_explicit_vr", () => {
|
|
460
|
-
const contourData = [];
|
|
461
|
-
for (let i = 0; i < 65536; i++) {
|
|
462
|
-
contourData.push(String(i));
|
|
463
|
-
}
|
|
464
|
-
|
|
465
|
-
const dicomDict = new DicomDict({
|
|
466
|
-
TransferSynxtaxUID: EXPLICIT_LITTLE_ENDIAN
|
|
467
|
-
});
|
|
468
|
-
|
|
469
|
-
const natural = {
|
|
470
|
-
ContourData: contourData
|
|
471
|
-
};
|
|
472
|
-
|
|
473
|
-
dicomDict.dict = DicomMetaDictionary.denaturalizeDataset(natural);
|
|
474
|
-
|
|
475
|
-
const part10Buffer = dicomDict.write();
|
|
476
|
-
const dicomData = DicomMessage.readFile(part10Buffer);
|
|
477
|
-
const dataset = DicomMetaDictionary.naturalizeDataset(dicomData.dict);
|
|
478
|
-
|
|
479
|
-
expect(dataset.ContourData.length).toBe(contourData.length);
|
|
480
|
-
});
|
|
481
|
-
|
|
482
|
-
it("test_encapsulation", async () => {
|
|
483
|
-
const url =
|
|
484
|
-
"https://github.com/dcmjs-org/data/releases/download/encapsulation/encapsulation.dcm";
|
|
485
|
-
const dcmPath = await getTestDataset(url, "encapsulation.dcm");
|
|
486
|
-
|
|
487
|
-
// given
|
|
488
|
-
const arrayBuffer = fs.readFileSync(dcmPath).buffer;
|
|
489
|
-
const dicomDict = DicomMessage.readFile(arrayBuffer);
|
|
490
|
-
|
|
491
|
-
dicomDict.upsertTag("60000010", "US", 30); // Overlay Rows
|
|
492
|
-
dicomDict.upsertTag("60000011", "US", 30); // Overlay Columns
|
|
493
|
-
dicomDict.upsertTag("60000040", "CS", "G"); // Overlay Type
|
|
494
|
-
dicomDict.upsertTag("60000045", "LO", "AUTOMATED"); // Overlay Subtype
|
|
495
|
-
dicomDict.upsertTag("60000050", "SS", [1 + 50, 1 + 50]); // Overlay Origin
|
|
496
|
-
|
|
497
|
-
let overlay = dcmjs.data.BitArray.pack(
|
|
498
|
-
makeOverlayBitmap({ width: 30, height: 30 })
|
|
499
|
-
);
|
|
500
|
-
|
|
501
|
-
if (overlay.length % 2 !== 0) {
|
|
502
|
-
const newOverlay = new Uint8Array(overlay.length + 1);
|
|
503
|
-
|
|
504
|
-
newOverlay.set(overlay);
|
|
505
|
-
newOverlay.set([0], overlay.length);
|
|
506
|
-
|
|
507
|
-
overlay = newOverlay;
|
|
508
|
-
}
|
|
509
|
-
|
|
510
|
-
dicomDict.upsertTag("60003000", "OB", [overlay.buffer]);
|
|
511
|
-
|
|
512
|
-
// when
|
|
513
|
-
const lengths = [];
|
|
514
|
-
const stream = new ReadBufferStream(
|
|
515
|
-
dicomDict.write({ fragmentMultiframe: false })
|
|
516
|
-
),
|
|
517
|
-
useSyntax = EXPLICIT_LITTLE_ENDIAN;
|
|
518
|
-
|
|
519
|
-
stream.reset();
|
|
520
|
-
stream.increment(128);
|
|
521
|
-
|
|
522
|
-
if (stream.readAsciiString(4) !== "DICM") {
|
|
523
|
-
throw new Error("Invalid a dicom file");
|
|
524
|
-
}
|
|
525
|
-
|
|
526
|
-
const el = DicomMessage._readTag(stream, useSyntax),
|
|
527
|
-
metaLength = el.values[0]; //read header buffer
|
|
528
|
-
const metaStream = stream.more(metaLength);
|
|
529
|
-
const metaHeader = DicomMessage._read(metaStream, useSyntax); //get the syntax
|
|
530
|
-
let mainSyntax = metaHeader["00020010"].Value[0];
|
|
531
|
-
|
|
532
|
-
mainSyntax = DicomMessage._normalizeSyntax(mainSyntax);
|
|
533
|
-
|
|
534
|
-
while (!stream.end()) {
|
|
535
|
-
const group = new Uint16Array(stream.buffer, stream.offset, 1)[0]
|
|
536
|
-
.toString(16)
|
|
537
|
-
.padStart(4, "0");
|
|
538
|
-
const element = new Uint16Array(stream.buffer, stream.offset + 2, 1)[0]
|
|
539
|
-
.toString(16)
|
|
540
|
-
.padStart(4, "0");
|
|
541
|
-
|
|
542
|
-
if (group.concat(element) === "60003000") {
|
|
543
|
-
// Overlay Data
|
|
544
|
-
const length = Buffer.from(
|
|
545
|
-
new Uint8Array(stream.buffer, stream.offset + 8, 4)
|
|
546
|
-
).readUInt32LE(0);
|
|
547
|
-
|
|
548
|
-
lengths.push(length);
|
|
549
|
-
}
|
|
550
|
-
|
|
551
|
-
if (group.concat(element) === "7fe00010") {
|
|
552
|
-
// Pixel Data
|
|
553
|
-
const length = Buffer.from(
|
|
554
|
-
new Uint8Array(stream.buffer, stream.offset + 8, 4)
|
|
555
|
-
).readUInt32LE(0);
|
|
556
|
-
|
|
557
|
-
lengths.push(length);
|
|
558
|
-
}
|
|
559
|
-
|
|
560
|
-
DicomMessage._readTag(stream, mainSyntax);
|
|
561
|
-
}
|
|
562
|
-
|
|
563
|
-
// then
|
|
564
|
-
expect(lengths[0]).not.toEqual(0xffffffff);
|
|
565
|
-
expect(lengths[1]).toEqual(0xffffffff);
|
|
566
|
-
});
|
|
567
|
-
|
|
568
|
-
it("test_custom_dictionary", () => {
|
|
569
|
-
const customDictionary = DicomMetaDictionary.dictionary;
|
|
570
|
-
|
|
571
|
-
customDictionary["(0013,1010)"] = {
|
|
572
|
-
tag: "(0013,1010)",
|
|
573
|
-
vr: "LO",
|
|
574
|
-
name: "TrialName",
|
|
575
|
-
vm: "1",
|
|
576
|
-
version: "Custom"
|
|
577
|
-
};
|
|
578
|
-
|
|
579
|
-
const dicomMetaDictionary = new DicomMetaDictionary(customDictionary);
|
|
580
|
-
const dicomDict = new DicomDict(metadata);
|
|
581
|
-
minimalDataset["TrialName"] = "Test Trial";
|
|
582
|
-
dicomDict.dict = dicomMetaDictionary.denaturalizeDataset(minimalDataset);
|
|
583
|
-
const part10Buffer = dicomDict.write();
|
|
584
|
-
const dicomData = DicomMessage.readFile(part10Buffer);
|
|
585
|
-
const dataset = DicomMetaDictionary.naturalizeDataset(dicomData.dict);
|
|
586
|
-
|
|
587
|
-
expect(dataset.TrialName).toEqual("Test Trial");
|
|
588
|
-
//check that all other fields were preserved, 15 original + 1 for _vr and +1 for "TrialName"
|
|
589
|
-
expect(Object.keys(dataset).length).toEqual(17);
|
|
590
|
-
});
|
|
591
|
-
|
|
592
|
-
it("test_code_string_vr_truncated", () => {
|
|
593
|
-
// Create a dataset with a CS value that exceeds the 16-character limit and gets truncated
|
|
594
|
-
const testDataset = {
|
|
595
|
-
Modality: "MAGNETICRESONANCE"
|
|
596
|
-
};
|
|
597
|
-
|
|
598
|
-
const denaturalizedDataset =
|
|
599
|
-
DicomMetaDictionary.denaturalizeDataset(testDataset);
|
|
600
|
-
|
|
601
|
-
expect(denaturalizedDataset["00080060"].vr).toEqual("CS");
|
|
602
|
-
expect(denaturalizedDataset["00080060"].Value[0]).toEqual(
|
|
603
|
-
"MAGNETICRESONANC"
|
|
604
|
-
);
|
|
605
|
-
});
|
|
606
|
-
|
|
607
|
-
it("test_date_time_vr_range_matching_not_truncated", () => {
|
|
608
|
-
const dateTime = "20230131083000.000+0000-20230131090000.000+0000";
|
|
609
|
-
const studyDate = "20230101-20230301";
|
|
610
|
-
const studyTime = "080000.000-143000.000";
|
|
611
|
-
|
|
612
|
-
const padIfRequired = value => {
|
|
613
|
-
return value.length & 1 ? value + " " : value;
|
|
614
|
-
};
|
|
615
|
-
|
|
616
|
-
// Create a dataset with DA, TM and DT value representations with range matching
|
|
617
|
-
const testDataset = {
|
|
618
|
-
// 2023-01-31 08:30 AM to 09:00 AM UTC
|
|
619
|
-
DateTime: dateTime,
|
|
620
|
-
// January 1, 2023 to March 1, 2023
|
|
621
|
-
StudyDate: studyDate,
|
|
622
|
-
// 08:00 AM to 02:30 PM
|
|
623
|
-
StudyTime: studyTime
|
|
624
|
-
};
|
|
625
|
-
|
|
626
|
-
// Roundtrip the dataset through denaturalization, naturalization and denaturalization
|
|
627
|
-
const dicomDict = new DicomDict({
|
|
628
|
-
TransferSynxtaxUID: EXPLICIT_LITTLE_ENDIAN
|
|
629
|
-
});
|
|
630
|
-
dicomDict.dict = DicomMetaDictionary.denaturalizeDataset(testDataset);
|
|
631
|
-
const part10Buffer = dicomDict.write();
|
|
632
|
-
|
|
633
|
-
const dicomData = dcmjs.data.DicomMessage.readFile(part10Buffer);
|
|
634
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
635
|
-
dicomData.dict
|
|
636
|
-
);
|
|
637
|
-
|
|
638
|
-
const denaturalizedDataset =
|
|
639
|
-
DicomMetaDictionary.denaturalizeDataset(dataset);
|
|
640
|
-
|
|
641
|
-
// Check the VR and values (pad if required)
|
|
642
|
-
expect(denaturalizedDataset["0040A120"].vr).toEqual("DT");
|
|
643
|
-
expect(denaturalizedDataset["0040A120"].Value[0]).toEqual(
|
|
644
|
-
padIfRequired(dateTime)
|
|
645
|
-
);
|
|
646
|
-
expect(denaturalizedDataset["00080020"].vr).toEqual("DA");
|
|
647
|
-
expect(denaturalizedDataset["00080020"].Value[0]).toEqual(
|
|
648
|
-
padIfRequired(studyDate)
|
|
649
|
-
);
|
|
650
|
-
expect(denaturalizedDataset["00080030"].vr).toEqual("TM");
|
|
651
|
-
expect(denaturalizedDataset["00080030"].Value[0]).toEqual(studyTime); // No padding because of 'applyFormatting'
|
|
652
|
-
});
|
|
653
|
-
|
|
654
|
-
it("Reads DICOM with multiplicity", async () => {
|
|
655
|
-
const url =
|
|
656
|
-
"https://github.com/dcmjs-org/data/releases/download/multiplicity/multiplicity.dcm";
|
|
657
|
-
const dcmPath = await getTestDataset(url, "multiplicity.dcm");
|
|
658
|
-
const file = await promisify(fs.readFile)(dcmPath);
|
|
659
|
-
const dicomDict = DicomMessage.readFile(file.buffer);
|
|
660
|
-
|
|
661
|
-
expect(dicomDict.dict["00101020"].Value).toEqual([1, 2]);
|
|
662
|
-
expect(dicomDict.dict["0018100B"].Value).toEqual(["1.2", "3.4"]);
|
|
663
|
-
});
|
|
664
|
-
|
|
665
|
-
it("Reads DICOM with PersonName multiplicity", async () => {
|
|
666
|
-
const url =
|
|
667
|
-
"https://github.com/dcmjs-org/data/releases/download/multiplicity2/multiplicity.2.dcm";
|
|
668
|
-
const dcmPath = await getTestDataset(url, "multiplicity.2.dcm");
|
|
669
|
-
const file = await promisify(fs.readFile)(dcmPath);
|
|
670
|
-
const dicomDict = DicomMessage.readFile(file.buffer);
|
|
671
|
-
|
|
672
|
-
expect(dicomDict.dict["00081070"].Value).toEqual([
|
|
673
|
-
{ Alphabetic: "Doe^John" },
|
|
674
|
-
{ Alphabetic: "Doe^Jane" }
|
|
675
|
-
]);
|
|
676
|
-
});
|
|
677
|
-
|
|
678
|
-
it("Reads binary data into an ArrayBuffer", async () => {
|
|
679
|
-
const url =
|
|
680
|
-
"https://github.com/dcmjs-org/data/releases/download/binary-tag/binary-tag.dcm";
|
|
681
|
-
const dcmPath = await getTestDataset(url, "binary-tag.dcm");
|
|
682
|
-
|
|
683
|
-
const file = await promisify(fs.readFile)(dcmPath);
|
|
684
|
-
const dicomDict = DicomMessage.readFile(file.buffer);
|
|
685
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
686
|
-
dicomDict.dict
|
|
687
|
-
);
|
|
688
|
-
|
|
689
|
-
expect(dataset.PixelData).toBeInstanceOf(Array);
|
|
690
|
-
expect(dataset.PixelData[0]).toBeInstanceOf(ArrayBuffer);
|
|
691
|
-
expect([...new Uint8Array(dataset.PixelData[0])]).toEqual([2, 3, 4, 5, 6]);
|
|
692
|
-
});
|
|
693
|
-
|
|
694
|
-
it("Reads a multiframe DICOM which has trailing padding", async () => {
|
|
695
|
-
const url =
|
|
696
|
-
"https://github.com/dcmjs-org/data/releases/download/binary-parsing-stressors/multiframe-ultrasound.dcm";
|
|
697
|
-
const dcmPath = await getTestDataset(url, "multiframe-ultrasound.dcm");
|
|
698
|
-
const dicomDict = DicomMessage.readFile(fs.readFileSync(dcmPath).buffer);
|
|
699
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
700
|
-
dicomDict.dict
|
|
701
|
-
);
|
|
702
|
-
|
|
703
|
-
expect(dataset.PixelData.length).toEqual(29);
|
|
704
|
-
expect(dataset.PixelData[0]).toBeInstanceOf(ArrayBuffer);
|
|
705
|
-
expect(dataset.PixelData[0].byteLength).toEqual(104976);
|
|
706
|
-
expect(dataset.PixelData[1].byteLength).toEqual(104920);
|
|
707
|
-
expect(dataset.PixelData[27].byteLength).toEqual(103168);
|
|
708
|
-
expect(dataset.PixelData[28].byteLength).toEqual(103194);
|
|
709
|
-
});
|
|
710
|
-
|
|
711
|
-
it("Reads a multiframe DICOM with large private tags before and after the image data", async () => {
|
|
712
|
-
const url =
|
|
713
|
-
"https://github.com/dcmjs-org/data/releases/download/binary-parsing-stressors/large-private-tags.dcm";
|
|
714
|
-
const dcmPath = await getTestDataset(url, "large-private-tags.dcm");
|
|
715
|
-
const dicomDict = DicomMessage.readFile(fs.readFileSync(dcmPath).buffer);
|
|
716
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
717
|
-
dicomDict.dict
|
|
718
|
-
);
|
|
719
|
-
|
|
720
|
-
expect(dataset.PixelData).toBeInstanceOf(Array);
|
|
721
|
-
expect(dataset.PixelData.length).toEqual(130);
|
|
722
|
-
expect(dataset.PixelData[0]).toBeInstanceOf(ArrayBuffer);
|
|
723
|
-
expect(dataset.PixelData[0].byteLength).toEqual(61518);
|
|
724
|
-
expect(dataset.PixelData[1].byteLength).toEqual(61482);
|
|
725
|
-
expect(dataset.PixelData[128].byteLength).toEqual(62144);
|
|
726
|
-
expect(dataset.PixelData[129].byteLength).toEqual(62148);
|
|
727
|
-
});
|
|
728
|
-
|
|
729
|
-
it("Writes encapsulated OB data which has an odd length with a padding byte in its last fragment", async () => {
|
|
730
|
-
const pixelData = [1, 2, 3];
|
|
731
|
-
|
|
732
|
-
const dataset = DicomMetaDictionary.denaturalizeDataset({
|
|
733
|
-
PixelData: [new Uint8Array(pixelData).buffer],
|
|
734
|
-
_vrMap: { PixelData: "OB" }
|
|
735
|
-
});
|
|
736
|
-
|
|
737
|
-
const stream = new WriteBufferStream(1024);
|
|
738
|
-
const bytesWritten = DicomMessage.write(
|
|
739
|
-
dataset,
|
|
740
|
-
stream,
|
|
741
|
-
"1.2.840.10008.1.2.4.50" // JPEG baseline (an encapsulated format)
|
|
742
|
-
);
|
|
743
|
-
|
|
744
|
-
expect(bytesWritten).toEqual(44);
|
|
745
|
-
expect([...new Uint32Array(stream.buffer, 0, 11)]).toEqual([
|
|
746
|
-
0x00107fe0, // PixelData tag's group & element
|
|
747
|
-
0x0000424f, // VR type "OB"
|
|
748
|
-
0xffffffff, // Value length (0xffffffff here indicates an undefined length)
|
|
749
|
-
0xe000fffe, // SequenceItemTag for the BOT (basic offset table)
|
|
750
|
-
0x00000004, // Size in bytes of the BOT
|
|
751
|
-
0x00000000, // First (and only) offset in the BOT
|
|
752
|
-
0xe000fffe, // SequenceItemTag
|
|
753
|
-
0x00000004, // SequenceItemTag's length in bytes
|
|
754
|
-
0x00030201, // The actual data for this fragment (specified above), with padding
|
|
755
|
-
0xe0ddfffe, // SequenceDelimiterTag
|
|
756
|
-
0x00000000 // SequenceDelimiterTag value (always zero)
|
|
757
|
-
]);
|
|
758
|
-
});
|
|
759
|
-
|
|
760
|
-
it("test_deflated", async () => {
|
|
761
|
-
const url =
|
|
762
|
-
"https://github.com/dcmjs-org/data/releases/download/deflate-transfer-syntax/deflate_tests.zip";
|
|
763
|
-
const unzipPath = await getZippedTestDataset(
|
|
764
|
-
url,
|
|
765
|
-
"deflate_tests.zip",
|
|
766
|
-
"deflate_tests"
|
|
767
|
-
);
|
|
768
|
-
const deflatedPath = path.join(unzipPath, "deflate_tests");
|
|
769
|
-
|
|
770
|
-
const expected = [
|
|
771
|
-
{
|
|
772
|
-
file: "image_dfl",
|
|
773
|
-
tags: { Modality: "OT", Rows: 512, Columns: 512 }
|
|
774
|
-
},
|
|
775
|
-
{
|
|
776
|
-
file: "report_dfl",
|
|
777
|
-
tags: {
|
|
778
|
-
Modality: "SR",
|
|
779
|
-
VerificationFlag: "UNVERIFIED",
|
|
780
|
-
ContentDate: "20001110"
|
|
781
|
-
}
|
|
782
|
-
},
|
|
783
|
-
{
|
|
784
|
-
file: "wave_dfl",
|
|
785
|
-
tags: {
|
|
786
|
-
Modality: "ECG",
|
|
787
|
-
SynchronizationTrigger: "NO TRIGGER",
|
|
788
|
-
ContentDate: "19991223"
|
|
789
|
-
}
|
|
790
|
-
}
|
|
791
|
-
];
|
|
792
|
-
|
|
793
|
-
expected.forEach(e => {
|
|
794
|
-
const buffer = fs.readFileSync(path.join(deflatedPath, e.file));
|
|
795
|
-
const dicomDict = DicomMessage.readFile(
|
|
796
|
-
buffer.buffer.slice(
|
|
797
|
-
buffer.byteOffset,
|
|
798
|
-
buffer.byteOffset + buffer.byteLength
|
|
799
|
-
)
|
|
800
|
-
);
|
|
801
|
-
const dataset = DicomMetaDictionary.naturalizeDataset(dicomDict.dict);
|
|
802
|
-
Object.keys(e.tags).forEach(t => {
|
|
803
|
-
expect(dataset[t]).toEqual(e.tags[t]);
|
|
804
|
-
});
|
|
805
|
-
});
|
|
806
|
-
});
|
|
807
|
-
|
|
808
|
-
describe("With a SpecificCharacterSet tag", () => {
|
|
809
|
-
it("Reads a long string in the '' character set", async () => {
|
|
810
|
-
expect(readEncodedLongString("", [0x68, 0x69])).toEqual("hi");
|
|
811
|
-
});
|
|
812
|
-
|
|
813
|
-
it("Reads a long string in the ISO_IR 6 (default) character set", async () => {
|
|
814
|
-
expect(readEncodedLongString("ISO_IR 6", [0x68, 0x69])).toEqual("hi");
|
|
815
|
-
});
|
|
816
|
-
|
|
817
|
-
it("Reads a long string in the ISO_IR 13 (shift-jis) character set", async () => {
|
|
818
|
-
expect(readEncodedLongString("ISO_IR 13", [0x83, 0x8b])).toEqual("ル");
|
|
819
|
-
});
|
|
820
|
-
|
|
821
|
-
it("Reads a long string in the ISO_IR 166 (tis-620) character set", async () => {
|
|
822
|
-
expect(readEncodedLongString("ISO_IR 166", [0xb9, 0xf7])).toEqual("น๗");
|
|
823
|
-
});
|
|
824
|
-
|
|
825
|
-
it("Reads a long string in the ISO_IR 192 (utf-8) character set", async () => {
|
|
826
|
-
expect(readEncodedLongString("ISO_IR 192", [0xed, 0x95, 0x9c])).toEqual(
|
|
827
|
-
"한"
|
|
828
|
-
);
|
|
829
|
-
});
|
|
830
|
-
|
|
831
|
-
it("Throws an exception on an unsupported character set", async () => {
|
|
832
|
-
log.setLevel(5);
|
|
833
|
-
expect(() => readEncodedLongString("nope", [])).toThrow(
|
|
834
|
-
new Error("Unsupported character set: nope")
|
|
835
|
-
);
|
|
836
|
-
});
|
|
837
|
-
|
|
838
|
-
it("Doesn't throw an exception on an unsupported character set when ignoring errors", async () => {
|
|
839
|
-
log.setLevel(5);
|
|
840
|
-
expect(
|
|
841
|
-
readEncodedLongString("nope", [0x68, 0x69], { ignoreErrors: true })
|
|
842
|
-
).toEqual("hi");
|
|
843
|
-
});
|
|
844
|
-
|
|
845
|
-
it("Throws an exception on multiple character sets", async () => {
|
|
846
|
-
expect(() =>
|
|
847
|
-
readEncodedLongString("ISO_IR 13\\ISO_IR 166", [])
|
|
848
|
-
).toThrow(
|
|
849
|
-
/Using multiple character sets is not supported: ISO_IR 13,ISO_IR 166/
|
|
850
|
-
);
|
|
851
|
-
});
|
|
852
|
-
|
|
853
|
-
it("Doesn't throw an exception on multiple character sets when ignoring errors", async () => {
|
|
854
|
-
expect(
|
|
855
|
-
readEncodedLongString("ISO_IR 13\\ISO_IR 166", [0x68, 0x69], {
|
|
856
|
-
ignoreErrors: true
|
|
857
|
-
})
|
|
858
|
-
).toEqual("hi");
|
|
859
|
-
});
|
|
860
|
-
|
|
861
|
-
/**
 * Builds an implicit-VR little-endian stream containing a
 * SpecificCharacterSet (0008,0005) element and an InstitutionName
 * (0008,0080) element whose value is the given encoded bytes, reads it back
 * through DicomMessage._read, and returns the decoded InstitutionName.
 *
 * @param {string} specificCharacterSet - value for SpecificCharacterSet
 * @param {number[]} encodedBytes - raw encoded bytes for InstitutionName
 * @param {object} [readOptions] - options forwarded to DicomMessage._read
 * @returns {string} the decoded InstitutionName value
 */
function readEncodedLongString(
    specificCharacterSet,
    encodedBytes,
    readOptions = { ignoreErrors: false }
) {
    // Pad to even lengths with spaces if needed. Pad copies rather than the
    // arguments themselves — the previous version pushed the pad byte into
    // the caller's array, mutating a caller-owned object.
    const charSet =
        specificCharacterSet.length & 1
            ? specificCharacterSet + " "
            : specificCharacterSet;
    const bytes =
        encodedBytes.length & 1
            ? [...encodedBytes, PADDING_SPACE]
            : encodedBytes;

    // Manually construct the binary representation for the following two tags:
    // - Tag #1: SpecificCharacterSet specifying the character set
    // - Tag #2: InstitutionName which is a long string tag that will have its value
    //   set to the encoded bytes
    // 16 bytes covers the two implicit-VR headers (4-byte tag + 4-byte length each).
    const stream = new WriteBufferStream(16 + charSet.length + bytes.length);
    stream.isLittleEndian = true;

    // Write SpecificCharacterSet tag (0008,0005), packed little-endian
    stream.writeUint32(0x00050008);
    stream.writeUint32(charSet.length);
    stream.writeAsciiString(charSet);

    // Write InstitutionName tag (0008,0080), packed little-endian
    stream.writeUint32(0x00800008);
    stream.writeUint32(bytes.length);
    for (const encodedByte of bytes) {
        stream.writeUint8(encodedByte);
    }

    // Read the stream back to get the value of the InstitutionName tag
    const readResult = DicomMessage._read(
        new ReadBufferStream(stream.buffer),
        IMPLICIT_LITTLE_ENDIAN,
        readOptions
    );

    // Return the resulting string value for InstitutionName
    return readResult["00080080"].Value[0];
}
|
|
905
|
-
});
|
|
906
|
-
|
|
907
|
-
it("Reads and writes numbers with NaN and Infinity values of tags with type FD (double float)", () => {
|
|
908
|
-
const dicomDict = new DicomDict({
|
|
909
|
-
TransferSynxtaxUID: EXPLICIT_LITTLE_ENDIAN
|
|
910
|
-
});
|
|
911
|
-
|
|
912
|
-
dicomDict.dict = DicomMetaDictionary.denaturalizeDataset({
|
|
913
|
-
LongitudinalTemporalOffsetFromEvent: NaN,
|
|
914
|
-
SequenceOfUltrasoundRegions: [{ PhysicalDeltaX: Infinity }]
|
|
915
|
-
});
|
|
916
|
-
|
|
917
|
-
const part10Buffer = dicomDict.write();
|
|
918
|
-
const dicomData = dcmjs.data.DicomMessage.readFile(part10Buffer);
|
|
919
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
920
|
-
dicomData.dict
|
|
921
|
-
);
|
|
922
|
-
|
|
923
|
-
expect(dataset.LongitudinalTemporalOffsetFromEvent).toEqual(NaN);
|
|
924
|
-
expect(dataset.SequenceOfUltrasoundRegions[0].PhysicalDeltaX).toEqual(
|
|
925
|
-
Infinity
|
|
926
|
-
);
|
|
927
|
-
});
|
|
928
|
-
|
|
929
|
-
it("Tests that reading fails on a DICOM without a meta length tag when ignoreErrors is false", () => {
|
|
930
|
-
const rawFile = fs.readFileSync("test/no-meta-length-test.dcm");
|
|
931
|
-
|
|
932
|
-
let arrayBuffer = rawFile.buffer;
|
|
933
|
-
if (
|
|
934
|
-
rawFile.byteOffset !== 0 ||
|
|
935
|
-
rawFile.byteLength !== arrayBuffer.byteLength
|
|
936
|
-
) {
|
|
937
|
-
arrayBuffer = arrayBuffer.slice(
|
|
938
|
-
rawFile.byteOffset,
|
|
939
|
-
rawFile.byteOffset + rawFile.byteLength
|
|
940
|
-
);
|
|
941
|
-
}
|
|
942
|
-
|
|
943
|
-
// Should throw an error when ignoreErrors is false
|
|
944
|
-
expect(() => {
|
|
945
|
-
dcmjs.data.DicomMessage.readFile(arrayBuffer, {
|
|
946
|
-
ignoreErrors: false,
|
|
947
|
-
untilTag: "0020000E",
|
|
948
|
-
includeUntilTagValue: true
|
|
949
|
-
});
|
|
950
|
-
}).toThrow(
|
|
951
|
-
"Invalid DICOM file, meta length tag is malformed or not present."
|
|
952
|
-
);
|
|
953
|
-
});
|
|
954
|
-
|
|
955
|
-
it("Tests that reading succeeds on a DICOM without a meta length tag when ignoreErrors is true", () => {
|
|
956
|
-
const rawFile = fs.readFileSync("test/no-meta-length-test.dcm");
|
|
957
|
-
|
|
958
|
-
let arrayBuffer = rawFile.buffer;
|
|
959
|
-
if (
|
|
960
|
-
rawFile.byteOffset !== 0 ||
|
|
961
|
-
rawFile.byteLength !== arrayBuffer.byteLength
|
|
962
|
-
) {
|
|
963
|
-
arrayBuffer = arrayBuffer.slice(
|
|
964
|
-
rawFile.byteOffset,
|
|
965
|
-
rawFile.byteOffset + rawFile.byteLength
|
|
966
|
-
);
|
|
967
|
-
}
|
|
968
|
-
|
|
969
|
-
// Should not throw an error and should successfully parse the file when ignoreErrors is true
|
|
970
|
-
expect(() => {
|
|
971
|
-
const dataset = dcmjs.data.DicomMessage.readFile(arrayBuffer, {
|
|
972
|
-
ignoreErrors: true
|
|
973
|
-
});
|
|
974
|
-
|
|
975
|
-
// Verify that we have a valid dataset with meta header
|
|
976
|
-
expect(dataset).toBeDefined();
|
|
977
|
-
expect(dataset.meta).toBeDefined();
|
|
978
|
-
expect(dataset.meta["00020010"]).toBeDefined(); // Transfer Syntax should be present
|
|
979
|
-
expect(dataset.dict["0020000E"]).toBeDefined(); // Series Instance UID should be present
|
|
980
|
-
}).not.toThrow();
|
|
981
|
-
});
|
|
982
|
-
|
|
983
|
-
describe("The same DICOM file loaded from both DCM and JSON", () => {
|
|
984
|
-
let dicomData;
|
|
985
|
-
let jsonData;
|
|
986
|
-
|
|
987
|
-
// Load the same SR object two ways before every test: once parsed from the
// binary Part 10 fixture, once from its JSON representation, so the suite
// can check both paths produce equivalent datasets.
beforeEach(() => {
    const file = fs.readFileSync("test/sample-sr.dcm");
    dicomData = dcmjs.data.DicomMessage.readFile(file.buffer, {
        // ignoreErrors: true,
    });
    // Deep-copy the shared JSON fixture so tests may mutate it freely.
    jsonData = JSON.parse(JSON.stringify(sampleDicomSR));
});
|
|
994
|
-
|
|
995
|
-
describe("naturalized datasets", () => {
|
|
996
|
-
let dcmDataset;
|
|
997
|
-
let jsonDataset;
|
|
998
|
-
|
|
999
|
-
beforeEach(() => {
|
|
1000
|
-
dcmDataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
1001
|
-
dicomData.dict
|
|
1002
|
-
);
|
|
1003
|
-
jsonDataset =
|
|
1004
|
-
dcmjs.data.DicomMetaDictionary.naturalizeDataset(jsonData);
|
|
1005
|
-
});
|
|
1006
|
-
|
|
1007
|
-
it("Compares denaturalized PersonName values and accessors", () => {
|
|
1008
|
-
const jsonDenaturalized =
|
|
1009
|
-
dcmjs.data.DicomMetaDictionary.denaturalizeDataset(jsonDataset);
|
|
1010
|
-
const dcmDenaturalized =
|
|
1011
|
-
dcmjs.data.DicomMetaDictionary.denaturalizeDataset(dcmDataset);
|
|
1012
|
-
|
|
1013
|
-
// These check to ensure when new denaturalized tags are created we're adding
|
|
1014
|
-
// accessors to them, as well as the value accessors.
|
|
1015
|
-
// This is specific to PN VRs.
|
|
1016
|
-
expect(jsonDataset.OperatorsName.__hasValueAccessors).toBe(true);
|
|
1017
|
-
expect(dcmDataset.OperatorsName.__hasValueAccessors).toBe(true);
|
|
1018
|
-
expect(
|
|
1019
|
-
jsonDenaturalized["00081070"].Value.__hasValueAccessors
|
|
1020
|
-
).toBe(true);
|
|
1021
|
-
expect(dcmDenaturalized["00081070"].Value.__hasValueAccessors).toBe(
|
|
1022
|
-
true
|
|
1023
|
-
);
|
|
1024
|
-
expect(jsonDataset.__hasTagAccessors).toBe(true);
|
|
1025
|
-
expect(dcmDataset.__hasTagAccessors).toBe(true);
|
|
1026
|
-
expect(jsonDenaturalized["00081070"].__hasTagAccessors).toBe(true);
|
|
1027
|
-
expect(dcmDenaturalized["00081070"].__hasTagAccessors).toBe(true);
|
|
1028
|
-
expect(jsonDenaturalized["00081070"]).not.toBe(
|
|
1029
|
-
jsonDataset.OperatorsName
|
|
1030
|
-
);
|
|
1031
|
-
expect(dcmDenaturalized["00081070"]).not.toBe(
|
|
1032
|
-
dcmDataset.OperatorsName
|
|
1033
|
-
);
|
|
1034
|
-
});
|
|
1035
|
-
|
|
1036
|
-
it("Compares dcm rebuilt from json with original", () => {
|
|
1037
|
-
const dicomDict = new dcmjs.data.DicomDict(dicomData.meta);
|
|
1038
|
-
dicomDict.dict =
|
|
1039
|
-
dcmjs.data.DicomMetaDictionary.denaturalizeDataset(jsonDataset);
|
|
1040
|
-
|
|
1041
|
-
const buffer = dicomDict.write();
|
|
1042
|
-
|
|
1043
|
-
const rebuiltData = dcmjs.data.DicomMessage.readFile(buffer);
|
|
1044
|
-
|
|
1045
|
-
expect(JSON.stringify(rebuiltData)).toEqual(
|
|
1046
|
-
JSON.stringify(dicomData)
|
|
1047
|
-
);
|
|
1048
|
-
});
|
|
1049
|
-
|
|
1050
|
-
it("Adds a new PN tag", () => {
|
|
1051
|
-
jsonDataset.PerformingPhysicianName = { Alphabetic: "Doe^John" };
|
|
1052
|
-
|
|
1053
|
-
expect(String(jsonDataset.PerformingPhysicianName)).toEqual(
|
|
1054
|
-
"Doe^John"
|
|
1055
|
-
);
|
|
1056
|
-
expect(JSON.stringify(jsonDataset.PerformingPhysicianName)).toEqual(
|
|
1057
|
-
'[{"Alphabetic":"Doe^John"}]'
|
|
1058
|
-
);
|
|
1059
|
-
});
|
|
1060
|
-
|
|
1061
|
-
// Multiplicity
|
|
1062
|
-
describe("multiplicity", () => {
|
|
1063
|
-
it("Compares naturalized values", () => {
|
|
1064
|
-
expect(JSON.stringify(jsonDataset.OtherPatientNames)).toEqual(
|
|
1065
|
-
JSON.stringify(dcmDataset.OtherPatientNames)
|
|
1066
|
-
);
|
|
1067
|
-
expect(jsonDataset.OtherPatientNames.toString()).toEqual(
|
|
1068
|
-
dcmDataset.OtherPatientNames.toString()
|
|
1069
|
-
);
|
|
1070
|
-
});
|
|
1071
|
-
|
|
1072
|
-
it("Checks dicom output string", () => {
|
|
1073
|
-
expect(String(jsonDataset.OtherPatientNames)).toEqual(
|
|
1074
|
-
"Doe^John=Johnny=Jonny\\Doe^Jane=Janie=Jayne"
|
|
1075
|
-
);
|
|
1076
|
-
expect(String(dcmDataset.OtherPatientNames)).toEqual(
|
|
1077
|
-
"Doe^John=Johnny=Jonny\\Doe^Jane=Janie=Jayne"
|
|
1078
|
-
);
|
|
1079
|
-
});
|
|
1080
|
-
|
|
1081
|
-
it("Adds additional names", () => {
|
|
1082
|
-
jsonDataset.OtherPatientNames.push("Test==Name");
|
|
1083
|
-
expect(JSON.stringify(jsonDataset.OtherPatientNames)).toContain(
|
|
1084
|
-
`,{"Alphabetic":"Test","Phonetic":"Name"}]`
|
|
1085
|
-
);
|
|
1086
|
-
|
|
1087
|
-
jsonDataset.OtherPatientNames.push({ Alphabetic: "Test2" });
|
|
1088
|
-
expect(JSON.stringify(jsonDataset.OtherPatientNames)).toContain(
|
|
1089
|
-
`,{"Alphabetic":"Test2"}]`
|
|
1090
|
-
);
|
|
1091
|
-
|
|
1092
|
-
dcmDataset.OtherPatientNames.push("Test==Name");
|
|
1093
|
-
expect(JSON.stringify(dcmDataset.OtherPatientNames)).toContain(
|
|
1094
|
-
`,{"Alphabetic":"Test","Phonetic":"Name"}]`
|
|
1095
|
-
);
|
|
1096
|
-
|
|
1097
|
-
dcmDataset.OtherPatientNames.push({
|
|
1098
|
-
Alphabetic: "Test2"
|
|
1099
|
-
});
|
|
1100
|
-
expect(JSON.stringify(dcmDataset.OtherPatientNames)).toContain(
|
|
1101
|
-
`,{"Alphabetic":"Test2"}]`
|
|
1102
|
-
);
|
|
1103
|
-
});
|
|
1104
|
-
});
|
|
1105
|
-
|
|
1106
|
-
// OperatorName is three-component name
|
|
1107
|
-
describe("multiple-component name", () => {
|
|
1108
|
-
it("Compares denaturalized values", () => {
|
|
1109
|
-
const jsonDenaturalized =
|
|
1110
|
-
dcmjs.data.DicomMetaDictionary.denaturalizeDataset(
|
|
1111
|
-
jsonDataset
|
|
1112
|
-
);
|
|
1113
|
-
const dcmDenaturalized =
|
|
1114
|
-
dcmjs.data.DicomMetaDictionary.denaturalizeDataset(
|
|
1115
|
-
dcmDataset
|
|
1116
|
-
);
|
|
1117
|
-
|
|
1118
|
-
expect(jsonDenaturalized["00081070"].Value).toEqual([
|
|
1119
|
-
{
|
|
1120
|
-
Alphabetic: "Operator^John^^Mr.^Sr.",
|
|
1121
|
-
Ideographic: "John Operator",
|
|
1122
|
-
Phonetic: "O-per-a-tor"
|
|
1123
|
-
}
|
|
1124
|
-
]);
|
|
1125
|
-
expect(jsonDenaturalized["00081070"].Value).toEqual(
|
|
1126
|
-
dcmDenaturalized["00081070"].Value
|
|
1127
|
-
);
|
|
1128
|
-
expect(jsonDenaturalized["00081070"].Value).toEqual(
|
|
1129
|
-
jsonDataset.OperatorsName
|
|
1130
|
-
);
|
|
1131
|
-
expect(String(jsonDenaturalized["00081070"].Value)).toEqual(
|
|
1132
|
-
String(jsonDataset.OperatorsName)
|
|
1133
|
-
);
|
|
1134
|
-
expect(
|
|
1135
|
-
JSON.stringify(jsonDenaturalized["00081070"].Value)
|
|
1136
|
-
).toEqual(JSON.stringify(jsonDataset.OperatorsName));
|
|
1137
|
-
});
|
|
1138
|
-
|
|
1139
|
-
it("Compares changed values", () => {
|
|
1140
|
-
jsonDataset.OperatorsName.Alphabetic =
|
|
1141
|
-
dcmDataset.OperatorsName.Alphabetic = "Doe^John";
|
|
1142
|
-
jsonDataset.OperatorsName.Ideographic =
|
|
1143
|
-
dcmDataset.OperatorsName.Ideographic = undefined;
|
|
1144
|
-
jsonDataset.OperatorsName.Phonetic =
|
|
1145
|
-
dcmDataset.OperatorsName.Phonetic = undefined;
|
|
1146
|
-
|
|
1147
|
-
expect(JSON.stringify(jsonDataset.OperatorsName)).toEqual(
|
|
1148
|
-
JSON.stringify(dcmDataset.OperatorsName)
|
|
1149
|
-
);
|
|
1150
|
-
expect(jsonDataset.OperatorsName.toString()).toEqual(
|
|
1151
|
-
dcmDataset.OperatorsName.toString()
|
|
1152
|
-
);
|
|
1153
|
-
|
|
1154
|
-
const jsonDenaturalized =
|
|
1155
|
-
dcmjs.data.DicomMetaDictionary.denaturalizeDataset(
|
|
1156
|
-
jsonDataset
|
|
1157
|
-
);
|
|
1158
|
-
const dcmDenaturalized =
|
|
1159
|
-
dcmjs.data.DicomMetaDictionary.denaturalizeDataset(
|
|
1160
|
-
dcmDataset
|
|
1161
|
-
);
|
|
1162
|
-
|
|
1163
|
-
expect(jsonDenaturalized["00081070"].Value).toEqual([
|
|
1164
|
-
{ Alphabetic: "Doe^John" }
|
|
1165
|
-
]);
|
|
1166
|
-
expect(jsonDenaturalized["00081070"].Value).toEqual(
|
|
1167
|
-
dcmDenaturalized["00081070"].Value
|
|
1168
|
-
);
|
|
1169
|
-
});
|
|
1170
|
-
});
|
|
1171
|
-
});
|
|
1172
|
-
|
|
1173
|
-
describe("unnaturalized datasets", () => {
|
|
1174
|
-
it("Upserting a name", () => {
|
|
1175
|
-
// PerformingPhysicianName
|
|
1176
|
-
dicomData.upsertTag("00081050", "PN", "Test^Name=Upsert\\Test");
|
|
1177
|
-
expect(String(dicomData.dict["00081050"].Value)).toEqual(
|
|
1178
|
-
"Test^Name=Upsert\\Test"
|
|
1179
|
-
);
|
|
1180
|
-
expect(dicomData.dict["00081050"].Value).toBeInstanceOf(String);
|
|
1181
|
-
expect(JSON.stringify(dicomData.dict["00081050"].Value)).toEqual(
|
|
1182
|
-
'[{"Alphabetic":"Test^Name","Ideographic":"Upsert"},{"Alphabetic":"Test"}]'
|
|
1183
|
-
);
|
|
1184
|
-
|
|
1185
|
-
// Upsert a second time on the same tag to overwrite it.
|
|
1186
|
-
dicomData.upsertTag("00081050", "PN", "Another=Upsert\\Testing");
|
|
1187
|
-
|
|
1188
|
-
expect(String(dicomData.dict["00081050"].Value)).toEqual(
|
|
1189
|
-
"Another=Upsert\\Testing"
|
|
1190
|
-
);
|
|
1191
|
-
expect(dicomData.dict["00081050"].Value).toBeInstanceOf(String);
|
|
1192
|
-
expect(JSON.stringify(dicomData.dict["00081050"].Value)).toEqual(
|
|
1193
|
-
'[{"Alphabetic":"Another","Ideographic":"Upsert"},{"Alphabetic":"Testing"}]'
|
|
1194
|
-
);
|
|
1195
|
-
|
|
1196
|
-
// Upsert a third time on the same tag, with a naked object.
|
|
1197
|
-
dicomData.upsertTag("00081050", "PN", {
|
|
1198
|
-
Alphabetic: "Object^Testing"
|
|
1199
|
-
});
|
|
1200
|
-
expect(dicomData.dict["00081050"].Value).toEqual({
|
|
1201
|
-
Alphabetic: "Object^Testing"
|
|
1202
|
-
});
|
|
1203
|
-
expect(JSON.stringify(dicomData.dict["00081050"].Value)).toEqual(
|
|
1204
|
-
'[{"Alphabetic":"Object^Testing"}]'
|
|
1205
|
-
);
|
|
1206
|
-
|
|
1207
|
-
// Upsert a fourth time on the same tag, with a full object.
|
|
1208
|
-
dicomData.upsertTag("00081050", "PN", [
|
|
1209
|
-
{
|
|
1210
|
-
Alphabetic: "Object^Testing^Complete"
|
|
1211
|
-
}
|
|
1212
|
-
]);
|
|
1213
|
-
expect(dicomData.dict["00081050"].Value).toEqual([
|
|
1214
|
-
{
|
|
1215
|
-
Alphabetic: "Object^Testing^Complete"
|
|
1216
|
-
}
|
|
1217
|
-
]);
|
|
1218
|
-
expect(JSON.stringify(dicomData.dict["00081050"].Value)).toEqual(
|
|
1219
|
-
'[{"Alphabetic":"Object^Testing^Complete"}]'
|
|
1220
|
-
);
|
|
1221
|
-
});
|
|
1222
|
-
|
|
1223
|
-
describe("Multiplicity", () => {
|
|
1224
|
-
it("Checks raw output string", () => {
|
|
1225
|
-
expect(String(dicomData.dict["00101001"].Value)).toEqual(
|
|
1226
|
-
"Doe^John=Johnny=Jonny\\Doe^Jane=Janie=Jayne"
|
|
1227
|
-
);
|
|
1228
|
-
expect(dicomData.dict["00101001"].Value).toEqual([
|
|
1229
|
-
{
|
|
1230
|
-
Alphabetic: "Doe^John",
|
|
1231
|
-
Ideographic: "Johnny",
|
|
1232
|
-
Phonetic: "Jonny"
|
|
1233
|
-
},
|
|
1234
|
-
{
|
|
1235
|
-
Alphabetic: "Doe^Jane",
|
|
1236
|
-
Ideographic: "Janie",
|
|
1237
|
-
Phonetic: "Jayne"
|
|
1238
|
-
}
|
|
1239
|
-
]);
|
|
1240
|
-
expect(
|
|
1241
|
-
JSON.stringify(dicomData.dict["00101001"].Value)
|
|
1242
|
-
).toEqual(
|
|
1243
|
-
'[{"Alphabetic":"Doe^John","Ideographic":"Johnny","Phonetic":"Jonny"},{"Alphabetic":"Doe^Jane","Ideographic":"Janie","Phonetic":"Jayne"}]'
|
|
1244
|
-
);
|
|
1245
|
-
});
|
|
1246
|
-
});
|
|
1247
|
-
});
|
|
1248
|
-
});
|
|
1249
|
-
|
|
1250
|
-
describe("test_un_vr", () => {
|
|
1251
|
-
it("Tag with UN vr should be parsed according VR in dictionary", async () => {
|
|
1252
|
-
const expectedExposureIndex = 662;
|
|
1253
|
-
const expectedDeviationIndex = -1.835;
|
|
1254
|
-
|
|
1255
|
-
const url =
|
|
1256
|
-
"https://github.com/dcmjs-org/data/releases/download/unknown-VR/sample-dicom-with-un-vr.dcm";
|
|
1257
|
-
const dcmPath = await getTestDataset(
|
|
1258
|
-
url,
|
|
1259
|
-
"sample-dicom-with-un-vr.dcm"
|
|
1260
|
-
);
|
|
1261
|
-
|
|
1262
|
-
const file = await promisify(fs.readFile)(dcmPath);
|
|
1263
|
-
const dicomData = dcmjs.data.DicomMessage.readFile(file.buffer, {
|
|
1264
|
-
ignoreErrors: false,
|
|
1265
|
-
untilTag: null,
|
|
1266
|
-
includeUntilTagValue: false,
|
|
1267
|
-
noCopy: false
|
|
1268
|
-
});
|
|
1269
|
-
const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
|
|
1270
|
-
dicomData.dict
|
|
1271
|
-
);
|
|
1272
|
-
|
|
1273
|
-
expect(dataset.ExposureIndex).toEqual(expectedExposureIndex);
|
|
1274
|
-
expect(dataset.DeviationIndex).toEqual(expectedDeviationIndex);
|
|
1275
|
-
});
|
|
1276
|
-
|
|
1277
|
-
describe("Test other VRs encoded as UN", () => {
|
|
1278
|
-
test.each([
|
|
1279
|
-
[
|
|
1280
|
-
"00000600",
|
|
1281
|
-
"AE",
|
|
1282
|
-
new Uint8Array([
|
|
1283
|
-
0x20, 0x20, 0x54, 0x45, 0x53, 0x54, 0x5f, 0x41, 0x45, 0x20
|
|
1284
|
-
]).buffer,
|
|
1285
|
-
[" TEST_AE "],
|
|
1286
|
-
["TEST_AE"]
|
|
1287
|
-
],
|
|
1288
|
-
[
|
|
1289
|
-
"00101010",
|
|
1290
|
-
"AS",
|
|
1291
|
-
new Uint8Array([0x30, 0x34, 0x35, 0x59]).buffer,
|
|
1292
|
-
["045Y"],
|
|
1293
|
-
["045Y"]
|
|
1294
|
-
],
|
|
1295
|
-
[
|
|
1296
|
-
"00280009",
|
|
1297
|
-
"AT",
|
|
1298
|
-
new Uint8Array([0x63, 0x10, 0x18, 0x00]).buffer,
|
|
1299
|
-
[0x10630018],
|
|
1300
|
-
[0x10630018]
|
|
1301
|
-
],
|
|
1302
|
-
[
|
|
1303
|
-
"00041130",
|
|
1304
|
-
"CS",
|
|
1305
|
-
new Uint8Array([
|
|
1306
|
-
0x4f, 0x52, 0x49, 0x47, 0x49, 0x4e, 0x41, 0x4c, 0x20, 0x20,
|
|
1307
|
-
0x5c, 0x20, 0x50, 0x52, 0x49, 0x4d, 0x41, 0x52, 0x59, 0x20
|
|
1308
|
-
]).buffer,
|
|
1309
|
-
["ORIGINAL ", " PRIMARY"],
|
|
1310
|
-
["ORIGINAL", "PRIMARY"]
|
|
1311
|
-
],
|
|
1312
|
-
[
|
|
1313
|
-
"00181012",
|
|
1314
|
-
"DA",
|
|
1315
|
-
new Uint8Array([0x32, 0x30, 0x32, 0x34, 0x30, 0x31, 0x30, 0x31])
|
|
1316
|
-
.buffer,
|
|
1317
|
-
["20240101"],
|
|
1318
|
-
["20240101"]
|
|
1319
|
-
],
|
|
1320
|
-
[
|
|
1321
|
-
"00181041",
|
|
1322
|
-
"DS",
|
|
1323
|
-
new Uint8Array([
|
|
1324
|
-
0x30, 0x30, 0x30, 0x30, 0x31, 0x32, 0x33, 0x2e, 0x34, 0x35
|
|
1325
|
-
]).buffer,
|
|
1326
|
-
["0000123.45"],
|
|
1327
|
-
[123.45]
|
|
1328
|
-
],
|
|
1329
|
-
[
|
|
1330
|
-
"00181078",
|
|
1331
|
-
"DT",
|
|
1332
|
-
new Uint8Array([
|
|
1333
|
-
0x32, 0x30, 0x32, 0x34, 0x30, 0x31, 0x30, 0x31, 0x31, 0x32,
|
|
1334
|
-
0x33, 0x30, 0x34, 0x35, 0x2e, 0x31, 0x20, 0x20
|
|
1335
|
-
]).buffer,
|
|
1336
|
-
["20240101123045.1 "],
|
|
1337
|
-
["20240101123045.1 "]
|
|
1338
|
-
],
|
|
1339
|
-
[
|
|
1340
|
-
"00182043",
|
|
1341
|
-
"FL",
|
|
1342
|
-
new Uint8Array([0x66, 0x66, 0xa6, 0x3f, 0x66, 0x66, 0xa6, 0x3f])
|
|
1343
|
-
.buffer,
|
|
1344
|
-
[1.2999999523162842, 1.2999999523162842],
|
|
1345
|
-
[1.2999999523162842, 1.2999999523162842]
|
|
1346
|
-
],
|
|
1347
|
-
[
|
|
1348
|
-
"00186028",
|
|
1349
|
-
"FD",
|
|
1350
|
-
new Uint8Array([0x11, 0x2d, 0x44, 0x54, 0xfb, 0x21, 0x09, 0x40])
|
|
1351
|
-
.buffer,
|
|
1352
|
-
[3.14159265358979],
|
|
1353
|
-
[3.14159265358979]
|
|
1354
|
-
],
|
|
1355
|
-
[
|
|
1356
|
-
"00200012",
|
|
1357
|
-
"IS",
|
|
1358
|
-
new Uint8Array([0x20, 0x2b, 0x32, 0x37, 0x38, 0x39, 0x33, 0x20])
|
|
1359
|
-
.buffer,
|
|
1360
|
-
[" +27893 "],
|
|
1361
|
-
[27893]
|
|
1362
|
-
],
|
|
1363
|
-
[
|
|
1364
|
-
"0018702A",
|
|
1365
|
-
"LO",
|
|
1366
|
-
new Uint8Array([
|
|
1367
|
-
0x20, 0x20, 0x46, 0x65, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x20,
|
|
1368
|
-
0x6e, 0x61, 0x75, 0x73, 0x65, 0x6f, 0x75, 0x73, 0x20, 0x20
|
|
1369
|
-
]).buffer,
|
|
1370
|
-
[" Feeling nauseous "],
|
|
1371
|
-
["Feeling nauseous"]
|
|
1372
|
-
],
|
|
1373
|
-
[
|
|
1374
|
-
"00187040",
|
|
1375
|
-
"LT",
|
|
1376
|
-
new Uint8Array([
|
|
1377
|
-
0x20, 0x20, 0x46, 0x65, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x20,
|
|
1378
|
-
0x6e, 0x61, 0x75, 0x73, 0x65, 0x6f, 0x75, 0x73, 0x20, 0x20
|
|
1379
|
-
]).buffer,
|
|
1380
|
-
[" Feeling nauseous "],
|
|
1381
|
-
[" Feeling nauseous"]
|
|
1382
|
-
],
|
|
1383
|
-
[
|
|
1384
|
-
"00282000",
|
|
1385
|
-
"OB",
|
|
1386
|
-
new Uint8Array([0x13, 0x40, 0x80, 0x88, 0x88, 0x90, 0x88, 0x88])
|
|
1387
|
-
.buffer,
|
|
1388
|
-
[
|
|
1389
|
-
new Uint8Array([
|
|
1390
|
-
0x13, 0x40, 0x80, 0x88, 0x88, 0x90, 0x88, 0x88
|
|
1391
|
-
]).buffer
|
|
1392
|
-
],
|
|
1393
|
-
[
|
|
1394
|
-
new Uint8Array([
|
|
1395
|
-
0x13, 0x40, 0x80, 0x88, 0x88, 0x90, 0x88, 0x88
|
|
1396
|
-
]).buffer
|
|
1397
|
-
]
|
|
1398
|
-
],
|
|
1399
|
-
[
|
|
1400
|
-
"00701A07",
|
|
1401
|
-
"OD",
|
|
1402
|
-
new Uint8Array([0x00, 0x00, 0x00, 0x54, 0x34, 0x6f, 0x9d, 0x41])
|
|
1403
|
-
.buffer,
|
|
1404
|
-
[
|
|
1405
|
-
new Uint8Array([
|
|
1406
|
-
0x00, 0x00, 0x00, 0x54, 0x34, 0x6f, 0x9d, 0x41
|
|
1407
|
-
]).buffer
|
|
1408
|
-
],
|
|
1409
|
-
[
|
|
1410
|
-
new Uint8Array([
|
|
1411
|
-
0x00, 0x00, 0x00, 0x54, 0x34, 0x6f, 0x9d, 0x41
|
|
1412
|
-
]).buffer
|
|
1413
|
-
]
|
|
1414
|
-
],
|
|
1415
|
-
[
|
|
1416
|
-
"00720067",
|
|
1417
|
-
"OF",
|
|
1418
|
-
new Uint8Array([
|
|
1419
|
-
0x00, 0x00, 0x28, 0x41, 0x00, 0x00, 0x30, 0xc0, 0x00, 0x00,
|
|
1420
|
-
0xf6, 0x42
|
|
1421
|
-
]).buffer,
|
|
1422
|
-
[
|
|
1423
|
-
new Uint8Array([
|
|
1424
|
-
0x00, 0x00, 0x28, 0x41, 0x00, 0x00, 0x30, 0xc0, 0x00,
|
|
1425
|
-
0x00, 0xf6, 0x42
|
|
1426
|
-
]).buffer
|
|
1427
|
-
],
|
|
1428
|
-
[
|
|
1429
|
-
new Uint8Array([
|
|
1430
|
-
0x00, 0x00, 0x28, 0x41, 0x00, 0x00, 0x30, 0xc0, 0x00,
|
|
1431
|
-
0x00, 0xf6, 0x42
|
|
1432
|
-
]).buffer
|
|
1433
|
-
]
|
|
1434
|
-
],
|
|
1435
|
-
[
|
|
1436
|
-
"00281224",
|
|
1437
|
-
"OW",
|
|
1438
|
-
new Uint8Array([0x13, 0x40, 0x80, 0x88, 0x88, 0x90, 0x88, 0x88])
|
|
1439
|
-
.buffer,
|
|
1440
|
-
[
|
|
1441
|
-
new Uint8Array([
|
|
1442
|
-
0x13, 0x40, 0x80, 0x88, 0x88, 0x90, 0x88, 0x88
|
|
1443
|
-
]).buffer
|
|
1444
|
-
],
|
|
1445
|
-
[
|
|
1446
|
-
new Uint8Array([
|
|
1447
|
-
0x13, 0x40, 0x80, 0x88, 0x88, 0x90, 0x88, 0x88
|
|
1448
|
-
]).buffer
|
|
1449
|
-
]
|
|
1450
|
-
],
|
|
1451
|
-
[
|
|
1452
|
-
"00080090",
|
|
1453
|
-
"PN",
|
|
1454
|
-
new Uint8Array([
|
|
1455
|
-
0x44, 0x6f, 0x65, 0x5e, 0x4a, 0x6f, 0x68, 0x6e, 0x5e, 0x41,
|
|
1456
|
-
0x5e, 0x4a, 0x72, 0x2e, 0x5e, 0x4d, 0x44, 0x3d, 0x44, 0x6f,
|
|
1457
|
-
0x65, 0x5e, 0x4a, 0x61, 0x79, 0x5e, 0x41, 0x5e, 0x4a, 0x72,
|
|
1458
|
-
0x2e, 0x20
|
|
1459
|
-
]).buffer,
|
|
1460
|
-
["Doe^John^A^Jr.^MD=Doe^Jay^A^Jr."],
|
|
1461
|
-
[
|
|
1462
|
-
{
|
|
1463
|
-
Alphabetic: "Doe^John^A^Jr.^MD",
|
|
1464
|
-
Ideographic: "Doe^Jay^A^Jr."
|
|
1465
|
-
}
|
|
1466
|
-
]
|
|
1467
|
-
],
|
|
1468
|
-
[
|
|
1469
|
-
"00080094",
|
|
1470
|
-
"SH",
|
|
1471
|
-
new Uint8Array([
|
|
1472
|
-
0x43, 0x54, 0x5f, 0x53, 0x43, 0x41, 0x4e, 0x5f, 0x30, 0x31
|
|
1473
|
-
]).buffer,
|
|
1474
|
-
["CT_SCAN_01"],
|
|
1475
|
-
["CT_SCAN_01"]
|
|
1476
|
-
],
|
|
1477
|
-
[
|
|
1478
|
-
"00186020",
|
|
1479
|
-
"SL",
|
|
1480
|
-
new Uint8Array([0x40, 0xe2, 0x01, 0x00, 0x40, 0xe2, 0x01, 0x00])
|
|
1481
|
-
.buffer,
|
|
1482
|
-
[123456, 123456],
|
|
1483
|
-
[123456, 123456]
|
|
1484
|
-
],
|
|
1485
|
-
[
|
|
1486
|
-
"00189219",
|
|
1487
|
-
"SS",
|
|
1488
|
-
new Uint8Array([0xd2, 0x04, 0xd2, 0x04, 0xd2, 0x04]).buffer,
|
|
1489
|
-
[1234, 1234, 1234],
|
|
1490
|
-
[1234, 1234, 1234]
|
|
1491
|
-
],
|
|
1492
|
-
[
|
|
1493
|
-
"00189373",
|
|
1494
|
-
"ST",
|
|
1495
|
-
new Uint8Array([
|
|
1496
|
-
0x20, 0x20, 0x46, 0x65, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x20,
|
|
1497
|
-
0x6e, 0x61, 0x75, 0x73, 0x65, 0x6f, 0x75, 0x73, 0x20, 0x20
|
|
1498
|
-
]).buffer,
|
|
1499
|
-
[" Feeling nauseous "],
|
|
1500
|
-
[" Feeling nauseous"]
|
|
1501
|
-
],
|
|
1502
|
-
[
|
|
1503
|
-
"21000050",
|
|
1504
|
-
"TM",
|
|
1505
|
-
new Uint8Array([
|
|
1506
|
-
0x34, 0x32, 0x35, 0x33, 0x30, 0x2e, 0x31, 0x32, 0x33, 0x34,
|
|
1507
|
-
0x35, 0x36
|
|
1508
|
-
]).buffer,
|
|
1509
|
-
["42530.123456"],
|
|
1510
|
-
["42530.123456"]
|
|
1511
|
-
],
|
|
1512
|
-
[
|
|
1513
|
-
"3010001B",
|
|
1514
|
-
"UC",
|
|
1515
|
-
new Uint8Array([
|
|
1516
|
-
0x54, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x20, 0x73,
|
|
1517
|
-
0x70, 0x61, 0x63, 0x65, 0x73, 0x20, 0x61, 0x6c, 0x6c, 0x6f,
|
|
1518
|
-
0x77, 0x65, 0x64, 0x20, 0x20, 0x20
|
|
1519
|
-
]).buffer,
|
|
1520
|
-
["Trailing spaces allowed "],
|
|
1521
|
-
["Trailing spaces allowed"]
|
|
1522
|
-
],
|
|
1523
|
-
[
|
|
1524
|
-
"00041510",
|
|
1525
|
-
"UI",
|
|
1526
|
-
new Uint8Array([
|
|
1527
|
-
0x31, 0x2e, 0x32, 0x2e, 0x38, 0x34, 0x30, 0x2e, 0x31, 0x30,
|
|
1528
|
-
0x30, 0x30, 0x38, 0x2e, 0x31, 0x2e, 0x32, 0x2e, 0x31
|
|
1529
|
-
]).buffer,
|
|
1530
|
-
["1.2.840.10008.1.2.1"],
|
|
1531
|
-
["1.2.840.10008.1.2.1"]
|
|
1532
|
-
],
|
|
1533
|
-
[
|
|
1534
|
-
"30100092",
|
|
1535
|
-
"UL",
|
|
1536
|
-
new Uint8Array([0x40, 0xe2, 0x01, 0x00]).buffer,
|
|
1537
|
-
[123456],
|
|
1538
|
-
[123456]
|
|
1539
|
-
],
|
|
1540
|
-
[
|
|
1541
|
-
"0008010E",
|
|
1542
|
-
"UR",
|
|
1543
|
-
new Uint8Array([
|
|
1544
|
-
0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x64, 0x69, 0x63,
|
|
1545
|
-
0x6f, 0x6d, 0x2e, 0x6e, 0x65, 0x6d, 0x61, 0x2e, 0x6f, 0x72,
|
|
1546
|
-
0x67, 0x20
|
|
1547
|
-
]).buffer,
|
|
1548
|
-
["http://dicom.nema.org "],
|
|
1549
|
-
["http://dicom.nema.org "]
|
|
1550
|
-
],
|
|
1551
|
-
[
|
|
1552
|
-
"00080301",
|
|
1553
|
-
"US",
|
|
1554
|
-
new Uint8Array([0xd2, 0x04]).buffer,
|
|
1555
|
-
[1234],
|
|
1556
|
-
[1234]
|
|
1557
|
-
],
|
|
1558
|
-
[
|
|
1559
|
-
"0008030E",
|
|
1560
|
-
"UT",
|
|
1561
|
-
new Uint8Array([
|
|
1562
|
-
0x20, 0x20, 0x46, 0x65, 0x65, 0x6c, 0x69, 0x6e, 0x67, 0x20,
|
|
1563
|
-
0x6e, 0x61, 0x75, 0x73, 0x65, 0x6f, 0x75, 0x73, 0x20, 0x20
|
|
1564
|
-
]).buffer,
|
|
1565
|
-
[" Feeling nauseous "],
|
|
1566
|
-
[" Feeling nauseous"]
|
|
1567
|
-
]
|
|
1568
|
-
])(
|
|
1569
|
-
"for tag %s with expected VR %p",
|
|
1570
|
-
(tag, vr, byteArray, expectedRawValue, expectedValue) => {
|
|
1571
|
-
// setup input tag as UN
|
|
1572
|
-
const dataset = {
|
|
1573
|
-
[tag]: {
|
|
1574
|
-
vr: "UN",
|
|
1575
|
-
_rawValue: [byteArray],
|
|
1576
|
-
Value: [byteArray]
|
|
1577
|
-
}
|
|
1578
|
-
};
|
|
1579
|
-
|
|
1580
|
-
const dicomDict = new DicomDict({});
|
|
1581
|
-
dicomDict.dict = dataset;
|
|
1582
|
-
|
|
1583
|
-
// Write and re-read
|
|
1584
|
-
const outputDicomDict = DicomMessage.readFile(
|
|
1585
|
-
dicomDict.write(),
|
|
1586
|
-
{ forceStoreRaw: true }
|
|
1587
|
-
);
|
|
1588
|
-
|
|
1589
|
-
// Expect tag to be parsed correctly based on meta dictionary vr lookup
|
|
1590
|
-
expect(outputDicomDict.dict[tag].vr).toEqual(vr);
|
|
1591
|
-
expect(outputDicomDict.dict[tag]._rawValue).toEqual(
|
|
1592
|
-
expectedRawValue
|
|
1593
|
-
);
|
|
1594
|
-
expect(outputDicomDict.dict[tag].Value).toEqual(expectedValue);
|
|
1595
|
-
}
|
|
1596
|
-
);
|
|
1597
|
-
});
|
|
1598
|
-
});
|
|
1599
|
-
|
|
1600
|
-
describe("Save original non-standard VR and check dataset after denaturalized", () => {
    // Dataset whose elements deliberately carry VRs that differ from the
    // standard dictionary: (0028,3006) LUTData is written as OW (US by the
    // custom dictionary entry below) and (0010,4000) PatientComments as LO
    // (LT by standard). The test verifies the original VRs are preserved in
    // _vrMap through a naturalize -> denaturalize round trip.
    const dicomTagsWithNonStandardVr = {
        dict: {
            "00283010": {
                vr: "SQ",
                Value: [
                    {
                        "00283002": {
                            vr: "US",
                            Value: [0, 0, 16]
                        },
                        "00283003": {
                            vr: "LO",
                            Value: ["NORMAL"]
                        },
                        "00283006": {
                            vr: "OW", // US by standard
                            Value: [new ArrayBuffer()]
                        }
                    },
                    {
                        "00283002": {
                            vr: "US",
                            Value: [0, 0, 16]
                        },
                        "00283003": {
                            vr: "LO",
                            Value: ["HARDER"]
                        },
                        "00283006": {
                            vr: "OW", // US by standard
                            Value: [new ArrayBuffer()]
                        }
                    },
                    {
                        "00283002": {
                            vr: "US",
                            Value: [0, 0, 16]
                        },
                        "00283003": {
                            vr: "LO",
                            Value: ["SOFTER"]
                        },
                        "00283006": {
                            vr: "OW", // US by standard
                            Value: [new ArrayBuffer()]
                        }
                    }
                ]
            },
            "00180015": {
                vr: "CS",
                Value: ["CHEST"]
            },
            "00080060": {
                vr: "CS",
                Value: ["DX"]
            },
            "00100010": {
                vr: "PN",
                Value: [
                    {
                        Alphabetic: "Qure Two"
                    }
                ]
            },
            "00100020": {
                vr: "LO",
                Value: ["ENM1-M0012260"]
            },
            "00100040": {
                vr: "CS",
                Value: ["M"]
            },
            "00104000": {
                vr: "LO", // LT by standard
                Value: ["Patient comment"]
            }
        }
    };

    // Custom dictionary entries giving the standard VR (US) for the LUT
    // tags, so the OW in the dataset above is detected as non-standard.
    const addedCustomDictionaryNameMap = {
        LUTData: {
            tag: "(0028,3006)",
            vr: "US",
            name: "LUTData",
            vm: "1-n",
            version: "DICOM"
        },
        LUTDescriptor: {
            tag: "(0028,3002)",
            vr: "US",
            name: "LUTDescriptor",
            vm: "3",
            version: "DICOM"
        }
    };

    // FIX: the original ran the registration side effects and every
    // expect() directly in the describe body, so they executed during
    // Jest's collection phase and failures were not attributed to a test.
    // Wrapping the executable part in it() defers it to the run phase.
    it("preserves non-standard VRs in _vrMap through naturalize/denaturalize", () => {
        // Register the custom entries and rebuild the name map so the
        // naturalizer can compare the dataset VRs against the standard ones.
        for (const key in addedCustomDictionaryNameMap) {
            const element = addedCustomDictionaryNameMap[key];
            DicomMetaDictionary.dictionary[element.tag] = element;
        }
        DicomMetaDictionary._generateNameMap();

        const dataset = dcmjs.data.DicomMetaDictionary.naturalizeDataset(
            dicomTagsWithNonStandardVr.dict
        );

        // PatientComments kept its original (non-standard) LO VR.
        expect(Object.keys(dataset._vrMap)).toContain("PatientComments");
        expect(dataset._vrMap.PatientComments).not.toEqual(
            DicomMetaDictionary.nameMap.PatientComments.vr
        );
        expect(dataset._vrMap.PatientComments).toEqual("LO");

        dataset.VOILUTSequence.forEach(sequenceItem => {
            expect(sequenceItem._vrMap).toBeDefined();
            expect(Object.keys(sequenceItem._vrMap).length).toBe(1);
            expect(sequenceItem._vrMap.LUTData).toBe("OW"); // saved origin vr in _vrMap (by standard in addedCustomDictionaryNameMap is US)
        });

        const denaturalizedDataset =
            dcmjs.data.DicomMetaDictionary.denaturalizeDataset(dataset);

        // Denaturalizing writes the original VRs back, not the standard ones.
        expect(denaturalizedDataset["00104000"].vr).toBe("LO");

        denaturalizedDataset["00283010"].Value.forEach(sequenceItem => {
            expect(sequenceItem["00283006"].vr).toBe("OW");
        });
    });
});
|
|
1729
|
-
|
|
1730
|
-
// Table of [input number, expected DS string]: DICOM DS values are limited
// to 16 bytes, so conversion must round/truncate while staying accurate.
it.each([
    [1.0, "1"],
    [0.0, "0"],
    [-0.0, "0"],
    [0.123, "0.123"],
    [-0.321, "-0.321"],
    [0.00001, "0.00001"],
    [3.14159265358979323846, "3.14159265358979"],
    [-3.14159265358979323846, "-3.1415926535898"],
    [5.3859401928763739403e-7, "5.38594019288e-7"],
    [-5.3859401928763739403e-7, "-5.3859401929e-7"],
    [1.2342534378125532912998323e10, "12342534378.1255"],
    [6.40708699858767842501238e13, "64070869985876.8"],
    [1.7976931348623157e308, "1.797693135e+308"],
    [0.99990081787109, "0.99990081787109"]
])(
    "A converted decimal string should not exceed 16 bytes in length",
    (input, expected) => {
        const dsRepresentation = ValueRepresentation.createByTypeString("DS");
        const converted = dsRepresentation.convertToString(input);

        // Both constraints must hold: the hard 16-byte DS limit and the
        // exact expected rendering for this input.
        expect(converted.length).toBeLessThanOrEqual(16);
        expect(converted).toBe(expected);
    }
);
|
|
1754
|
-
|
|
1755
|
-
describe("test OtherDouble ValueRepresentation", () => {
    it("Treat OD as explicit VR with correct length", async () => {
        // Fetch a fixture containing a single-word OD (Other Double) element.
        const url =
            "https://github.com/dcmjs-org/data/releases/download/od-encoding-data/OD-single-word-example.dcm";
        const dcmPath = await getTestDataset(url, "OD-single-word-example");
        const fileBytes = fs.readFileSync(dcmPath);

        const parsed = dcmjs.data.DicomMessage.readFile(
            new Uint8Array(fileBytes).buffer
        );

        // expect OD VR data element (VolumetricCurveUpDirections) to be read with expected value
        expect(parsed.dict["00701A07"]).toBeTruthy();
        const rawOdValue = parsed.dict["00701A07"].Value[0];
        const expectedBytes = new Uint8Array([0, 0, 0, 0, 0, 0, 0, 64]);
        expect(new Uint8Array(rawOdValue)).toEqual(expectedBytes);

        // expect arbitrary tag (BlendingInputNumber, US VR) after OD VR to be read without issue
        expect(parsed.dict["00701B02"]).toBeTruthy();
        expect(parsed.dict["00701B02"].Value[0]).toBe(1);
    });
});
|