@cornerstonejs/adapters 0.1.2 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +14 -5
- package/src/adapters/Cornerstone/Angle.js +92 -0
- package/src/adapters/Cornerstone/ArrowAnnotate.js +106 -0
- package/src/adapters/Cornerstone/Bidirectional.js +187 -0
- package/src/adapters/Cornerstone/CircleRoi.js +109 -0
- package/src/adapters/Cornerstone/CobbAngle.js +96 -0
- package/src/adapters/Cornerstone/EllipticalRoi.js +149 -0
- package/src/adapters/Cornerstone/FreehandRoi.js +82 -0
- package/src/adapters/Cornerstone/Length.js +80 -0
- package/src/adapters/Cornerstone/MeasurementReport.js +352 -0
- package/src/adapters/Cornerstone/RectangleRoi.js +92 -0
- package/src/adapters/Cornerstone/Segmentation.js +118 -0
- package/src/adapters/Cornerstone/Segmentation_3X.js +632 -0
- package/src/adapters/Cornerstone/Segmentation_4X.js +1543 -0
- package/src/adapters/Cornerstone/cornerstone4Tag.js +1 -0
- package/src/adapters/Cornerstone/index.js +27 -0
- package/src/adapters/Cornerstone3D/ArrowAnnotate.js +155 -0
- package/src/adapters/Cornerstone3D/Bidirectional.js +196 -0
- package/src/adapters/Cornerstone3D/CodingScheme.js +16 -0
- package/src/adapters/Cornerstone3D/EllipticalROI.js +204 -0
- package/src/adapters/Cornerstone3D/Length.js +113 -0
- package/src/adapters/Cornerstone3D/MeasurementReport.js +445 -0
- package/src/adapters/Cornerstone3D/PlanarFreehandROI.js +137 -0
- package/src/adapters/Cornerstone3D/Probe.js +106 -0
- package/src/adapters/Cornerstone3D/cornerstone3DTag.js +1 -0
- package/src/adapters/Cornerstone3D/index.js +24 -0
- package/src/adapters/VTKjs/Segmentation.js +223 -0
- package/src/adapters/VTKjs/index.js +7 -0
- package/src/adapters/helpers.js +19 -0
- package/src/adapters/index.js +11 -0
- package/src/index.js +4 -0
- package/.babelrc +0 -9
- package/.eslintrc.json +0 -18
- package/.prettierrc +0 -5
- package/CHANGELOG.md +0 -106
- package/generate-dictionary.js +0 -145
- package/netlify.toml +0 -20
- package/rollup.config.js +0 -37
- package/test/adapters.test.js +0 -1
|
@@ -0,0 +1,1543 @@
|
|
|
1
|
+
import { log, utilities, normalizers, derivations } from "dcmjs";
|
|
2
|
+
import ndarray from "ndarray";
|
|
3
|
+
import cloneDeep from "lodash.clonedeep";
|
|
4
|
+
|
|
5
|
+
const {
|
|
6
|
+
rotateDirectionCosinesInPlane,
|
|
7
|
+
flipImageOrientationPatient: flipIOP,
|
|
8
|
+
flipMatrix2D,
|
|
9
|
+
rotateMatrix902D,
|
|
10
|
+
nearlyEqual
|
|
11
|
+
} = utilities.orientation;
|
|
12
|
+
|
|
13
|
+
const { datasetToBlob, BitArray, DicomMessage, DicomMetaDictionary } =
|
|
14
|
+
utilities;
|
|
15
|
+
|
|
16
|
+
const { Normalizer } = normalizers;
|
|
17
|
+
const { Segmentation: SegmentationDerivation } = derivations;
|
|
18
|
+
|
|
19
|
+
const { encode, decode } = utilities.compression;
|
|
20
|
+
|
|
21
|
+
/**
 * Public API of the Cornerstone (cornerstoneTools 4.x) Segmentation adapter:
 * - `generateSegmentation`: build a DICOM SEG Blob from cornerstone images
 *   plus cornerstoneTools `Labelmap3D` data.
 * - `generateToolState`: parse a DICOM SEG ArrayBuffer back into labelmap
 *   buffers and segment metadata.
 * - `fillSegmentation`: populate an already-derived SEG dataset with
 *   `Labelmap3D` data.
 */
const Segmentation = {
    generateSegmentation,
    generateToolState,
    fillSegmentation
};

export default Segmentation;
|
|
28
|
+
|
|
29
|
+
/**
|
|
30
|
+
*
|
|
31
|
+
* @typedef {Object} BrushData
|
|
32
|
+
* @property {Object} toolState - The cornerstoneTools global toolState.
|
|
33
|
+
* @property {Object[]} segments - The cornerstoneTools segment metadata that corresponds to the
|
|
34
|
+
* seriesInstanceUid.
|
|
35
|
+
*/
|
|
36
|
+
|
|
37
|
+
// Defaults merged (and overridable) with the caller's options in `fillSegmentation`.
const generateSegmentationDefaultOptions = {
    // Include slice spacing information in the derived dataset.
    includeSliceSpacing: true,
    // RLE-compress the SEG PixelData (TransferSyntaxUID 1.2.840.10008.1.2.5).
    // When false, the pixel data is bit-packed instead.
    rleEncode: true
};
|
|
41
|
+
|
|
42
|
+
/**
 * generateSegmentation - Derives an empty SEG dataset from the source
 * cornerstone images and fills it with the given cornerstoneTools labelmaps.
 *
 * @param {object[]} images Cornerstone images whose source DICOM lives at
 *   `image.data.byteArray.buffer`.
 * @param {Object|Object[]} inputLabelmaps3D A cornerstone `Labelmap3D`, or an array of them.
 * @param {Object} [userOptions={}] Options forwarded to the SEG derivation and to `fillSegmentation`.
 * @returns {Blob} The encoded SEG file as a Blob.
 */
function generateSegmentation(images, inputLabelmaps3D, userOptions = {}) {
    // Multiframe sources are recognised by the "?frame" suffix on the imageId.
    const [firstImage] = images;
    const isMultiframe = firstImage.imageId.includes("?frame");

    const segmentation = _createSegFromImages(
        images,
        isMultiframe,
        userOptions
    );

    return fillSegmentation(segmentation, inputLabelmaps3D, userOptions);
}
|
|
62
|
+
|
|
63
|
+
/**
 * fillSegmentation - Fills a derived segmentation dataset with cornerstoneTools
 * `Labelmap3D` data, then encodes the pixel data (RLE or bit-packed).
 *
 * @param {Object} segmentation An empty segmentation derived dataset
 *   (e.g. produced by `_createSegFromImages`).
 * @param {Object|Object[]} inputLabelmaps3D The cornerstone `Labelmap3D` object, or an array of objects.
 * @param {Object} userOptions Options object to override default options
 *   (`includeSliceSpacing`, `rleEncode`).
 * @returns {Blob} The encoded SEG dataset as a Blob.
 */
function fillSegmentation(segmentation, inputLabelmaps3D, userOptions = {}) {
    const options = Object.assign(
        {},
        generateSegmentationDefaultOptions,
        userOptions
    );

    // Use another variable so we don't redefine labelmaps3D.
    const labelmaps3D = Array.isArray(inputLabelmaps3D)
        ? inputLabelmaps3D
        : [inputLabelmaps3D];

    let numberOfFrames = 0;
    // Per labelmap: sparse array mapping segmentIndex -> frame indices that
    // contain that segment.
    const referencedFramesPerLabelmap = [];

    for (
        let labelmapIndex = 0;
        labelmapIndex < labelmaps3D.length;
        labelmapIndex++
    ) {
        const labelmap3D = labelmaps3D[labelmapIndex];
        const { labelmaps2D, metadata } = labelmap3D;

        const referencedFramesPerSegment = [];

        // Segment indices start at 1 (0 is background); only segments with
        // metadata get a frame list.
        for (let i = 1; i < metadata.length; i++) {
            if (metadata[i]) {
                referencedFramesPerSegment[i] = [];
            }
        }

        // Record, per segment, every frame index on which it appears.
        // `numberOfFrames` counts one SEG frame per (frame, segment) pair.
        for (let i = 0; i < labelmaps2D.length; i++) {
            const labelmap2D = labelmaps2D[i];

            if (labelmaps2D[i]) {
                const { segmentsOnLabelmap } = labelmap2D;

                segmentsOnLabelmap.forEach(segmentIndex => {
                    if (segmentIndex !== 0) {
                        referencedFramesPerSegment[segmentIndex].push(i);
                        numberOfFrames++;
                    }
                });
            }
        }

        referencedFramesPerLabelmap[labelmapIndex] = referencedFramesPerSegment;
    }

    segmentation.setNumberOfFrames(numberOfFrames);

    // Second pass: add each segment's frames to the derived dataset.
    for (
        let labelmapIndex = 0;
        labelmapIndex < labelmaps3D.length;
        labelmapIndex++
    ) {
        const referencedFramesPerSegment =
            referencedFramesPerLabelmap[labelmapIndex];

        const labelmap3D = labelmaps3D[labelmapIndex];
        const { metadata } = labelmap3D;

        for (
            let segmentIndex = 1;
            segmentIndex < referencedFramesPerSegment.length;
            segmentIndex++
        ) {
            const referencedFrameIndicies =
                referencedFramesPerSegment[segmentIndex];

            if (referencedFrameIndicies) {
                // Frame numbers start from 1.
                const referencedFrameNumbers = referencedFrameIndicies.map(
                    element => {
                        return element + 1;
                    }
                );
                const segmentMetadata = metadata[segmentIndex];
                const labelmaps = _getLabelmapsFromRefernecedFrameIndicies(
                    labelmap3D,
                    referencedFrameIndicies
                );

                segmentation.addSegmentFromLabelmap(
                    segmentMetadata,
                    labelmaps,
                    segmentIndex,
                    referencedFrameNumbers
                );
            }
        }
    }

    if (options.rleEncode) {
        const rleEncodedFrames = encode(
            segmentation.dataset.PixelData,
            numberOfFrames,
            segmentation.dataset.Rows,
            segmentation.dataset.Columns
        );

        // Must use fractional now to RLE encode, as the DICOM standard only allows BitStored && BitsAllocated
        // to be 1 for BINARY. This is not ideal and there should be a better format for compression in this manner
        // added to the standard.
        segmentation.assignToDataset({
            BitsAllocated: "8",
            BitsStored: "8",
            HighBit: "7",
            SegmentationType: "FRACTIONAL",
            SegmentationFractionalType: "PROBABILITY",
            MaximumFractionalValue: "255"
        });

        // RLE Lossless transfer syntax.
        segmentation.dataset._meta.TransferSyntaxUID = {
            Value: ["1.2.840.10008.1.2.5"],
            vr: "UI"
        };
        segmentation.dataset._vrMap.PixelData = "OB";
        segmentation.dataset.PixelData = rleEncodedFrames;
    } else {
        // If no rleEncoding, at least bitpack the data.
        segmentation.bitPackPixelData();
    }

    const segBlob = datasetToBlob(segmentation.dataset);

    return segBlob;
}
|
|
199
|
+
|
|
200
|
+
/**
 * Collects the 2D pixel data arrays of a `Labelmap3D` for the given frame
 * indices, in the order the indices are supplied.
 *
 * @param {Object} labelmap3D The cornerstone `Labelmap3D` object.
 * @param {number[]} referencedFrameIndicies Frame indices into `labelmaps2D`.
 * @returns {Array} One `pixelData` entry per referenced frame.
 */
function _getLabelmapsFromRefernecedFrameIndicies(
    labelmap3D,
    referencedFrameIndicies
) {
    const { labelmaps2D } = labelmap3D;

    return referencedFrameIndicies.map(
        frameIndex => labelmaps2D[frameIndex].pixelData
    );
}
|
|
216
|
+
|
|
217
|
+
/**
 * _createSegFromImages - Builds an empty SEG derived dataset from the
 * source cornerstone images.
 *
 * @param {Object[]} images An array of the cornerstone image objects.
 * @param {Boolean} isMultiframe Whether the images are multiframe.
 * @param {Object} options Options forwarded to the `Segmentation` derivation.
 * @returns {Object} The Seg derived dataSet.
 */
function _createSegFromImages(images, isMultiframe, options) {
    // Parse one cornerstone image's raw DICOM buffer into a naturalized dataset.
    const toNaturalizedDataset = image => {
        const dicomData = DicomMessage.readFile(image.data.byteArray.buffer);
        const dataset = DicomMetaDictionary.naturalizeDataset(dicomData.dict);

        dataset._meta = DicomMetaDictionary.namifyDataset(dicomData.meta);

        return dataset;
    };

    // A multiframe source is fully described by its first image; otherwise
    // every image contributes a dataset.
    const datasets = isMultiframe
        ? [toNaturalizedDataset(images[0])]
        : images.map(toNaturalizedDataset);

    const multiframe = Normalizer.normalizeToDataset(datasets);

    return new SegmentationDerivation([multiframe], options);
}
|
|
255
|
+
|
|
256
|
+
/**
 * generateToolState - Given a set of cornerstoneTools imageIds and a Segmentation buffer,
 * derive cornerstoneTools toolState and brush metadata.
 *
 * @param {string[]} imageIds - An array of the imageIds.
 * @param {ArrayBuffer} arrayBuffer - The SEG arrayBuffer.
 * @param {*} metadataProvider - A Cornerstone metadataProvider to query metadata from imageIds.
 * @param {bool} skipOverlapping - skip checks for overlapping segs, default value false.
 * @param {number} tolerance - orientation-comparison tolerance, default value 1e-3.
 *
 * @return {Object} `{ labelmapBufferArray, segMetadata, segmentsOnFrame, segmentsOnFrameArray }`:
 *   a list of array buffers (one per labelmap), the segment metadata, the
 *   per-frame segment tracking, and (for the overlapping case) the per-labelmap
 *   per-frame segment tracking. Returns `undefined` for RLE + 1-bit data.
 */
function generateToolState(
    imageIds,
    arrayBuffer,
    metadataProvider,
    skipOverlapping = false,
    tolerance = 1e-3
) {
    // Parse the SEG file and normalize it to a single multiframe dataset.
    const dicomData = DicomMessage.readFile(arrayBuffer);
    const dataset = DicomMetaDictionary.naturalizeDataset(dicomData.dict);
    dataset._meta = DicomMetaDictionary.namifyDataset(dicomData.meta);
    const multiframe = Normalizer.normalizeToDataset([dataset]);

    const imagePlaneModule = metadataProvider.get(
        "imagePlaneModule",
        imageIds[0]
    );

    const generalSeriesModule = metadataProvider.get(
        "generalSeriesModule",
        imageIds[0]
    );

    const SeriesInstanceUID = generalSeriesModule.seriesInstanceUID;

    // NOTE(review): only warns; the code below still dereferences
    // imagePlaneModule and will throw if it is actually missing.
    if (!imagePlaneModule) {
        console.warn("Insufficient metadata, imagePlaneModule missing.");
    }

    // rowCosines/columnCosines may be plain arrays or {x,y,z} vectors
    // depending on the metadata provider.
    const ImageOrientationPatient = Array.isArray(imagePlaneModule.rowCosines)
        ? [...imagePlaneModule.rowCosines, ...imagePlaneModule.columnCosines]
        : [
              imagePlaneModule.rowCosines.x,
              imagePlaneModule.rowCosines.y,
              imagePlaneModule.rowCosines.z,
              imagePlaneModule.columnCosines.x,
              imagePlaneModule.columnCosines.y,
              imagePlaneModule.columnCosines.z
          ];

    // Get IOP from ref series, compute supported orientations:
    const validOrientations = getValidOrientations(ImageOrientationPatient);

    const sliceLength = multiframe.Columns * multiframe.Rows;
    const segMetadata = getSegmentMetadata(multiframe, SeriesInstanceUID);

    const TransferSyntaxUID = multiframe._meta.TransferSyntaxUID.Value[0];

    let pixelData;

    // RLE Lossless transfer syntax: decode the RLE frames first.
    if (TransferSyntaxUID === "1.2.840.10008.1.2.5") {
        const rleEncodedFrames = Array.isArray(multiframe.PixelData)
            ? multiframe.PixelData
            : [multiframe.PixelData];

        pixelData = decode(
            rleEncodedFrames,
            multiframe.Rows,
            multiframe.Columns
        );

        // RLE combined with 1-bit packed data is unsupported; bail out.
        if (multiframe.BitsStored === 1) {
            console.warn("No implementation for rle + bitbacking.");

            return;
        }
    } else {
        pixelData = unpackPixelData(multiframe);

        if (!pixelData) {
            throw new Error("Fractional segmentations are not yet supported");
        }
    }

    const orientation = checkOrientation(
        multiframe,
        validOrientations,
        [imagePlaneModule.rows, imagePlaneModule.columns, imageIds.length],
        tolerance
    );

    let overlapping = false;
    if (!skipOverlapping) {
        overlapping = checkSEGsOverlapping(
            pixelData,
            multiframe,
            imageIds,
            validOrientations,
            metadataProvider,
            tolerance
        );
    }

    let insertFunction;

    switch (orientation) {
        case "Planar":
            if (overlapping) {
                insertFunction = insertOverlappingPixelDataPlanar;
            } else {
                insertFunction = insertPixelDataPlanar;
            }
            break;
        case "Perpendicular":
            //insertFunction = insertPixelDataPerpendicular;
            throw new Error(
                "Segmentations orthogonal to the acquisition plane of the source data are not yet supported."
            );
        case "Oblique":
            throw new Error(
                "Segmentations oblique to the acquisition plane of the source data are not yet supported."
            );
    }

    /* if SEGs are overlapping:
    1) the labelmapBuffer will contain M volumes which have non-overlapping segments;
    2) segmentsOnFrame will have M * numberOfFrames values to track in which labelMap are the segments;
    3) insertFunction will return the number of LabelMaps
    4) generateToolState return is an array*/

    const segmentsOnFrameArray = [];
    segmentsOnFrameArray[0] = [];
    const segmentsOnFrame = [];

    const arrayBufferLength = sliceLength * imageIds.length * 2; // 2 bytes per label voxel in cst4.
    const labelmapBufferArray = [];
    labelmapBufferArray[0] = new ArrayBuffer(arrayBufferLength);

    // The chosen insert function mutates segmentsOnFrame, segmentsOnFrameArray
    // and labelmapBufferArray in place.
    insertFunction(
        segmentsOnFrame,
        segmentsOnFrameArray,
        labelmapBufferArray,
        pixelData,
        multiframe,
        imageIds,
        validOrientations,
        metadataProvider,
        tolerance
    );

    return {
        labelmapBufferArray,
        segMetadata,
        segmentsOnFrame,
        segmentsOnFrameArray
    };
}
|
|
418
|
+
|
|
419
|
+
// function insertPixelDataPerpendicular(
|
|
420
|
+
// segmentsOnFrame,
|
|
421
|
+
// labelmapBuffer,
|
|
422
|
+
// pixelData,
|
|
423
|
+
// multiframe,
|
|
424
|
+
// imageIds,
|
|
425
|
+
// validOrientations,
|
|
426
|
+
// metadataProvider
|
|
427
|
+
// ) {
|
|
428
|
+
// const {
|
|
429
|
+
// SharedFunctionalGroupsSequence,
|
|
430
|
+
// PerFrameFunctionalGroupsSequence,
|
|
431
|
+
// Rows,
|
|
432
|
+
// Columns
|
|
433
|
+
// } = multiframe;
|
|
434
|
+
|
|
435
|
+
// const firstImagePlaneModule = metadataProvider.get(
|
|
436
|
+
// "imagePlaneModule",
|
|
437
|
+
// imageIds[0]
|
|
438
|
+
// );
|
|
439
|
+
|
|
440
|
+
// const lastImagePlaneModule = metadataProvider.get(
|
|
441
|
+
// "imagePlaneModule",
|
|
442
|
+
// imageIds[imageIds.length - 1]
|
|
443
|
+
// );
|
|
444
|
+
|
|
445
|
+
// console.log(firstImagePlaneModule);
|
|
446
|
+
// console.log(lastImagePlaneModule);
|
|
447
|
+
|
|
448
|
+
// const corners = [
|
|
449
|
+
// ...getCorners(firstImagePlaneModule),
|
|
450
|
+
// ...getCorners(lastImagePlaneModule)
|
|
451
|
+
// ];
|
|
452
|
+
|
|
453
|
+
// console.log(`corners:`);
|
|
454
|
+
// console.log(corners);
|
|
455
|
+
|
|
456
|
+
// const indexToWorld = mat4.create();
|
|
457
|
+
|
|
458
|
+
// const ippFirstFrame = firstImagePlaneModule.imagePositionPatient;
|
|
459
|
+
// const rowCosines = Array.isArray(firstImagePlaneModule.rowCosines)
|
|
460
|
+
// ? [...firstImagePlaneModule.rowCosines]
|
|
461
|
+
// : [
|
|
462
|
+
// firstImagePlaneModule.rowCosines.x,
|
|
463
|
+
// firstImagePlaneModule.rowCosines.y,
|
|
464
|
+
// firstImagePlaneModule.rowCosines.z
|
|
465
|
+
// ];
|
|
466
|
+
|
|
467
|
+
// const columnCosines = Array.isArray(firstImagePlaneModule.columnCosines)
|
|
468
|
+
// ? [...firstImagePlaneModule.columnCosines]
|
|
469
|
+
// : [
|
|
470
|
+
// firstImagePlaneModule.columnCosines.x,
|
|
471
|
+
// firstImagePlaneModule.columnCosines.y,
|
|
472
|
+
// firstImagePlaneModule.columnCosines.z
|
|
473
|
+
// ];
|
|
474
|
+
|
|
475
|
+
// const { pixelSpacing } = firstImagePlaneModule;
|
|
476
|
+
|
|
477
|
+
// mat4.set(
|
|
478
|
+
// indexToWorld,
|
|
479
|
+
// // Column 1
|
|
480
|
+
// 0,
|
|
481
|
+
// 0,
|
|
482
|
+
// 0,
|
|
483
|
+
// ippFirstFrame[0],
|
|
484
|
+
// // Column 2
|
|
485
|
+
// 0,
|
|
486
|
+
// 0,
|
|
487
|
+
// 0,
|
|
488
|
+
// ippFirstFrame[1],
|
|
489
|
+
// // Column 3
|
|
490
|
+
// 0,
|
|
491
|
+
// 0,
|
|
492
|
+
// 0,
|
|
493
|
+
// ippFirstFrame[2],
|
|
494
|
+
// // Column 4
|
|
495
|
+
// 0,
|
|
496
|
+
// 0,
|
|
497
|
+
// 0,
|
|
498
|
+
// 1
|
|
499
|
+
// );
|
|
500
|
+
|
|
501
|
+
// // TODO -> Get origin and (x,y,z) increments to build a translation matrix:
|
|
502
|
+
// // TODO -> Equation C.7.6.2.1-1
|
|
503
|
+
|
|
504
|
+
// // | cx*di rx* Xx 0 | |x|
|
|
505
|
+
// // | cy*di ry Xy 0 | |y|
|
|
506
|
+
// // | cz*di rz Xz 0 | |z|
|
|
507
|
+
// // | tx ty tz 1 | |1|
|
|
508
|
+
|
|
509
|
+
// // const [
|
|
510
|
+
// // 0, 0 , 0 , 0,
|
|
511
|
+
// // 0, 0 , 0 , 0,
|
|
512
|
+
// // 0, 0 , 0 , 0,
|
|
513
|
+
// // ipp[0], ipp[1] , ipp[2] , 1,
|
|
514
|
+
// // ]
|
|
515
|
+
|
|
516
|
+
// // Each frame:
|
|
517
|
+
|
|
518
|
+
// // Find which corner the first voxel lines up with (one of 8 corners.)
|
|
519
|
+
|
|
520
|
+
// // Find how i,j,k orient with respect to source volume.
|
|
521
|
+
// // Go through each frame, find location in source to start, and whether to increment +/ix,+/-y,+/-z
|
|
522
|
+
// // through each voxel.
|
|
523
|
+
|
|
524
|
+
// // [1,0,0,0,1,0]
|
|
525
|
+
|
|
526
|
+
// // const [
|
|
527
|
+
|
|
528
|
+
// // ]
|
|
529
|
+
|
|
530
|
+
// // Invert transformation matrix to get worldToIndex
|
|
531
|
+
|
|
532
|
+
// // Apply world to index on each point to fill up the matrix.
|
|
533
|
+
|
|
534
|
+
// // const sharedImageOrientationPatient = SharedFunctionalGroupsSequence.PlaneOrientationSequence
|
|
535
|
+
// // ? SharedFunctionalGroupsSequence.PlaneOrientationSequence
|
|
536
|
+
// // .ImageOrientationPatient
|
|
537
|
+
// // : undefined;
|
|
538
|
+
// // const sliceLength = Columns * Rows;
|
|
539
|
+
// }
|
|
540
|
+
|
|
541
|
+
// function getCorners(imagePlaneModule) {
|
|
542
|
+
// // console.log(imagePlaneModule);
|
|
543
|
+
|
|
544
|
+
// const {
|
|
545
|
+
// rows,
|
|
546
|
+
// columns,
|
|
547
|
+
// rowCosines,
|
|
548
|
+
// columnCosines,
|
|
549
|
+
// imagePositionPatient: ipp,
|
|
550
|
+
// rowPixelSpacing,
|
|
551
|
+
// columnPixelSpacing
|
|
552
|
+
// } = imagePlaneModule;
|
|
553
|
+
|
|
554
|
+
// const rowLength = columns * columnPixelSpacing;
|
|
555
|
+
// const columnLength = rows * rowPixelSpacing;
|
|
556
|
+
|
|
557
|
+
// const entireRowVector = [
|
|
558
|
+
// rowLength * columnCosines[0],
|
|
559
|
+
// rowLength * columnCosines[1],
|
|
560
|
+
// rowLength * columnCosines[2]
|
|
561
|
+
// ];
|
|
562
|
+
|
|
563
|
+
// const entireColumnVector = [
|
|
564
|
+
// columnLength * rowCosines[0],
|
|
565
|
+
// columnLength * rowCosines[1],
|
|
566
|
+
// columnLength * rowCosines[2]
|
|
567
|
+
// ];
|
|
568
|
+
|
|
569
|
+
// const topLeft = [ipp[0], ipp[1], ipp[2]];
|
|
570
|
+
// const topRight = [
|
|
571
|
+
// topLeft[0] + entireRowVector[0],
|
|
572
|
+
// topLeft[1] + entireRowVector[1],
|
|
573
|
+
// topLeft[2] + entireRowVector[2]
|
|
574
|
+
// ];
|
|
575
|
+
// const bottomLeft = [
|
|
576
|
+
// topLeft[0] + entireColumnVector[0],
|
|
577
|
+
// topLeft[1] + entireColumnVector[1],
|
|
578
|
+
// topLeft[2] + entireColumnVector[2]
|
|
579
|
+
// ];
|
|
580
|
+
|
|
581
|
+
// const bottomRight = [
|
|
582
|
+
// bottomLeft[0] + entireRowVector[0],
|
|
583
|
+
// bottomLeft[1] + entireRowVector[1],
|
|
584
|
+
// bottomLeft[2] + entireRowVector[2]
|
|
585
|
+
// ];
|
|
586
|
+
|
|
587
|
+
// return [topLeft, topRight, bottomLeft, bottomRight];
|
|
588
|
+
// }
|
|
589
|
+
|
|
590
|
+
/**
 * Find the imageId of the source image that a segmentation frame refers to.
 *
 * Resolution order:
 *   1. the top-level `SourceImageSequence` entry for the frame,
 *   2. the frame's `DerivationImageSequence` -> `SourceImageSequence`,
 *   3. a geometry-based lookup via `ReferencedSeriesSequence`.
 *
 * @param {Object} multiframe Naturalized multiframe SEG dataset.
 * @param {Int} frameSegment Index of the frame within the per-frame groups.
 * @param {String[]} imageIds A list of imageIds.
 * @param {Object} metadataProvider A Cornerstone metadataProvider to query
 *   metadata from imageIds.
 * @param {Float} tolerance Tolerance for the geometry-based lookup.
 *
 * @returns {String|undefined} The matching imageId, or undefined when no
 *   reference can be resolved.
 */
function findReferenceSourceImageId(
    multiframe,
    frameSegment,
    imageIds,
    metadataProvider,
    tolerance
) {
    if (!multiframe) {
        return undefined;
    }

    const {
        FrameOfReferenceUID,
        PerFrameFunctionalGroupsSequence,
        SourceImageSequence,
        ReferencedSeriesSequence
    } = multiframe;

    if (
        !PerFrameFunctionalGroupsSequence ||
        PerFrameFunctionalGroupsSequence.length === 0
    ) {
        return undefined;
    }

    const PerFrameFunctionalGroup =
        PerFrameFunctionalGroupsSequence[frameSegment];

    if (!PerFrameFunctionalGroup) {
        return undefined;
    }

    // DICOM sequences may be naturalized as either a single item or an array;
    // collapse arrays to their first item (undefined when empty).
    const firstItem = value =>
        Array.isArray(value)
            ? value.length !== 0
                ? value[0]
                : undefined
            : value;

    let frameSourceImageSequence;
    if (SourceImageSequence && SourceImageSequence.length !== 0) {
        frameSourceImageSequence = SourceImageSequence[frameSegment];
    } else if (PerFrameFunctionalGroup.DerivationImageSequence) {
        const derivationImage = firstItem(
            PerFrameFunctionalGroup.DerivationImageSequence
        );

        if (derivationImage) {
            frameSourceImageSequence = firstItem(
                derivationImage.SourceImageSequence
            );
        }
    }

    let imageId;

    if (frameSourceImageSequence) {
        imageId = getImageIdOfSourceImagebySourceImageSequence(
            frameSourceImageSequence,
            imageIds,
            metadataProvider
        );
    }

    // Fall back to matching by geometry within the referenced series.
    if (imageId === undefined && ReferencedSeriesSequence) {
        const referencedSeriesSequence = firstItem(ReferencedSeriesSequence);
        const ReferencedSeriesInstanceUID =
            referencedSeriesSequence.SeriesInstanceUID;

        imageId = getImageIdOfSourceImagebyGeometry(
            ReferencedSeriesInstanceUID,
            FrameOfReferenceUID,
            PerFrameFunctionalGroup,
            imageIds,
            metadataProvider,
            tolerance
        );
    }

    return imageId;
}
|
|
690
|
+
|
|
691
|
+
/**
 * Checks if there is any overlapping segmentations.
 *
 * Frames are grouped by the source image they reference; within each group,
 * the aligned segment masks are summed pixel-by-pixel. Any pixel claimed by
 * more than one segment means the SEGs overlap.
 *
 * @param {TypedArray} pixelData Decoded SEG pixel data (one slice per frame).
 * @param {Object} multiframe Naturalized multiframe SEG dataset.
 * @param {String[]} imageIds The source stack's imageIds.
 * @param {Object[]} validOrientations Orientations considered in-plane.
 * @param {Object} metadataProvider A Cornerstone metadataProvider.
 * @param {Float} tolerance Orientation-comparison tolerance.
 * @returns {boolean} Returns a flag if segmentations overlapping
 */

function checkSEGsOverlapping(
    pixelData,
    multiframe,
    imageIds,
    validOrientations,
    metadataProvider,
    tolerance
) {
    const {
        SharedFunctionalGroupsSequence,
        PerFrameFunctionalGroupsSequence,
        SegmentSequence,
        Rows,
        Columns
    } = multiframe;

    // A single segment can never overlap with itself.
    let numberOfSegs = SegmentSequence.length;
    if (numberOfSegs < 2) {
        return false;
    }

    // Shared orientation applies to every frame unless a per-frame
    // orientation is present.
    const sharedImageOrientationPatient =
        SharedFunctionalGroupsSequence.PlaneOrientationSequence
            ? SharedFunctionalGroupsSequence.PlaneOrientationSequence
                  .ImageOrientationPatient
            : undefined;
    const sliceLength = Columns * Rows;
    const groupsLen = PerFrameFunctionalGroupsSequence.length;

    /** sort groupsLen to have all the segments for each frame in an array
     * frame 2 : 1, 2
     * frame 4 : 1, 3
     * frame 5 : 4
     */

    // Map: source-image index -> SEG frame indices referencing that image.
    let frameSegmentsMapping = new Map();
    for (let frameSegment = 0; frameSegment < groupsLen; ++frameSegment) {
        const segmentIndex = getSegmentIndex(multiframe, frameSegment);
        if (segmentIndex === undefined) {
            console.warn(
                "Could not retrieve the segment index for frame segment " +
                    frameSegment +
                    ", skipping this frame."
            );
            continue;
        }

        const imageId = findReferenceSourceImageId(
            multiframe,
            frameSegment,
            imageIds,
            metadataProvider,
            tolerance
        );

        if (!imageId) {
            console.warn(
                "Image not present in stack, can't import frame : " +
                    frameSegment +
                    "."
            );
            continue;
        }

        const imageIdIndex = imageIds.findIndex(element => element === imageId);

        if (frameSegmentsMapping.has(imageIdIndex)) {
            let segmentArray = frameSegmentsMapping.get(imageIdIndex);
            if (!segmentArray.includes(frameSegment)) {
                segmentArray.push(frameSegment);
                frameSegmentsMapping.set(imageIdIndex, segmentArray);
            }
        } else {
            frameSegmentsMapping.set(imageIdIndex, [frameSegment]);
        }
    }

    // For each source image, count how many segments claim each pixel; a
    // count above 1 is an overlap.
    for (let [, role] of frameSegmentsMapping.entries()) {
        let temp2DArray = new Uint16Array(sliceLength).fill(0);

        for (let i = 0; i < role.length; ++i) {
            const frameSegment = role[i];

            const PerFrameFunctionalGroups =
                PerFrameFunctionalGroupsSequence[frameSegment];

            const ImageOrientationPatientI =
                sharedImageOrientationPatient ||
                PerFrameFunctionalGroups.PlaneOrientationSequence
                    .ImageOrientationPatient;

            // View (no copy) of this frame's slice within the decoded buffer.
            const pixelDataI2D = ndarray(
                new Uint8Array(
                    pixelData.buffer,
                    frameSegment * sliceLength,
                    sliceLength
                ),
                [Rows, Columns]
            );

            const alignedPixelDataI = alignPixelDataWithSourceData(
                pixelDataI2D,
                ImageOrientationPatientI,
                validOrientations,
                tolerance
            );

            if (!alignedPixelDataI) {
                console.warn(
                    "Individual SEG frames are out of plane with respect to the first SEG frame, this is not yet supported, skipping this frame."
                );
                continue;
            }

            const data = alignedPixelDataI.data;
            for (let j = 0, len = data.length; j < len; ++j) {
                if (data[j] !== 0) {
                    temp2DArray[j]++;
                    if (temp2DArray[j] > 1) {
                        return true;
                    }
                }
            }
        }
    }

    return false;
}
|
|
824
|
+
|
|
825
|
+
/**
 * insertOverlappingPixelDataPlanar - Inserts SEG frames whose segments may
 * overlap spatially. Each segment is trial-written into labelmap buffer `m`;
 * when a pixel collision with an already-written label is detected, the whole
 * segment is retried on labelmap `m + 1` (allocating a new buffer when `m`
 * runs past `M`), so overlapping segments end up in separate labelmaps.
 *
 * @param {Object} segmentsOnFrame Output map: imageIdIndex -> segment numbers found on that image (across all labelmaps).
 * @param {Object[]} segmentsOnFrameArray Per-labelmap variant of segmentsOnFrame; grown as new labelmaps are allocated.
 * @param {ArrayBuffer[]} labelmapBufferArray Destination labelmap buffers (2 bytes/voxel); grown as new labelmaps are allocated.
 * @param {TypedArray} pixelData Unpacked SEG pixel data, one slice per frame (1 byte/pixel; only `.buffer` is read).
 * @param {Object} multiframe The multiframe SEG dataset.
 * @param {String[]} imageIds The source-series imageIds.
 * @param {Number[][]} validOrientations The orientations considered in-plane.
 * @param {Object} metadataProvider Cornerstone metadata provider.
 * @param {Number} tolerance Numeric tolerance for orientation/position comparisons.
 * @throws {Error} If a segment index cannot be resolved, a frame is out of
 * plane, or a frame's dimensions differ from its source image.
 */
function insertOverlappingPixelDataPlanar(
    segmentsOnFrame,
    segmentsOnFrameArray,
    labelmapBufferArray,
    pixelData,
    multiframe,
    imageIds,
    validOrientations,
    metadataProvider,
    tolerance
) {
    const {
        SharedFunctionalGroupsSequence,
        PerFrameFunctionalGroupsSequence,
        Rows,
        Columns
    } = multiframe;

    // Shared orientation (if present) applies to every frame; otherwise each
    // per-frame group carries its own PlaneOrientationSequence.
    const sharedImageOrientationPatient =
        SharedFunctionalGroupsSequence.PlaneOrientationSequence
            ? SharedFunctionalGroupsSequence.PlaneOrientationSequence
                  .ImageOrientationPatient
            : undefined;
    const sliceLength = Columns * Rows;
    const arrayBufferLength = sliceLength * imageIds.length * 2; // 2 bytes per label voxel in cst4.

    // indicate the number of labelMaps
    let M = 1;

    // indicate the current labelMap array index;
    let m = 0;

    // temp array for checking overlaps
    let tempBuffer = labelmapBufferArray[m].slice(0);

    // temp list for checking overlaps
    let tempSegmentsOnFrame = cloneDeep(segmentsOnFrameArray[m]);

    /** split overlapping SEGs algorithm for each segment:
     *  A) copy the labelmapBuffer in the array with index 0
     *  B) add the segment pixel per pixel on the copied buffer from (A)
     *  C) if no overlap, copy the results back on the orignal array from (A)
     *  D) if overlap, repeat increasing the index m up to M (if out of memory, add new buffer in the array and M++);
     */

    let numberOfSegs = multiframe.SegmentSequence.length;
    // Segments are processed one at a time, in ReferencedSegmentNumber order
    // (DICOM segment numbers are 1-based).
    for (
        let segmentIndexToProcess = 1;
        segmentIndexToProcess <= numberOfSegs;
        ++segmentIndexToProcess
    ) {
        for (
            let i = 0, groupsLen = PerFrameFunctionalGroupsSequence.length;
            i < groupsLen;
            ++i
        ) {
            const PerFrameFunctionalGroups =
                PerFrameFunctionalGroupsSequence[i];

            const segmentIndex = getSegmentIndex(multiframe, i);
            if (segmentIndex === undefined) {
                throw new Error(
                    "Could not retrieve the segment index. Aborting segmentation loading."
                );
            }

            // Only frames belonging to the segment currently being processed.
            if (segmentIndex !== segmentIndexToProcess) {
                continue;
            }

            const ImageOrientationPatientI =
                sharedImageOrientationPatient ||
                PerFrameFunctionalGroups.PlaneOrientationSequence
                    .ImageOrientationPatient;

            // View of this frame's pixels as a Rows x Columns 2D array.
            const pixelDataI2D = ndarray(
                new Uint8Array(pixelData.buffer, i * sliceLength, sliceLength),
                [Rows, Columns]
            );

            // Flip/rotate the frame so it matches the source image orientation.
            const alignedPixelDataI = alignPixelDataWithSourceData(
                pixelDataI2D,
                ImageOrientationPatientI,
                validOrientations,
                tolerance
            );

            if (!alignedPixelDataI) {
                throw new Error(
                    "Individual SEG frames are out of plane with respect to the first SEG frame. " +
                        "This is not yet supported. Aborting segmentation loading."
                );
            }

            // Resolve which source image this SEG frame refers to.
            const imageId = findReferenceSourceImageId(
                multiframe,
                i,
                imageIds,
                metadataProvider,
                tolerance
            );

            if (!imageId) {
                console.warn(
                    "Image not present in stack, can't import frame : " +
                        i +
                        "."
                );
                continue;
            }

            const sourceImageMetadata = metadataProvider.get(
                "instance",
                imageId
            );
            if (
                Rows !== sourceImageMetadata.Rows ||
                Columns !== sourceImageMetadata.Columns
            ) {
                throw new Error(
                    "Individual SEG frames have different geometry dimensions (Rows and Columns) " +
                        "respect to the source image reference frame. This is not yet supported. " +
                        "Aborting segmentation loading. "
                );
            }

            const imageIdIndex = imageIds.findIndex(
                element => element === imageId
            );
            const byteOffset = sliceLength * 2 * imageIdIndex; // 2 bytes/pixel

            // Uint16 window onto this image's slice within the TRIAL buffer;
            // writes here are only committed after the segment completes
            // without collisions.
            const labelmap2DView = new Uint16Array(
                tempBuffer,
                byteOffset,
                sliceLength
            );

            const data = alignedPixelDataI.data;

            let segmentOnFrame = false;
            for (let j = 0, len = alignedPixelDataI.data.length; j < len; ++j) {
                if (data[j]) {
                    if (labelmap2DView[j] !== 0) {
                        // Collision: this segment overlaps an existing label
                        // in labelmap m. Move to the next labelmap
                        // (allocating a fresh buffer/segment list when m runs
                        // past M) and restart the frame scan for this segment.
                        m++;
                        if (m >= M) {
                            labelmapBufferArray[m] = new ArrayBuffer(
                                arrayBufferLength
                            );
                            segmentsOnFrameArray[m] = [];
                            M++;
                        }
                        tempBuffer = labelmapBufferArray[m].slice(0);
                        tempSegmentsOnFrame = cloneDeep(
                            segmentsOnFrameArray[m]
                        );

                        // NOTE(review): `i = 0` followed by the loop's `++i`
                        // resumes the scan at frame index 1, so frame 0 is
                        // not revisited after a restart — verify intended.
                        i = 0;
                        break;
                    } else {
                        labelmap2DView[j] = segmentIndex;
                        segmentOnFrame = true;
                    }
                }
            }

            if (segmentOnFrame) {
                // Record the segment both per-labelmap (trial list) and in
                // the global segmentsOnFrame map.
                if (!tempSegmentsOnFrame[imageIdIndex]) {
                    tempSegmentsOnFrame[imageIdIndex] = [];
                }

                tempSegmentsOnFrame[imageIdIndex].push(segmentIndex);

                if (!segmentsOnFrame[imageIdIndex]) {
                    segmentsOnFrame[imageIdIndex] = [];
                }

                segmentsOnFrame[imageIdIndex].push(segmentIndex);
            }
        }

        // No unresolved overlap for this segment: commit the trial buffer and
        // segment list, then reset the temps to labelmap 0 for the next one.
        labelmapBufferArray[m] = tempBuffer.slice(0);
        segmentsOnFrameArray[m] = cloneDeep(tempSegmentsOnFrame);

        // reset temp variables/buffers for new segment
        m = 0;
        tempBuffer = labelmapBufferArray[m].slice(0);
        tempSegmentsOnFrame = cloneDeep(segmentsOnFrameArray[m]);
    }
}
|
|
1014
|
+
|
|
1015
|
+
/**
 * getSegmentIndex - Resolves the ReferencedSegmentNumber for a given frame.
 * The per-frame SegmentIdentificationSequence takes precedence; otherwise the
 * shared one is used; returns undefined when neither is present.
 *
 * @param {Object} multiframe The multiframe SEG dataset.
 * @param {Number} frame Zero-based frame index.
 * @return {Number|undefined} The segment number for the frame, if resolvable.
 */
const getSegmentIndex = (multiframe, frame) => {
    const perFrameGroups = multiframe.PerFrameFunctionalGroupsSequence[frame];

    if (perFrameGroups && perFrameGroups.SegmentIdentificationSequence) {
        return perFrameGroups.SegmentIdentificationSequence
            .ReferencedSegmentNumber;
    }

    const sharedIdentification =
        multiframe.SharedFunctionalGroupsSequence.SegmentIdentificationSequence;

    return sharedIdentification
        ? sharedIdentification.ReferencedSegmentNumber
        : undefined;
};
|
|
1028
|
+
|
|
1029
|
+
/**
 * insertPixelDataPlanar - Copies non-overlapping SEG frames into the single
 * labelmap buffer (labelmapBufferArray[0]), aligning each frame with the
 * source image orientation and recording which segments appear on which
 * source image.
 *
 * @param {Object} segmentsOnFrame Output map: imageIdIndex -> segment numbers found on that image.
 * @param {Object[]} segmentsOnFrameArray Not referenced in this function; kept for signature parity with the overlapping variant.
 * @param {ArrayBuffer[]} labelmapBufferArray Destination labelmap buffers; only index 0 is written (2 bytes/voxel).
 * @param {TypedArray} pixelData Unpacked SEG pixel data, one slice per frame (1 byte/pixel; only `.buffer` is read).
 * @param {Object} multiframe The multiframe SEG dataset.
 * @param {String[]} imageIds The source-series imageIds.
 * @param {Number[][]} validOrientations The orientations considered in-plane.
 * @param {Object} metadataProvider Cornerstone metadata provider.
 * @param {Number} tolerance Numeric tolerance for orientation/position comparisons.
 * @throws {Error} If a frame is out of plane, its segment index cannot be
 * resolved, or its dimensions differ from the source image.
 */
function insertPixelDataPlanar(
    segmentsOnFrame,
    segmentsOnFrameArray,
    labelmapBufferArray,
    pixelData,
    multiframe,
    imageIds,
    validOrientations,
    metadataProvider,
    tolerance
) {
    const {
        SharedFunctionalGroupsSequence,
        PerFrameFunctionalGroupsSequence,
        Rows,
        Columns
    } = multiframe;

    // Shared orientation (if present) applies to every frame; otherwise each
    // per-frame group carries its own PlaneOrientationSequence.
    const sharedImageOrientationPatient =
        SharedFunctionalGroupsSequence.PlaneOrientationSequence
            ? SharedFunctionalGroupsSequence.PlaneOrientationSequence
                  .ImageOrientationPatient
            : undefined;
    const sliceLength = Columns * Rows;

    for (
        let i = 0, groupsLen = PerFrameFunctionalGroupsSequence.length;
        i < groupsLen;
        ++i
    ) {
        const PerFrameFunctionalGroups = PerFrameFunctionalGroupsSequence[i];

        const ImageOrientationPatientI =
            sharedImageOrientationPatient ||
            PerFrameFunctionalGroups.PlaneOrientationSequence
                .ImageOrientationPatient;

        // View of this frame's pixels as a Rows x Columns 2D array.
        const pixelDataI2D = ndarray(
            new Uint8Array(pixelData.buffer, i * sliceLength, sliceLength),
            [Rows, Columns]
        );

        // Flip/rotate the frame so it matches the source image orientation.
        const alignedPixelDataI = alignPixelDataWithSourceData(
            pixelDataI2D,
            ImageOrientationPatientI,
            validOrientations,
            tolerance
        );

        if (!alignedPixelDataI) {
            throw new Error(
                "Individual SEG frames are out of plane with respect to the first SEG frame. " +
                    "This is not yet supported. Aborting segmentation loading."
            );
        }

        const segmentIndex = getSegmentIndex(multiframe, i);
        if (segmentIndex === undefined) {
            throw new Error(
                "Could not retrieve the segment index. Aborting segmentation loading."
            );
        }

        // Resolve which source image this SEG frame refers to.
        const imageId = findReferenceSourceImageId(
            multiframe,
            i,
            imageIds,
            metadataProvider,
            tolerance
        );

        if (!imageId) {
            console.warn(
                "Image not present in stack, can't import frame : " + i + "."
            );
            continue;
        }

        const sourceImageMetadata = metadataProvider.get("instance", imageId);
        if (
            Rows !== sourceImageMetadata.Rows ||
            Columns !== sourceImageMetadata.Columns
        ) {
            throw new Error(
                "Individual SEG frames have different geometry dimensions (Rows and Columns) " +
                    "respect to the source image reference frame. This is not yet supported. " +
                    "Aborting segmentation loading. "
            );
        }

        const imageIdIndex = imageIds.findIndex(element => element === imageId);
        const byteOffset = sliceLength * 2 * imageIdIndex; // 2 bytes/pixel

        // Uint16 window onto this image's slice within the labelmap buffer.
        const labelmap2DView = new Uint16Array(
            labelmapBufferArray[0],
            byteOffset,
            sliceLength
        );

        const data = alignedPixelDataI.data;
        for (let j = 0, len = alignedPixelDataI.data.length; j < len; ++j) {
            if (data[j]) {
                // First set pixel found: label every remaining set pixel in
                // one inner pass, record the segment for this image once,
                // then stop scanning this frame.
                for (let x = j; x < len; ++x) {
                    if (data[x]) {
                        labelmap2DView[x] = segmentIndex;
                    }
                }

                if (!segmentsOnFrame[imageIdIndex]) {
                    segmentsOnFrame[imageIdIndex] = [];
                }

                segmentsOnFrame[imageIdIndex].push(segmentIndex);

                break;
            }
        }
    }
}
|
|
1148
|
+
|
|
1149
|
+
/**
 * checkOrientation - Classifies how the SEG frames lie relative to the source
 * image grid.
 *
 * @param {Object} multiframe The multiframe SEG dataset.
 * @param {Number[][]} validOrientations Orientations considered in-plane.
 * @param {Number[]} sourceDataDimensions Dimensions of the source image data.
 * @param {Number} tolerance Numeric tolerance for orientation comparisons.
 * @return {String} "Planar", "Perpendicular", or "Oblique".
 */
function checkOrientation(
    multiframe,
    validOrientations,
    sourceDataDimensions,
    tolerance
) {
    const { SharedFunctionalGroupsSequence, PerFrameFunctionalGroupsSequence } =
        multiframe;

    const sharedOrientation = SharedFunctionalGroupsSequence
        .PlaneOrientationSequence
        ? SharedFunctionalGroupsSequence.PlaneOrientationSequence
              .ImageOrientationPatient
        : undefined;

    // Fall back to the first frame's own orientation when no shared one.
    const firstFrameGroups = PerFrameFunctionalGroupsSequence[0];
    const iop =
        sharedOrientation ||
        firstFrameGroups.PlaneOrientationSequence.ImageOrientationPatient;

    // In-plane with any of the accepted source orientations?
    const isInPlane = validOrientations.some(candidate =>
        compareArrays(iop, candidate, tolerance)
    );

    if (isInPlane) {
        return "Planar";
    }

    // Perpendicular and fits on same grid.
    const isPerpendicularAndFits =
        checkIfPerpendicular(iop, validOrientations[0], tolerance) &&
        sourceDataDimensions.includes(multiframe.Rows) &&
        sourceDataDimensions.includes(multiframe.Columns);

    if (isPerpendicularAndFits) {
        return "Perpendicular";
    }

    return "Oblique";
}
|
|
1191
|
+
|
|
1192
|
+
/**
 * checkIfPerpendicular - Returns true if iop1 and iop2 are perpendicular
 * within a tolerance. Note: a dot product near 1 (parallel axes) also
 * passes, matching the original axis-aligned acceptance.
 *
 * @param {Number[6]} iop1 An ImageOrientationPatient array.
 * @param {Number[6]} iop2 An ImageOrientationPatient array.
 * @param {Number} tolerance Numeric tolerance for the dot products.
 * @return {Boolean} True if the orientations are axis-aligned.
 */
function checkIfPerpendicular(iop1, iop2, tolerance) {
    // |dot| of three consecutive direction cosines starting at `offset`.
    const absDot = offset =>
        Math.abs(
            iop1[offset] * iop2[offset] +
                iop1[offset + 1] * iop2[offset + 1] +
                iop1[offset + 2] * iop2[offset + 2]
        );

    // Accept |dot| ~ 0 (perpendicular) or |dot| ~ 1 (parallel).
    const isAxisAligned = value =>
        value < tolerance || Math.abs(value - 1) < tolerance;

    return isAxisAligned(absDot(0)) && isAxisAligned(absDot(3));
}
|
|
1216
|
+
|
|
1217
|
+
/**
 * unpackPixelData - Unpacks bitpacked pixelData if the Segmentation is BINARY,
 * or validates a FRACTIONAL segmentation that is effectively binary.
 *
 * @param {Object} multiframe The multiframe dataset.
 * @return {Uint8Array|undefined} The unpacked pixelData, or undefined for a
 * truly fractional segmentation (unsupported).
 */
function unpackPixelData(multiframe) {
    const segType = multiframe.SegmentationType;

    const rawData = Array.isArray(multiframe.PixelData)
        ? multiframe.PixelData[0]
        : multiframe.PixelData;

    if (rawData === undefined) {
        log.error("This segmentation pixeldata is undefined.");
    }

    if (segType === "BINARY") {
        return BitArray.unpack(rawData);
    }

    const pixelData = new Uint8Array(rawData);

    // A FRACTIONAL SEG is only usable here when every sample is either 0 or
    // the MaximumFractionalValue, i.e. it is binary in disguise.
    const max = multiframe.MaximumFractionalValue;
    const hasIntermediateValue =
        pixelData.find(element => element !== 0 && element !== max) !==
        undefined;

    if (hasIntermediateValue) {
        // This is a fractional segmentation, which is not currently supported.
        return;
    }

    log.warn(
        "This segmentation object is actually binary... processing as such."
    );

    return pixelData;
}
|
|
1259
|
+
|
|
1260
|
+
/**
 * getImageIdOfSourceImagebySourceImageSequence - Returns the Cornerstone
 * imageId of the source image referenced by a SourceImageSequence item.
 *
 * @param {Object} SourceImageSequence Sequence describing the source image.
 * @param {String[]} imageIds A list of imageIds.
 * @param {Object} metadataProvider A Cornerstone metadataProvider to query
 * metadata from imageIds.
 * @return {String} The corresponding imageId.
 */
function getImageIdOfSourceImagebySourceImageSequence(
    SourceImageSequence,
    imageIds,
    metadataProvider
) {
    const { ReferencedSOPInstanceUID, ReferencedFrameNumber } =
        SourceImageSequence;

    // Multi-frame references carry a frame number; single-frame ones do not.
    if (ReferencedFrameNumber) {
        return getImageIdOfReferencedFrame(
            ReferencedSOPInstanceUID,
            ReferencedFrameNumber,
            imageIds,
            metadataProvider
        );
    }

    return getImageIdOfReferencedSingleFramedSOPInstance(
        ReferencedSOPInstanceUID,
        imageIds,
        metadataProvider
    );
}
|
|
1290
|
+
|
|
1291
|
+
/**
 * getImageIdOfSourceImagebyGeometry - Returns the Cornerstone imageId of the
 * source image whose position matches the SEG frame's ImagePositionPatient.
 *
 * @param {String} ReferencedSeriesInstanceUID Referenced series of the source image.
 * @param {String} FrameOfReferenceUID Frame of reference.
 * @param {Object} PerFrameFunctionalGroup Sequence describing segmentation reference attributes per frame.
 * @param {String[]} imageIds A list of imageIds.
 * @param {Object} metadataProvider A Cornerstone metadataProvider to query
 * @param {Float} tolerance The tolerance parameter
 *
 * @return {String} The corresponding imageId.
 */
function getImageIdOfSourceImagebyGeometry(
    ReferencedSeriesInstanceUID,
    FrameOfReferenceUID,
    PerFrameFunctionalGroup,
    imageIds,
    metadataProvider,
    tolerance
) {
    const planePositionSequence = PerFrameFunctionalGroup.PlanePositionSequence;

    // Without a series UID and a frame position there is nothing to match on.
    if (
        ReferencedSeriesInstanceUID === undefined ||
        planePositionSequence === undefined ||
        planePositionSequence[0] === undefined ||
        planePositionSequence[0].ImagePositionPatient === undefined
    ) {
        return undefined;
    }

    const segFramePosition = planePositionSequence[0].ImagePositionPatient;

    for (const candidateImageId of imageIds) {
        const candidateMetadata = metadataProvider.get(
            "instance",
            candidateImageId
        );

        // Candidate must belong to the referenced series and frame of
        // reference, and expose a position to compare against.
        const isComparable =
            candidateMetadata !== undefined &&
            candidateMetadata.ImagePositionPatient !== undefined &&
            candidateMetadata.FrameOfReferenceUID === FrameOfReferenceUID &&
            candidateMetadata.SeriesInstanceUID === ReferencedSeriesInstanceUID;

        if (!isComparable) {
            continue;
        }

        if (
            compareArrays(
                segFramePosition,
                candidateMetadata.ImagePositionPatient,
                tolerance
            )
        ) {
            return candidateImageId;
        }
    }
}
|
|
1353
|
+
|
|
1354
|
+
/**
 * getImageIdOfReferencedSingleFramedSOPInstance - Returns the imageId
 * corresponding to the specified sopInstanceUid for single-frame images.
 *
 * @param {String} sopInstanceUid The sopInstanceUid of the desired image.
 * @param {String[]} imageIds The list of imageIds.
 * @param {Object} metadataProvider The metadataProvider to obtain sopInstanceUids
 * from the cornerstone imageIds.
 * @return {String} The imageId that corresponds to the sopInstanceUid.
 */
function getImageIdOfReferencedSingleFramedSOPInstance(
    sopInstanceUid,
    imageIds,
    metadataProvider
) {
    for (const imageId of imageIds) {
        const sopCommonModule = metadataProvider.get(
            "sopCommonModule",
            imageId
        );

        // Skip imageIds the provider knows nothing about.
        if (!sopCommonModule) {
            continue;
        }

        if (sopCommonModule.sopInstanceUID === sopInstanceUid) {
            return imageId;
        }
    }

    return undefined;
}
|
|
1381
|
+
|
|
1382
|
+
/**
 * getImageIdOfReferencedFrame - Returns the imageId corresponding to the
 * specified sopInstanceUid and frameNumber for multi-frame images.
 *
 * @param {String} sopInstanceUid The sopInstanceUid of the desired image.
 * @param {Number} frameNumber The frame number.
 * @param {String} imageIds The list of imageIds.
 * @param {Object} metadataProvider The metadataProvider to obtain sopInstanceUids
 * from the cornerstone imageIds.
 * @return {String} The imageId that corresponds to the sopInstanceUid.
 */
function getImageIdOfReferencedFrame(
    sopInstanceUid,
    frameNumber,
    imageIds,
    metadataProvider
) {
    for (const imageId of imageIds) {
        const sopCommonModule = metadataProvider.get(
            "sopCommonModule",
            imageId
        );

        if (!sopCommonModule) {
            continue;
        }

        //frameNumber is zero indexed for cornerstoneWADOImageLoader image Ids.
        const imageIdFrameNumber = Number(imageId.split("frame=")[1]);

        if (
            sopCommonModule.sopInstanceUID === sopInstanceUid &&
            imageIdFrameNumber === frameNumber - 1
        ) {
            return imageId;
        }
    }

    return undefined;
}
|
|
1419
|
+
|
|
1420
|
+
/**
 * getValidOrientations - returns an array of valid orientations derived from
 * the given ImageOrientationPatient by flips and 90-degree rotations.
 *
 * @param {Number[6]} iop The row (0..2) an column (3..5) direction cosines.
 * @return {Number[8][6]} An array of valid orientations.
 */
function getValidOrientations(iop) {
    // [0, 1, 2]: 0, 0hf, 0vf
    // [3, 4, 5]: 90, 90hf, 90vf
    // [6, 7]: 180, 270
    const orientations = [iop, flipIOP.h(iop), flipIOP.v(iop)];

    const iop90 = rotateDirectionCosinesInPlane(iop, Math.PI / 2);
    orientations.push(iop90, flipIOP.h(iop90), flipIOP.v(iop90));

    orientations.push(rotateDirectionCosinesInPlane(iop, Math.PI));
    orientations.push(rotateDirectionCosinesInPlane(iop, 1.5 * Math.PI));

    return orientations;
}
|
|
1448
|
+
|
|
1449
|
+
/**
 * alignPixelDataWithSourceData - Transforms a SEG frame so that it matches
 * the source image orientation, undoing any flip/rotation encoded by its
 * ImageOrientationPatient.
 *
 * @param {Ndarray} pixelData2D - The data to align.
 * @param {Number[6]} iop - The orientation of the image slice.
 * @param {Number[8][6]} orientations - An array of valid imageOrientationPatient values.
 * @param {Number} tolerance Numeric tolerance for orientation comparisons.
 * @return {Ndarray|undefined} The aligned pixelData, or undefined when the
 * orientation matches none of the candidates (treated as out-of-plane).
 */
function alignPixelDataWithSourceData(pixelData2D, iop, orientations, tolerance) {
    // Index of the first candidate orientation matching iop; candidates are
    // ordered as produced by getValidOrientations.
    const matchIndex = orientations.findIndex(candidate =>
        compareArrays(iop, candidate, tolerance)
    );

    switch (matchIndex) {
        case 0:
            // Already aligned.
            return pixelData2D;
        case 1:
            // Flipped vertically — undo the flip.
            return flipMatrix2D.v(pixelData2D);
        case 2:
            // Flipped horizontally — undo the flip.
            return flipMatrix2D.h(pixelData2D);
        case 3:
            // Rotated 90 degrees — rotate back.
            return rotateMatrix902D(pixelData2D);
        case 4:
            // Rotated 90 degrees and flipped horizontally — undo both.
            return rotateMatrix902D(flipMatrix2D.h(pixelData2D));
        case 5:
            // Rotated 90 degrees and flipped vertically — undo both.
            return rotateMatrix902D(flipMatrix2D.v(pixelData2D));
        case 6:
            // Rotated 180 degrees. // TODO -> Do this more effeciently, there is a 1:1 mapping like 90 degree rotation.
            return rotateMatrix902D(rotateMatrix902D(pixelData2D));
        case 7:
            // Rotated 270 degrees — rotate back.
            return rotateMatrix902D(
                rotateMatrix902D(rotateMatrix902D(pixelData2D))
            );
        default:
            // No recognised orientation: undefined signals "out of plane".
            return undefined;
    }
}
|
|
1504
|
+
|
|
1505
|
+
/**
 * compareArrays - Returns true if array1 and array2 are element-wise equal
 * within a tolerance.
 *
 * @param {Number[]} array1 - An array.
 * @param {Number[]} array2 - An array.
 * @param {Number} tolerance - The per-element comparison tolerance.
 * @return {Boolean} True if array1 and array2 are equal.
 */
function compareArrays(array1, array2, tolerance) {
    // Strict inequality (was loose `!=`): lengths are numbers, so behavior is
    // unchanged, but this avoids accidental coercion and satisfies eqeqeq.
    if (array1.length !== array2.length) {
        return false;
    }

    for (let i = 0; i < array1.length; ++i) {
        if (!nearlyEqual(array1[i], array2[i], tolerance)) {
            return false;
        }
    }

    return true;
}
|
|
1527
|
+
|
|
1528
|
+
/**
 * getSegmentMetadata - Collects the SEG SegmentSequence into a 1-padded array
 * alongside the series UID.
 *
 * @param {Object} multiframe The multiframe SEG dataset.
 * @param {String} seriesInstanceUid The referenced series instance UID.
 * @return {Object} { seriesInstanceUid, data } where data[0] is undefined and
 * the segment entries start at index 1.
 */
function getSegmentMetadata(multiframe, seriesInstanceUid) {
    const { SegmentSequence } = multiframe;

    // A single segment is stored as a bare object rather than an array;
    // index 0 is left undefined so real segments start at index 1.
    const data = Array.isArray(SegmentSequence)
        ? [undefined, ...SegmentSequence]
        : [undefined, SegmentSequence];

    return {
        seriesInstanceUid,
        data
    };
}
|