@cornerstonejs/ai 3.0.0-beta.2 → 3.0.0-beta.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,130 @@
+import type { Types } from '@cornerstonejs/core';
+import * as cornerstoneTools from '@cornerstonejs/tools';
+export type ModelType = {
+    name: string;
+    key: string;
+    url: string;
+    size: number;
+    opt?: Record<string, unknown>;
+};
+export declare enum Loggers {
+    Log = "status",
+    Encoder = "encoder",
+    Decoder = "decoder"
+}
+export default class ONNXSegmentationController {
+    static MarkerInclude: string;
+    static MarkerExclude: string;
+    static BoxPrompt: string;
+    static viewportOptions: {
+        displayArea: Types.DisplayArea;
+        background: Types.Point3;
+    };
+    maxWidth: number;
+    maxHeight: number;
+    modelWidth: number;
+    modelHeight: number;
+    tool: any;
+    static MODELS: {
+        sam_l: ({
+            name: string;
+            url: string;
+            size: number;
+            key: string;
+            feedType: string;
+        } | {
+            name: string;
+            url: string;
+            size: number;
+            key: string;
+            feedType?: undefined;
+        })[];
+        sam_h: ({
+            name: string;
+            url: string;
+            size: number;
+            key: string;
+            feedType: string;
+        } | {
+            name: string;
+            url: string;
+            size: number;
+            key: string;
+            feedType?: undefined;
+        })[];
+    };
+    canvas: HTMLCanvasElement;
+    canvasMask: HTMLCanvasElement;
+    private sessions;
+    private config;
+    private points;
+    private labels;
+    private worldPoints;
+    private loadingAI;
+    protected viewport: any;
+    protected excludeTool: string;
+    protected currentImage: any;
+    private listeners;
+    protected desiredImage: {
+        imageId: any;
+        sampleImageId: any;
+        imageIndex: number;
+        decoder: any;
+        encoder: any;
+    };
+    protected imageEncodings: Map<any, any>;
+    protected sharedImageEncoding: any;
+    protected boxRadius: number;
+    protected imageImageData: any;
+    protected isGpuInUse: boolean;
+    protected annotationsNeedUpdating: boolean;
+    protected maskImageData: any;
+    protected promptAnnotationTypes: string[];
+    protected islandFillOptions: {
+        maxInternalRemove: number;
+        fillInternalEdge: boolean;
+    };
+    protected pCutoff: number;
+    constructor(options?: {
+        listeners: any;
+        getPromptAnnotations: any;
+        promptAnnotationTypes: any;
+        models: any;
+        modelName: any;
+        islandFillOptions: any;
+    });
+    initModel(): Promise<unknown>;
+    setPCutoff(cutoff: number): void;
+    initViewport(viewport: any): void;
+    acceptPreview(element: any): void;
+    rejectPreview(element: any): void;
+    interpolateScroll(viewport?: any, dir?: number): Promise<void>;
+    protected log(logger: Loggers, ...args: any[]): void;
+    protected getPromptAnnotations: (viewport?: any) => cornerstoneTools.Types.Annotations;
+    protected viewportRenderedListener: (_event: any) => void;
+    protected annotationModifiedListener: (_event?: any) => void;
+    disconnectViewport(viewport: any): void;
+    protected load(): Promise<void>;
+    clear(viewport: any): void;
+    cacheImageEncodings(current?: any, offset?: number, length?: number): any;
+    protected handleImage({ imageId, sampleImageId }: {
+        imageId: any;
+        sampleImageId: any;
+    }, imageSession: any): Promise<void>;
+    protected runDecode(): Promise<void>;
+    tryLoad(options?: {
+        resetImage: boolean;
+    }): void;
+    mapAnnotationPoint(worldPoint: any): number[];
+    updateAnnotations(): void;
+    restoreImageEncoding(session: any, imageId: any): Promise<any>;
+    loadStorageImageEncoding(session: any, imageId: any, index?: any): Promise<any>;
+    storeImageEncoding(session: any, imageId: any, data: any): Promise<void>;
+    createLabelmap(mask: any, canvasPosition: any, _points: any, _labels: any): void;
+    decode(points: any, labels: any, useSession?: any): Promise<void>;
+    fetchAndCacheModel(url: any, name: any): Promise<ArrayBuffer>;
+    loadModels(models: any, imageSession?: any): Promise<void>;
+    getDirectoryForImageId(session: any, imageId: any): Promise<any>;
+    getFileNameForImageId(imageId: any, extension: any): any;
+    getConfig(modelName?: string): any;
+}
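For orientation, the declaration above is the full public surface of the controller added in this release. The sketch below is a minimal, hypothetical wiring of that API, not code taken from this package's examples: it assumes the class and the Loggers enum are importable from the package root, that a Cornerstone3D viewport is already rendering the target series, and that the SAM model files are served at the URLs configured in MODELS.

import ONNXSegmentationController, { Loggers } from '@cornerstonejs/ai';

// Hypothetical helper supplied by the host application: returns the
// Cornerstone3D viewport that should receive AI segmentation previews.
declare function getActiveViewport(): any;

async function setupAISegmentation(): Promise<void> {
  const controller = new ONNXSegmentationController({
    // Listeners receive (logger, ...args); Loggers.Log / Encoder / Decoder
    // identify which status channel is reporting.
    listeners: [(logger: Loggers, ...args: unknown[]) => console.debug(logger, ...args)],
    getPromptAnnotations: null,
    promptAnnotationTypes: null,
    models: null,
    modelName: 'sam_l', // one of the keys in ONNXSegmentationController.MODELS
    islandFillOptions: { maxInternalRemove: 16, fillInternalEdge: true },
  });

  // Downloads/creates the ONNX encoder and decoder sessions, then attaches the
  // controller so prompt annotations on the viewport trigger decoding.
  await controller.initModel();
  controller.initViewport(getActiveViewport());
}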
@@ -0,0 +1,836 @@
+import { utilities, eventTarget, Enums } from '@cornerstonejs/core';
+import * as cornerstoneTools from '@cornerstonejs/tools';
+import { segmentation as cstSegmentation, LabelmapBaseTool, } from '@cornerstonejs/tools';
+const { strategies } = cstSegmentation;
+const { fillInsideCircle } = strategies;
+import ort from 'onnxruntime-web/webgpu';
+import { vec3 } from 'gl-matrix';
+const { annotation } = cornerstoneTools;
+const { state: annotationState } = annotation;
+const { Events } = Enums;
+const { Events: toolsEvents } = cornerstoneTools.Enums;
+const { segmentation } = cornerstoneTools;
+const { filterAnnotationsForDisplay } = cornerstoneTools.utilities.planar;
+const { IslandRemoval } = cornerstoneTools.utilities;
+const { triggerSegmentationDataModified } = segmentation.triggerSegmentationEvents;
+function cloneTensor(t) {
+    return new ort.Tensor(t.type, Float32Array.from(t.data), t.dims);
+}
+function feedForSam(emb, points, labels, modelSize = [1024, 1024]) {
+    const maskInput = new ort.Tensor(new Float32Array(256 * 256), [1, 1, 256, 256]);
+    const hasMask = new ort.Tensor(new Float32Array([0]), [1]);
+    const originalImageSize = new ort.Tensor(new Float32Array(modelSize), [2]);
+    const pointCoords = new ort.Tensor(new Float32Array(points), [
+        1,
+        points.length / 2,
+        2,
+    ]);
+    const pointLabels = new ort.Tensor(new Float32Array(labels), [
+        1,
+        labels.length,
+    ]);
+    const key = (emb.image_embeddings && 'image_embeddings') || 'embeddings';
+    return {
+        image_embeddings: cloneTensor(emb[key]),
+        point_coords: pointCoords,
+        point_labels: pointLabels,
+        mask_input: maskInput,
+        has_mask_input: hasMask,
+        orig_im_size: originalImageSize,
+    };
+}
+function getBuffer(fileData) {
+    return new Promise((resolve) => {
+        const reader = new FileReader();
+        reader.readAsArrayBuffer(fileData);
+        reader.onload = function () {
+            const arrayBuffer = reader.result;
+            const bytes = new Float32Array(arrayBuffer);
+            resolve(bytes);
+        };
+    });
+}
+export var Loggers;
+(function (Loggers) {
+    Loggers["Log"] = "status";
+    Loggers["Encoder"] = "encoder";
+    Loggers["Decoder"] = "decoder";
+})(Loggers || (Loggers = {}));
+export default class ONNXSegmentationController {
+    static { this.MarkerInclude = 'MarkerInclude'; }
+    static { this.MarkerExclude = 'MarkerExclude'; }
+    static { this.BoxPrompt = 'BoxPrompt'; }
+    static { this.viewportOptions = {
+        displayArea: {
+            storeAsInitialCamera: true,
+            interpolationType: Enums.InterpolationType.NEAREST,
+            imageArea: [1, 1],
+            imageCanvasPoint: {
+                imagePoint: [0.5, 0.5],
+                canvasPoint: [0.5, 0.5],
+            },
+        },
+        background: [0, 0, 0.2],
+    }; }
+    static { this.MODELS = {
+        sam_l: [
+            {
+                name: 'sam-l-encoder',
+                url: '/sam_l/vit_l_encoder.onnx',
+                size: 1224,
+                key: 'encoder',
+                feedType: 'images',
+            },
+            {
+                name: 'sam-l-decoder',
+                url: '/sam_l/vit_l_decoder.onnx',
+                size: 17,
+                key: 'decoder',
+            },
+        ],
+        sam_h: [
+            {
+                name: 'sam-h-encoder',
+                url: '/sam_h/vit_h_encoder.onnx',
+                size: 18,
+                key: 'encoder',
+                feedType: 'images',
+            },
+            {
+                name: 'sam-h-decoder',
+                url: '/sam_h/vit_h_decoder.onnx',
+                size: 1,
+                key: 'decoder',
+            },
+        ],
+    }; }
+    constructor(options = {
+        listeners: null,
+        getPromptAnnotations: null,
+        promptAnnotationTypes: null,
+        models: null,
+        modelName: null,
+        islandFillOptions: undefined,
+    }) {
+        this.maxWidth = 1024;
+        this.maxHeight = 1024;
+        this.modelWidth = 1024;
+        this.modelHeight = 1024;
+        this.canvas = document.createElement('canvas');
+        this.canvasMask = document.createElement('canvas');
+        this.sessions = [];
+        this.points = [];
+        this.labels = [];
+        this.worldPoints = new Array();
+        this.excludeTool = ONNXSegmentationController.MarkerExclude;
+        this.listeners = [console.log];
+        this.desiredImage = {
+            imageId: null,
+            sampleImageId: null,
+            imageIndex: -1,
+            decoder: null,
+            encoder: null,
+        };
+        this.imageEncodings = new Map();
+        this.boxRadius = 5;
+        this.isGpuInUse = false;
+        this.annotationsNeedUpdating = false;
+        this.promptAnnotationTypes = [
+            ONNXSegmentationController.MarkerInclude,
+            ONNXSegmentationController.MarkerExclude,
+            ONNXSegmentationController.BoxPrompt,
+        ];
+        this.islandFillOptions = {
+            maxInternalRemove: 16,
+            fillInternalEdge: true,
+        };
+        this.pCutoff = 64;
+        this.getPromptAnnotations = (viewport = this.viewport) => {
+            const annotations = [];
+            const { element } = viewport;
+            for (const annotationName of this.promptAnnotationTypes) {
+                annotations.push(...annotationState.getAnnotations(annotationName, element));
+            }
+            const currentAnnotations = filterAnnotationsForDisplay(this.viewport, annotations);
+            return currentAnnotations;
+        };
+        this.viewportRenderedListener = (_event) => {
+            const { viewport, currentImage, desiredImage } = this;
+            desiredImage.imageId =
+                viewport.getCurrentImageId() || viewport.getViewReferenceId();
+            desiredImage.imageIndex = viewport.getCurrentImageIdIndex();
+            if (!desiredImage.imageId) {
+                return;
+            }
+            if (desiredImage.imageId.startsWith('volumeId:')) {
+                desiredImage.sampleImageId = viewport.getImageIds(viewport.getVolumeId())[0];
+            }
+            else {
+                desiredImage.sampleImageId = desiredImage.imageId;
+            }
+            if (desiredImage.imageId === currentImage?.imageId) {
+                return;
+            }
+            const { canvasMask } = this;
+            const ctxMask = canvasMask.getContext('2d');
+            ctxMask.clearRect(0, 0, canvasMask.width, canvasMask.height);
+            this.tryLoad({ resetImage: true });
+        };
+        this.annotationModifiedListener = (_event) => {
+            const currentAnnotations = this.getPromptAnnotations();
+            if (!currentAnnotations.length) {
+                return;
+            }
+            this.annotationsNeedUpdating = true;
+            this.tryLoad();
+        };
+        if (options.listeners) {
+            this.listeners = [...options.listeners];
+        }
+        if (options.getPromptAnnotations) {
+            this.getPromptAnnotations = options.getPromptAnnotations;
+        }
+        this.promptAnnotationTypes =
+            options.promptAnnotationTypes || this.promptAnnotationTypes;
+        if (options.models) {
+            Object.assign(ONNXSegmentationController.MODELS, options.models);
+        }
+        this.config = this.getConfig(options.modelName);
+        this.islandFillOptions =
+            options.islandFillOptions ?? this.islandFillOptions;
+    }
+    initModel() {
+        if (!this.loadingAI) {
+            this.loadingAI = this.load();
+        }
+        return this.loadingAI;
+    }
+    setPCutoff(cutoff) {
+        this.pCutoff = cutoff;
+        this.annotationsNeedUpdating = true;
+        this.tryLoad();
+    }
+    initViewport(viewport) {
+        const { desiredImage } = this;
+        if (this.viewport) {
+            this.disconnectViewport(this.viewport);
+        }
+        this.currentImage = null;
+        this.viewport = viewport;
+        const brushInstance = new LabelmapBaseTool({}, {
+            configuration: {
+                strategies: {
+                    FILL_INSIDE_CIRCLE: fillInsideCircle,
+                },
+                activeStrategy: 'FILL_INSIDE_CIRCLE',
+                preview: {
+                    enabled: true,
+                    previewColors: {
+                        0: [255, 255, 255, 128],
+                        1: [0, 255, 255, 192],
+                        2: [255, 0, 255, 255],
+                    },
+                },
+            },
+        });
+        this.tool = brushInstance;
+        desiredImage.imageId =
+            viewport.getCurrentImageId?.() || viewport.getViewReferenceId();
+        if (desiredImage.imageId.startsWith('volumeId:')) {
+            desiredImage.sampleImageId = viewport.getImageIds(viewport.getVolumeId())[0];
+        }
+        else {
+            desiredImage.sampleImageId = desiredImage.imageId;
+        }
+        viewport.element.addEventListener(Events.IMAGE_RENDERED, this.viewportRenderedListener);
+        const boundListener = this.annotationModifiedListener;
+        eventTarget.addEventListener(toolsEvents.ANNOTATION_ADDED, boundListener);
+        eventTarget.addEventListener(toolsEvents.ANNOTATION_MODIFIED, boundListener);
+        eventTarget.addEventListener(toolsEvents.ANNOTATION_COMPLETED, boundListener);
+        if (desiredImage.imageId) {
+            this.tryLoad();
+        }
+    }
+    acceptPreview(element) {
+        this.tool.acceptPreview(element);
+    }
+    rejectPreview(element) {
+        this.tool.rejectPreview(element);
+    }
+    async interpolateScroll(viewport = this.viewport, dir = 1) {
+        const { element } = viewport;
+        this.tool.acceptPreview(element);
+        const promptAnnotations = this.getPromptAnnotations(viewport);
+        if (!promptAnnotations.length) {
+            return;
+        }
+        const currentSliceIndex = viewport.getCurrentImageIdIndex();
+        const { focalPoint } = viewport.getCamera();
+        const viewRef = viewport.getViewReference({
+            sliceIndex: currentSliceIndex + dir,
+        });
+        if (!viewRef || viewRef.sliceIndex === currentSliceIndex) {
+            console.warn('No next image in direction', dir, currentSliceIndex);
+            return;
+        }
+        viewport.scroll(dir);
+        await new Promise((resolve) => window.setTimeout(resolve, 250));
+        const nextAnnotations = this.getPromptAnnotations(viewport);
+        if (nextAnnotations.length > 0) {
+            return;
+        }
+        const { focalPoint: newFocal } = viewport.getCamera();
+        const newDelta = vec3.sub(vec3.create(), newFocal, focalPoint);
+        for (const annotation of promptAnnotations) {
+            annotation.interpolationUID ||= crypto.randomUUID();
+            const newAnnotation = structuredClone(annotation);
+            newAnnotation.annotationUID = undefined;
+            Object.assign(newAnnotation.metadata, viewRef);
+            newAnnotation.cachedStats = {};
+            for (const handle of newAnnotation.data.handles.points) {
+                vec3.add(handle, handle, newDelta);
+            }
+            annotationState.addAnnotation(newAnnotation, viewport.element);
+        }
+        viewport.render();
+    }
+    log(logger, ...args) {
+        for (const listener of this.listeners) {
+            listener(logger, ...args);
+        }
+    }
+    disconnectViewport(viewport) {
+        viewport.element.removeEventListener(Events.IMAGE_RENDERED, this.viewportRenderedListener);
+        const boundListener = this.annotationModifiedListener;
+        eventTarget.removeEventListener(toolsEvents.ANNOTATION_MODIFIED, boundListener);
+        eventTarget.removeEventListener(toolsEvents.ANNOTATION_COMPLETED, boundListener);
+    }
+    async load() {
+        const { sessions } = this;
+        this.canvas.style.cursor = 'wait';
+        let loader;
+        for (let i = 0; i < 2; i++) {
+            sessions.push({
+                sessionIndex: i,
+                encoder: null,
+                decoder: null,
+                imageEmbeddings: null,
+                isLoading: false,
+                canvas: i === 0 ? this.canvas : document.createElement('canvas'),
+            });
+            if (i === 0) {
+                loader = this.loadModels(ONNXSegmentationController.MODELS[this.config.model], sessions[i]).catch((e) => {
+                    this.log(Loggers.Log, "Couldn't load models", e);
+                });
+                await loader;
+            }
+            else {
+                sessions[i].encoder = sessions[0].encoder;
+            }
+            sessions[i].loader = loader;
+        }
+    }
+    clear(viewport) {
+        this.points = [];
+        this.labels = [];
+        this.getPromptAnnotations(viewport).forEach((annotation) => annotationState.removeAnnotation(annotation.annotationUID));
+        this.tool.rejectPreview(this.viewport.element);
+    }
+    async cacheImageEncodings(current = this.viewport.getCurrentImageIdIndex(), offset = 0, length = 1000_000) {
+        const { viewport, imageEncodings } = this;
+        if (offset >= length) {
+            return;
+        }
+        const index = (offset + current) % length;
+        const view = viewport.getViewReference({ sliceIndex: index });
+        if (!view) {
+            length = index;
+            return this.cacheImageEncodings(current, offset, length);
+        }
+        const imageId = view.referencedImageId ||
+            viewport.getViewReferenceId({ sliceIndex: index });
+        if (!imageEncodings.has(imageId)) {
+            await this.loadStorageImageEncoding(current, imageId, index);
+        }
+        if (imageEncodings.has(imageId)) {
+            this.cacheImageEncodings(current, offset + 1, length);
+            return;
+        }
+        this.tryLoad();
+        if (this.isGpuInUse) {
+            setTimeout(() => this.cacheImageEncodings(current, offset), 500);
+            return;
+        }
+        this.log(Loggers.Log, 'Caching', index, imageId);
+        const sampleImageId = viewport.getImageIds()[0];
+        this.handleImage({ imageId, sampleImageId }, this.sessions[1]).then(() => {
+            this.cacheImageEncodings(current, offset + 1, length);
+        });
+    }
+    async handleImage({ imageId, sampleImageId }, imageSession) {
+        if (imageId === imageSession.imageId || this.isGpuInUse) {
+            return;
+        }
+        const { viewport, desiredImage } = this;
+        this.isGpuInUse = true;
+        imageSession.imageId = imageId;
+        imageSession.sampleImageId = sampleImageId;
+        try {
+            const isCurrent = desiredImage.imageId === imageId;
+            const { canvas } = imageSession;
+            if (isCurrent) {
+                this.log(Loggers.Encoder, `Loading image on ${imageSession.sessionIndex}`);
+                this.log(Loggers.Decoder, 'Awaiting image');
+                canvas.style.cursor = 'wait';
+            }
+            this.points = [];
+            this.labels = [];
+            const width = this.maxWidth;
+            const height = this.maxHeight;
+            canvas.width = width;
+            canvas.height = height;
+            imageSession.imageEmbeddings = undefined;
+            const size = canvas.style.width;
+            const ctx = canvas.getContext('2d', { willReadFrequently: true });
+            ctx.clearRect(0, 0, width, height);
+            const renderArguments = {
+                canvas,
+                imageId,
+                viewportOptions: {
+                    ...viewport.defaultOptions,
+                    ...ONNXSegmentationController.viewportOptions,
+                },
+                viewReference: null,
+                renderingEngineId: viewport.getRenderingEngine().id,
+            };
+            if (imageId.startsWith('volumeId:')) {
+                const viewRef = viewport.getViewReference();
+                renderArguments.viewReference = viewRef;
+                renderArguments.imageId = null;
+            }
+            imageSession.canvasPosition = await utilities.loadImageToCanvas(renderArguments);
+            canvas.style.width = size;
+            canvas.style.height = size;
+            if (isCurrent) {
+                this.log(Loggers.Encoder, `Rendered image on ${imageSession.sessionIndex}`);
+            }
+            this.imageImageData = ctx.getImageData(0, 0, width, height);
+            const data = await this.restoreImageEncoding(imageSession, imageId);
+            if (data) {
+                imageSession.imageEmbeddings = data;
+                if (desiredImage.imageId === imageId) {
+                    this.log(Loggers.Encoder, 'Cached Image');
+                    canvas.style.cursor = 'default';
+                }
+            }
+            else {
+                const t = await ort.Tensor.fromImage(this.imageImageData, {
+                    resizedWidth: this.modelWidth,
+                    resizedHeight: this.modelHeight,
+                });
+                const { feedType = 'input_image' } = this.config.encoder;
+                const feed = (feedType === 'images' && { images: t }) ||
+                    (feedType === 'pixelValues' && { pixel_values: t }) || {
+                    input_image: t,
+                };
+                await imageSession.loader;
+                const session = await imageSession.encoder;
+                if (!session) {
+                    this.log(Loggers.Log, '****** No session');
+                    return;
+                }
+                const start = performance.now();
+                imageSession.imageEmbeddings = session.run(feed);
+                const data = await imageSession.imageEmbeddings;
+                this.storeImageEncoding(imageSession, imageId, data);
+                if (desiredImage.imageId === imageId) {
+                    this.log(Loggers.Encoder, `Image Ready ${imageSession.sessionIndex} ${(performance.now() - start).toFixed(1)} ms`);
+                    canvas.style.cursor = 'default';
+                }
+            }
+        }
+        finally {
+            this.isGpuInUse = false;
+        }
+        this.tryLoad();
+    }
+    async runDecode() {
+        const { canvas } = this;
+        if (this.isGpuInUse || !this.currentImage?.imageEmbeddings) {
+            return;
+        }
+        this.isGpuInUse = true;
+        try {
+            this.canvas.style.cursor = 'wait';
+            await this.decode(this.points, this.labels);
+        }
+        finally {
+            canvas.style.cursor = 'default';
+            this.isGpuInUse = false;
+        }
+    }
+    tryLoad(options = { resetImage: false }) {
+        const { viewport, desiredImage } = this;
+        if (!desiredImage.imageId || options.resetImage) {
+            desiredImage.imageId =
+                viewport.getCurrentImageId() || viewport.getViewReferenceId();
+            this.currentImage = null;
+        }
+        const [session] = this.sessions;
+        if (session.imageId === desiredImage.imageId) {
+            if (this.currentImage !== session) {
+                this.currentImage = session;
+            }
+            this.updateAnnotations();
+            return;
+        }
+        this.handleImage(desiredImage, session);
+    }
+    mapAnnotationPoint(worldPoint) {
+        const { viewport } = this;
+        const canvasPoint = viewport.worldToCanvas(worldPoint);
+        const { width, height } = viewport.canvas;
+        const { width: destWidth, height: destHeight } = this.canvas;
+        const x = Math.trunc((canvasPoint[0] * destWidth * devicePixelRatio) / width);
+        const y = Math.trunc((canvasPoint[1] * destHeight * devicePixelRatio) / height);
+        return [x, y];
+    }
+    updateAnnotations() {
+        if (this.isGpuInUse ||
+            !this.annotationsNeedUpdating ||
+            !this.currentImage) {
+            return;
+        }
+        const promptAnnotations = this.getPromptAnnotations();
+        this.annotationsNeedUpdating = false;
+        this.points = [];
+        this.labels = [];
+        this.worldPoints = [];
+        if (!promptAnnotations?.length) {
+            return;
+        }
+        for (const annotation of promptAnnotations) {
+            const handle = annotation.data.handles.points[0];
+            const point = this.mapAnnotationPoint(handle);
+            this.points.push(...point);
+            if (annotation.metadata.toolName === ONNXSegmentationController.BoxPrompt) {
+                this.labels.push(2, 3);
+                this.points.push(...this.mapAnnotationPoint(annotation.data.handles.points[3]));
+            }
+            else {
+                const label = annotation.metadata.toolName === this.excludeTool ? 0 : 1;
+                if (label) {
+                    this.worldPoints.push(handle);
+                }
+                this.labels.push(label);
+            }
+        }
+        this.runDecode();
+    }
+    async restoreImageEncoding(session, imageId) {
+        if (!this.sharedImageEncoding) {
+            return;
+        }
+        if (!this.imageEncodings.has(imageId)) {
+            await this.loadStorageImageEncoding(session, imageId);
+        }
+        const floatData = this.imageEncodings.get(imageId);
+        if (floatData) {
+            const key = (this.sharedImageEncoding.image_embeddings && 'image_embeddings') ||
+                'embeddings';
+            this.sharedImageEncoding[key].cpuData.set(floatData);
+            return this.sharedImageEncoding;
+        }
+    }
+    async loadStorageImageEncoding(session, imageId, index = null) {
+        try {
+            const root = await this.getDirectoryForImageId(session, imageId);
+            const name = this.getFileNameForImageId(imageId, this.config.model);
+            if (!root || !name) {
+                return null;
+            }
+            const fileHandle = await findFileEntry(root, name);
+            if (!fileHandle) {
+                return null;
+            }
+            this.log(Loggers.Log, 'Loading from storage', index || imageId, name);
+            const file = await fileHandle.getFile();
+            if (file) {
+                const buffer = await getBuffer(file);
+                this.imageEncodings.set(imageId, buffer);
+            }
+        }
+        catch (e) {
+            this.log(Loggers.Log, 'Unable to fetch file', imageId, e);
+        }
+    }
+    async storeImageEncoding(session, imageId, data) {
+        if (!this.sharedImageEncoding) {
+            this.sharedImageEncoding = data;
+        }
+        const storeData = (data.image_embeddings || data.embeddings)?.cpuData;
+        if (!storeData) {
+            console.log('Unable to store data', data);
+            return;
+        }
+        const writeData = new Float32Array(storeData);
+        this.imageEncodings.set(imageId, writeData);
+        try {
+            const root = await this.getDirectoryForImageId(session, imageId);
+            const name = this.getFileNameForImageId(imageId, this.config.model);
+            if (!root || !name) {
+                return;
+            }
+            const fileHandle = await root.getFileHandle(name, { create: true });
+            const writable = await fileHandle.createWritable();
+            await writable.write(writeData);
+            await writable.close();
+        }
+        catch (e) {
+            this.log(Loggers.Log, 'Unable to write', imageId, e);
+        }
+    }
+    createLabelmap(mask, canvasPosition, _points, _labels) {
+        const { canvas, viewport } = this;
+        const preview = this.tool.addPreview(viewport.element);
+        const { previewSegmentIndex, memo, segmentationId, segmentIndex } = preview;
+        const previewVoxelManager = memo?.voxelManager || preview.previewVoxelManager;
+        const { dimensions } = previewVoxelManager;
+        const { data } = mask;
+        const { origin, topRight, bottomLeft } = canvasPosition;
+        const downVec = vec3.subtract(vec3.create(), bottomLeft, origin);
+        const rightVec = vec3.subtract(vec3.create(), topRight, origin);
+        vec3.scale(downVec, downVec, 1 / canvas.height);
+        vec3.scale(rightVec, rightVec, 1 / canvas.width);
+        const worldPointJ = vec3.create();
+        const worldPoint = vec3.create();
+        const imageData = viewport.getDefaultImageData();
+        for (let j = 0; j < canvas.height; j++) {
+            vec3.scaleAndAdd(worldPointJ, origin, downVec, j);
+            for (let i = 0; i < canvas.width; i++) {
+                vec3.scaleAndAdd(worldPoint, worldPointJ, rightVec, i);
+                const ijkPoint = imageData.worldToIndex(worldPoint).map(Math.round);
+                if (ijkPoint.findIndex((v, index) => v < 0 || v >= dimensions[index]) !==
+                    -1) {
+                    continue;
+                }
+                const maskIndex = 4 * (i + j * this.maxWidth);
+                const v = data[maskIndex];
+                if (v > this.pCutoff) {
+                    previewVoxelManager.setAtIJKPoint(ijkPoint, previewSegmentIndex);
+                }
+                else {
+                    previewVoxelManager.setAtIJKPoint(ijkPoint, null);
+                }
+            }
+        }
+        const voxelManager = previewVoxelManager.sourceVoxelManager || previewVoxelManager;
+        if (this.islandFillOptions) {
+            const islandRemoval = new IslandRemoval(this.islandFillOptions);
+            if (islandRemoval.initialize(viewport, voxelManager, {
+                previewSegmentIndex,
+                segmentIndex,
+                points: this.worldPoints.map((point) => imageData.worldToIndex(point).map(Math.round)),
+            })) {
+                islandRemoval.floodFillSegmentIsland();
+                islandRemoval.removeExternalIslands();
+                islandRemoval.removeInternalIslands();
+            }
+        }
+        triggerSegmentationDataModified(segmentationId);
+    }
+    async decode(points, labels, useSession = this.currentImage) {
+        const { canvas, canvasMask, imageImageData, desiredImage, boxRadius } = this;
+        const ctx = canvas.getContext('2d', { willReadFrequently: true });
+        ctx.clearRect(0, 0, canvas.width, canvas.height);
+        canvas.width = imageImageData.width;
+        canvas.height = imageImageData.height;
+        canvasMask.width = imageImageData.width;
+        canvasMask.height = imageImageData.height;
+        if (!useSession || useSession.imageId !== desiredImage.imageId) {
+            this.log(Loggers.Log, '***** Image not current, need to wait for current image');
+            return;
+        }
+        ctx.putImageData(imageImageData, 0, 0);
+        if (points.length) {
+            if (!useSession.imageEmbeddings) {
+                await useSession.encoder;
+            }
+            const emb = await useSession.imageEmbeddings;
+            const session = useSession.decoder;
+            const feed = feedForSam(emb, points, labels);
+            const res = await session.run(feed);
+            for (let i = 0; i < points.length; i += 2) {
+                const label = labels[i / 2];
+                ctx.fillStyle = label ? 'blue' : 'pink';
+                ctx.fillRect(points[i] - boxRadius, points[i + 1] - boxRadius, 2 * boxRadius, 2 * boxRadius);
+            }
+            const mask = res.masks;
+            this.maskImageData = mask.toImageData();
+            this.createLabelmap(this.maskImageData, useSession.canvasPosition, points, labels);
+            ctx.globalAlpha = 0.3;
+            const { data } = this.maskImageData;
+            const counts = [];
+            for (let i = 0; i < data.length; i += 4) {
+                const v = data[i];
+                if (v > 0) {
+                    if (v < 255) {
+                        data[i] = 0;
+                        if (v > 192) {
+                            data[i + 1] = 255;
+                        }
+                        else {
+                            data[i + 2] = v + 64;
+                        }
+                    }
+                    counts[v] = 1 + (counts[v] || 0);
+                }
+            }
+            const bitmap = await createImageBitmap(this.maskImageData);
+            ctx.drawImage(bitmap, 0, 0);
+            const ctxMask = canvasMask.getContext('2d');
+            ctxMask.globalAlpha = 0.9;
+            ctxMask.drawImage(bitmap, 0, 0);
+        }
+    }
+    async fetchAndCacheModel(url, name) {
+        try {
+            const cache = await caches.open('onnx');
+            let cachedResponse = await cache.match(url);
+            if (cachedResponse == undefined) {
+                await cache.add(url);
+                cachedResponse = await cache.match(url);
+                this.log(Loggers.Log, `${name} (network)`);
+            }
+            else {
+                this.log(Loggers.Log, `${name} (cached)`);
+            }
+            const data = await cachedResponse.arrayBuffer();
+            return data;
+        }
+        catch (error) {
+            this.log(Loggers.Log, `${name} (network)`);
+            return await fetch(url).then((response) => response.arrayBuffer());
+        }
+    }
+    async loadModels(models, imageSession = this.currentImage) {
+        const cache = await caches.open('onnx');
+        let missing = 0;
+        const urls = [];
+        for (const model of Object.values(models)) {
+            const cachedResponse = await cache.match(model.url);
+            if (cachedResponse === undefined) {
+                missing += model.size;
+            }
+            urls.push(model.url);
+        }
+        if (missing > 0) {
+            this.log(Loggers.Log, `downloading ${missing} MB from network ... it might take a while`);
+        }
+        else {
+            this.log(Loggers.Log, 'loading...');
+        }
+        const start = performance.now();
+        for (const model of Object.values(models)) {
+            const opt = {
+                executionProviders: [this.config.provider],
+                enableMemPattern: false,
+                enableCpuMemArena: false,
+                extra: {
+                    session: {
+                        disable_prepacking: '1',
+                        use_device_allocator_for_initializers: '1',
+                        use_ort_model_bytes_directly: '1',
+                        use_ort_model_bytes_for_initializers: '1',
+                    },
+                },
+                interOpNumThreads: 4,
+                intraOpNumThreads: 2,
+            };
+            const model_bytes = await this.fetchAndCacheModel(model.url, model.name);
+            const extra_opt = model.opt || {};
+            const sessionOptions = { ...opt, ...extra_opt };
+            this.config[model.key] = model;
+            imageSession[model.key] = await ort.InferenceSession.create(model_bytes, sessionOptions);
+        }
+        const stop = performance.now();
+        this.log(Loggers.Log, `ready, ${(stop - start).toFixed(1)}ms`, urls.join(', '));
+    }
+    async getDirectoryForImageId(session, imageId) {
+        if (imageId.indexOf('/studies/') === -1 ||
+            imageId.indexOf('/instances/') === -1) {
+            imageId = session.sampleImageId;
+            if (!imageId ||
+                imageId.indexOf('/studies/') === -1 ||
+                imageId.indexOf('/instances/') === -1) {
+                return null;
+            }
+        }
+        const studySeriesUids = imageId
+            .split('/studies/')[1]
+            .split('/instances/')[0]
+            .split('/');
+        const [studyUID, _series, seriesUID] = studySeriesUids;
+        const root = await window.navigator.storage.getDirectory();
+        const modelRoot = await getOrCreateDir(root, this.config.model);
+        const studyRoot = await getOrCreateDir(modelRoot, studyUID);
+        const seriesRoot = await getOrCreateDir(studyRoot, seriesUID);
+        return seriesRoot;
+    }
+    getFileNameForImageId(imageId, extension) {
+        if (imageId.startsWith('volumeId:')) {
+            const sliceIndex = imageId.indexOf('sliceIndex=');
+            const focalPoint = imageId.indexOf('&focalPoint=');
+            const name = imageId
+                .substring(sliceIndex, focalPoint)
+                .replace('&', '.')
+                .replace('sliceIndex=', 'volume.');
+            return name + extension;
+        }
+        const instancesLocation = imageId.indexOf('/instances/');
+        if (instancesLocation != -1) {
+            const sopLocation = instancesLocation + 11;
+            const nextSlash = imageId.indexOf('/', sopLocation);
+            return imageId.substring(sopLocation, nextSlash) + extension;
+        }
+    }
+    getConfig(modelName = 'sam_b') {
+        if (this.config) {
+            return this.config;
+        }
+        const query = window.location.search.substring(1);
+        const config = {
+            model: modelName,
+            provider: 'webgpu',
+            device: 'gpu',
+            threads: 4,
+            local: null,
+            isSlimSam: false,
+        };
+        const vars = query.split('&');
+        for (let i = 0; i < vars.length; i++) {
+            const pair = vars[i].split('=');
+            if (pair[0] in config) {
+                config[pair[0]] = decodeURIComponent(pair[1]);
+            }
+        }
+        config.threads = parseInt(String(config.threads));
+        config.local = parseInt(config.local);
+        ort.env.wasm.wasmPaths = 'dist/';
+        ort.env.wasm.numThreads = config.threads;
+        ort.env.wasm.proxy = config.provider == 'wasm';
+        this.config = config;
+        return config;
+    }
+}
+async function getOrCreateDir(dir, name) {
+    return ((await findFileEntry(dir, name)) ||
+        dir.getDirectoryHandle(name, { create: true }));
+}
+async function findFileEntry(dir, name) {
+    for await (const [key, value] of dir) {
+        if (key === name) {
+            return value;
+        }
+    }
+}
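As context for the decoder plumbing above: updateAnnotations flattens the prompt annotations into parallel points/labels arrays (include markers map to label 1, exclude markers to label 0, and a box prompt contributes two corner handles with labels 2 and 3), and feedForSam then wraps them into the ONNX Runtime tensors the SAM decoder expects. A small illustrative sketch with made-up coordinates, not code from the package:

// Coordinates are in the 1024x1024 canvas/model space produced by mapAnnotationPoint.
const points: number[] = [
  312, 407, // MarkerInclude            -> label 1
  500, 120, // MarkerExclude            -> label 0
  250, 300, // BoxPrompt corner handle  -> label 2
  620, 700, // BoxPrompt corner handle  -> label 3
];
const labels: number[] = [1, 0, 2, 3];

// feedForSam(emb, points, labels) builds point_coords with dims
// [1, points.length / 2, 2] and point_labels with dims [1, labels.length],
// clones the cached image embeddings, and the decoder session then runs:
//   const res = await session.run(feedForSam(emb, points, labels));
//   const maskImageData = res.masks.toImageData();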
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@cornerstonejs/ai",
-  "version": "3.0.0-beta.2",
+  "version": "3.0.0-beta.4",
   "description": "AI and ML Interfaces for Cornerstone3D",
   "files": [
     "dist"
@@ -21,7 +21,8 @@
   },
   "scripts": {
     "test": "jest --testTimeout 60000",
-    "clean": "
+    "clean": "rm -rf node_modules/.cache/storybook && shx rm -rf dist",
+    "clean:deep": "yarn run clean && shx rm -rf node_modules",
     "build": "yarn run build:esm",
     "build:esm": "tsc --project ./tsconfig.json",
     "build:esm:watch": "tsc --project ./tsconfig.json --watch",
@@ -30,11 +31,14 @@
     "build:update-api": "yarn run build:esm && api-extractor run --local",
     "start": "tsc --project ./tsconfig.json --watch",
     "format": "prettier --write 'src/**/*.js' 'test/**/*.js'",
-    "lint": "eslint --fix ."
+    "lint": "eslint --fix .",
+    "format-check": "npx eslint ./src --quiet",
+    "api-check": "api-extractor --debug run ",
+    "prepublishOnly": "yarn clean && yarn build"
   },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/
+    "url": "git+https://github.com/cornerstonejs/cornerstone3D.git"
   },
   "author": "@cornerstonejs",
   "license": "MIT",
@@ -53,8 +57,8 @@
     "onnxruntime-web": "1.17.1"
   },
   "peerDependencies": {
-    "@cornerstonejs/core": "^3.0.0-beta.
-    "@cornerstonejs/tools": "^3.0.0-beta.
+    "@cornerstonejs/core": "^3.0.0-beta.4",
+    "@cornerstonejs/tools": "^3.0.0-beta.4"
   },
-  "gitHead": "
+  "gitHead": "93464de8c8c6f01ca4ca35140c8f910b4a226582"
 }