edge-impulse-linux 1.17.6 → 1.17.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/cli/linux/runner-utils.js +21 -6
- package/build/cli/linux/runner-utils.js.map +1 -1
- package/build/cli/linux/webserver/public/inference-server.js +64 -0
- package/build/cli/linux/webserver/public/inference-server.js.map +1 -1
- package/build/cli/linux/webserver/views/inference-server-view.js +7 -1
- package/build/cli/linux/webserver/views/inference-server-view.js.map +1 -1
- package/build/library/classifier/linux-impulse-runner-types.d.ts +7 -0
- package/build/library/moving-average-filter.d.ts +7 -0
- package/build/library/sensors/gstreamer.js +4 -4
- package/build/library/sensors/gstreamer.js.map +1 -1
- package/build/sdk/studio/sdk/api/dSPApi.d.ts +2 -0
- package/build/sdk/studio/sdk/api/dSPApi.js +4 -0
- package/build/sdk/studio/sdk/api/dSPApi.js.map +1 -1
- package/build/sdk/studio/sdk/api/organizationsApi.d.ts +4 -4
- package/build/sdk/studio/sdk/api/organizationsApi.js +4 -4
- package/build/sdk/studio/sdk/model/deployPretrainedModelInputImage.d.ts +2 -0
- package/build/sdk/studio/sdk/model/deployPretrainedModelInputImage.js +5 -0
- package/build/sdk/studio/sdk/model/deployPretrainedModelInputImage.js.map +1 -1
- package/build/sdk/studio/sdk/model/imageTensorFormat.d.ts +16 -0
- package/build/sdk/studio/sdk/model/imageTensorFormat.js +16 -0
- package/build/sdk/studio/sdk/model/imageTensorFormat.js.map +1 -0
- package/build/sdk/studio/sdk/model/models.d.ts +1 -0
- package/build/sdk/studio/sdk/model/models.js +2 -0
- package/build/sdk/studio/sdk/model/models.js.map +1 -1
- package/build/sdk/studio/sdk/model/testPretrainedModelImagesRequestInput.d.ts +2 -0
- package/build/sdk/studio/sdk/model/testPretrainedModelImagesRequestInput.js +5 -0
- package/build/sdk/studio/sdk/model/testPretrainedModelImagesRequestInput.js.map +1 -1
- package/build/sdk/studio/sdk/model/userEulaName.d.ts +1 -1
- package/build/sdk/studio/sdk/model/userEulaName.js +1 -1
- package/build/sdk/studio/sdk/model/userEulaName.js.map +1 -1
- package/build/shared/views/project/bounding-box-scaling.d.ts +40 -44
- package/build/shared/views/project/bounding-box-scaling.js +113 -123
- package/build/shared/views/project/bounding-box-scaling.js.map +1 -1
- package/cli/linux/webserver/public/assets/mobileclient.css +41 -0
- package/cli/linux/webserver/public/inference-server.js +68 -0
- package/package.json +1 -1
- package/test/gstreamer.test.ts +29 -0
- package/test/triple-vision-camera-inspect.txt +1430 -0
- package/test/triple-vision-camera-monitor.txt +3 -0
package/build/shared/views/project/bounding-box-scaling.js
CHANGED

@@ -1,75 +1,96 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.maskCropped = exports.getMaskPercent = exports.mapPredictionToOriginalImage = void 0;
 /**
- *
- *
-
+ * Maps a predicted bounding box from reference output coordinates to client image coordinates,
+ * adjusting for the resize mode. This is necessary because the reference image dimensions
+ * that the bounding boxes are relative to may differ from the displayed image due to cropping
+ * and resizing.
+ *
+ * Supported resize modes:
+ * - 'squash': The image is resized to fit the client image exactly, potentially distorting the aspect ratio.
+ * - 'fit-short': The shorter axis is fit to the client image, cropping the longer axis to maintain aspect ratio.
+ * - 'fit-long': The longer axis is fit to the client image, padding the shorter axis to maintain aspect ratio.
+ * - 'crop': Deprecated mode; returns a zeroed bounding box.
+ *
+ * @param resizeMode - The resize mode used during preprocessing.
+ * @param box - The predicted bounding box from the model, with normalized coordinates (0 to 1).
+ * @param img - Image dimension information:
+ * - clientWidth: The target width of the image as displayed on the client.
+ * - clientHeight: The target height of the image as displayed on the client.
+ * - referenceWidth: The width of the image used as the reference for bounding box predictions.
+ * - referenceHeight: The height of the image used as the reference for bounding box predictions.
+ * @returns An object containing the mapped bounding box coordinates in pixel units for display in the client:
+ * - x: The x-coordinate of the top-left corner in pixels.
+ * - y: The y-coordinate of the top-left corner in pixels.
+ * - width: The width of the bounding box in pixels.
+ * - height: The height of the bounding box in pixels.
+ */
 const mapPredictionToOriginalImage = (resizeMode, box, img) => {
     let x = 0;
     let y = 0;
     let width = 0;
     let height = 0;
     if (resizeMode === 'squash') {
-        //
-
-
-
-
+        // Simple proportional scale from reference dimensions to client dimensions
+        const scaleX = img.clientWidth / img.referenceWidth;
+        const scaleY = img.clientHeight / img.referenceHeight;
+        x = Math.max(0, Math.min(box.x * img.referenceWidth * scaleX, img.clientWidth));
+        y = Math.max(0, Math.min(box.y * img.referenceHeight * scaleY, img.clientHeight));
+        width = Math.min(box.width * img.referenceWidth * scaleX, img.clientWidth - x);
+        height = Math.min(box.height * img.referenceHeight * scaleY, img.clientHeight - y);
     }
     else if (resizeMode === 'fit-short') {
-
-
-        const
-
-
-
-
-
-
-
-
+        // Fit-short: crop the longer axis, scale by the shorter axis, but use reference aspect ratio
+        const referenceAspectRatio = img.referenceWidth / img.referenceHeight;
+        const clientAspectRatio = img.clientWidth / img.clientHeight;
+        let scale = 1;
+        let offsetX = 0;
+        let offsetY = 0;
+        if (clientAspectRatio > referenceAspectRatio) {
+            // Client image is wider than reference image => crop width
+            scale = img.clientHeight / img.referenceHeight;
+            const cropWidth = img.clientWidth - img.referenceWidth * scale;
+            offsetX = cropWidth / 2;
+        }
+        else {
+            // Client image is taller than reference image => crop height
+            scale = img.clientWidth / img.referenceWidth;
+            const cropHeight = img.clientHeight - img.referenceHeight * scale;
+            offsetY = cropHeight / 2;
+        }
+        x = Math.max(0, Math.min(box.x * img.referenceWidth * scale + offsetX, img.clientWidth));
+        y = Math.max(0, Math.min(box.y * img.referenceHeight * scale + offsetY, img.clientHeight));
+        width = Math.min(box.width * img.referenceWidth * scale, img.clientWidth - x);
+        height = Math.min(box.height * img.referenceHeight * scale, img.clientHeight - y);
     }
     else if (resizeMode === 'fit-long') {
-
-        const
-
-
-
-
-        //
-
-
-
-
-
-
-
-        else if (ratio < 1) {
-            // Image is taller, so horizontal padding was added
-            // Calculate normalized padding for the padding
-            const squaredWidth = img.clientWidth + 2 * padding;
-            const normalizedPadding = padding / squaredWidth;
-            // Remove normalized padding from x and re-normalize to the
-            // original image width (excluding the padding)
-            x = (box.x - normalizedPadding) / (1 - 2 * normalizedPadding);
-            y = box.y;
-            // Scale normalized width and height of the bounding box
-            width = box.width / (1 - 2 * normalizedPadding);
-            height = box.height;
+        // Fit-long: pad the shorter axis, scale by the longer axis, use reference aspect ratio
+        const referenceAspectRatio = img.referenceWidth / img.referenceHeight;
+        const clientAspectRatio = img.clientWidth / img.clientHeight;
+        let scale = 1;
+        let padX = 0;
+        let padY = 0;
+        if (clientAspectRatio > referenceAspectRatio) {
+            // Client image is wider than reference image => scale by width, pad the height
+            scale = img.clientWidth / img.referenceWidth;
+            const padHeight = img.clientHeight - img.referenceHeight * scale;
+            padY = padHeight / 2;
+            x = Math.max(0, Math.min(box.x * img.referenceWidth * scale, img.clientWidth));
+            y = Math.max(0, Math.min(box.y * img.referenceHeight * scale + padY, img.clientHeight));
+            width = Math.min(box.width * img.referenceWidth * scale, img.clientWidth - x);
+            height = Math.min(box.height * img.referenceHeight * scale, img.clientHeight - y);
         }
         else {
-
-
-
-
+            // Client image is taller than reference image => scale by height, pad the width
+            scale = img.clientHeight / img.referenceHeight;
+            const padWidth = img.clientWidth - img.referenceWidth * scale;
+            padX = padWidth / 2;
+            x = Math.max(0, Math.min(box.x * img.referenceWidth * scale + padX, img.clientWidth));
+            y = Math.max(0, Math.min(box.y * img.referenceHeight * scale, img.clientHeight));
+            width = Math.min(box.width * img.referenceWidth * scale, img.clientWidth - x);
+            height = Math.min(box.height * img.referenceHeight * scale, img.clientHeight - y);
        }
-        // Now scale the coordinates to the original image ratio
-        x = Math.max(0, Math.min(x * img.clientWidth, img.clientWidth));
-        y = Math.max(0, Math.min(y * img.clientHeight, img.clientHeight));
-        width = Math.min(width * img.clientWidth, img.clientWidth - x);
-        height = Math.min(height * img.clientHeight, img.clientHeight - y);
     }
     else if (resizeMode === 'crop') {
         // Not expected
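A note on the new mapping: in 'squash' mode the transform reduces to a plain proportional scale from reference space to client space, clamped to the client image. The sketch below is not part of the package; the function name and the 640x480 / 320x320 dimensions are assumed purely for illustration, and it restates the 'squash' branch from the hunk above in TypeScript.

// Illustrative sketch only: the 'squash' branch from the hunk above,
// with assumed example dimensions (640x480 client image, 320x320 reference).
type NormalizedBox = { x: number; y: number; width: number; height: number };
type ImageDims = {
    clientWidth: number; clientHeight: number;
    referenceWidth: number; referenceHeight: number;
};

function squashToClient(box: NormalizedBox, img: ImageDims) {
    // Proportional scale from reference space to client space, clamped to the client image
    const scaleX = img.clientWidth / img.referenceWidth;
    const scaleY = img.clientHeight / img.referenceHeight;
    const x = Math.max(0, Math.min(box.x * img.referenceWidth * scaleX, img.clientWidth));
    const y = Math.max(0, Math.min(box.y * img.referenceHeight * scaleY, img.clientHeight));
    return {
        x, y,
        width: Math.min(box.width * img.referenceWidth * scaleX, img.clientWidth - x),
        height: Math.min(box.height * img.referenceHeight * scaleY, img.clientHeight - y),
    };
}

// A centered box of half the reference size maps to x=160, y=120, width=320, height=240.
console.log(squashToClient(
    { x: 0.25, y: 0.25, width: 0.5, height: 0.5 },
    { clientWidth: 640, clientHeight: 480, referenceWidth: 320, referenceHeight: 320 },
));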
@@ -79,77 +100,46 @@ const mapPredictionToOriginalImage = (resizeMode, box, img) => {
 };
 exports.mapPredictionToOriginalImage = mapPredictionToOriginalImage;
 // Calculates the width/height of the area to be masked (at one side).
-const
-
-
-
-
-
+const getMaskPercent = (side, img) => {
+    // Use reference aspect ratio to determine mask size as a percentage
+    const referenceAspect = img.referenceWidth / img.referenceHeight;
+    const clientAspect = img.clientWidth / img.clientHeight;
+    let percent = 0;
+    if (side === 'horizontal' && clientAspect > referenceAspect) {
+        // Image is wider than reference: mask left/right
+        const displayWidth = img.clientHeight * referenceAspect;
+        percent = ((img.clientWidth - displayWidth) / 2) / img.clientWidth * 100;
     }
-    else if (side === 'vertical' &&
-
-        const
-
+    else if (side === 'vertical' && clientAspect < referenceAspect) {
+        // Image is taller than reference: mask top/bottom
+        const displayHeight = img.clientWidth / referenceAspect;
+        percent = ((img.clientHeight - displayHeight) / 2) / img.clientHeight * 100;
     }
-    return
+    return percent;
 };
-exports.
-
-const
-
-
-
-
-
-
+exports.getMaskPercent = getMaskPercent;
+const maskCropped = (els, img) => {
+    const referenceAspect = img.referenceWidth / img.referenceHeight;
+    const clientAspect = img.clientWidth / img.clientHeight;
+    if (clientAspect > referenceAspect) {
+        // Image is wider than reference: mask left/right
+        const maskPercent = (0, exports.getMaskPercent)('horizontal', img);
+        els.maskLeft.style.width = els.maskRight.style.width = maskPercent + '%';
+        els.maskTop.style.height = els.maskBottom.style.height = '0%';
+        els.maskLeft.style.height = els.maskRight.style.height = '100%';
     }
-    else if (
-        // Image is taller
-        const
-
+    else if (clientAspect < referenceAspect) {
+        // Image is taller than reference: mask top/bottom
+        const maskPercent = (0, exports.getMaskPercent)('vertical', img);
+        els.maskLeft.style.width = els.maskRight.style.width = '0%';
+        els.maskTop.style.height = els.maskBottom.style.height = maskPercent + '%';
+        els.maskTop.style.width = els.maskBottom.style.width = '100%';
+    }
+    else {
+        // Aspect ratios match: no mask
+        els.maskLeft.style.width = els.maskRight.style.width = '0%';
+        els.maskTop.style.height = els.maskBottom.style.height = '0%';
     }
-    return maskSize;
-};
-const mapStructuredResultsBoundingBox = (opts) => {
-    const bb = (0, exports.mapPredictionToOriginalImage)(opts.mode, {
-        label: 'test',
-        x: opts.box.xmin,
-        y: opts.box.ymin,
-        width: (opts.box.xmax - opts.box.xmin),
-        height: (opts.box.ymax - opts.box.ymin),
-    }, {
-        naturalWidth: opts.originalWidth,
-        naturalHeight: opts.originalHeight,
-        clientWidth: opts.originalWidth,
-        clientHeight: opts.originalHeight,
-    });
-    return {
-        x: Math.round(bb.x),
-        y: Math.round(bb.y),
-        width: Math.round(bb.width),
-        height: Math.round(bb.height),
-    };
-};
-exports.mapStructuredResultsBoundingBox = mapStructuredResultsBoundingBox;
-const mapResizedPixelResultsBoundingBox = (opts) => {
-    const bb = (0, exports.mapPredictionToOriginalImage)(opts.mode, {
-        label: 'test',
-        x: opts.box.x / opts.resized.newWidth,
-        y: opts.box.y / opts.resized.newHeight,
-        width: opts.box.width / opts.resized.newWidth,
-        height: opts.box.height / opts.resized.newHeight,
-    }, {
-        naturalWidth: opts.resized.originalWidth,
-        naturalHeight: opts.resized.originalHeight,
-        clientWidth: opts.resized.originalWidth,
-        clientHeight: opts.resized.originalHeight,
-    });
-    return {
-        x: Math.round(bb.x),
-        y: Math.round(bb.y),
-        width: Math.round(bb.width),
-        height: Math.round(bb.height),
-    };
 };
-exports.
+exports.maskCropped = maskCropped;
 //# sourceMappingURL=bounding-box-scaling.js.map
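For the mask helpers, the percentage is simply the cropped-away share of the client image on one side. A quick check of that arithmetic, as a sketch rather than package code, with assumed 640x480 / 320x320 dimensions:

// Illustrative sketch only: the getMaskPercent() arithmetic from the hunk above,
// for an assumed 640x480 client image and a 320x320 (square) reference.
const img = { clientWidth: 640, clientHeight: 480, referenceWidth: 320, referenceHeight: 320 };

const referenceAspect = img.referenceWidth / img.referenceHeight;   // 1.0
const clientAspect = img.clientWidth / img.clientHeight;            // ~1.33, wider than the reference

// Only a 480px-wide central strip matches the reference aspect ratio,
// so each left/right mask covers (640 - 480) / 2 = 80px of the 640px width.
const displayWidth = img.clientHeight * referenceAspect;            // 480
const maskPercent = ((img.clientWidth - displayWidth) / 2) / img.clientWidth * 100;
console.log(maskPercent);                                           // 12.5 (percent per side)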
package/build/shared/views/project/bounding-box-scaling.js.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"bounding-box-scaling.js","sourceRoot":"","sources":["../../../../shared/views/project/bounding-box-scaling.ts"],"names":[],"mappings":";;;AAUA
+
{"version":3,"file":"bounding-box-scaling.js","sourceRoot":"","sources":["../../../../shared/views/project/bounding-box-scaling.ts"],"names":[],"mappings":";;;AAUA;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACI,MAAM,4BAA4B,GAAG,CACxC,UAAsB,EACtB,GAAgB,EAChB,GAYC,EACH,EAAE;IACA,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,IAAI,MAAM,GAAG,CAAC,CAAC;IAEf,IAAI,UAAU,KAAK,QAAQ,EAAE;QACzB,2EAA2E;QAC3E,MAAM,MAAM,GAAG,GAAG,CAAC,WAAW,GAAG,GAAG,CAAC,cAAc,CAAC;QACpD,MAAM,MAAM,GAAG,GAAG,CAAC,YAAY,GAAG,GAAG,CAAC,eAAe,CAAC;QACtD,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,cAAc,GAAG,MAAM,EAAE,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC;QAChF,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,eAAe,GAAG,MAAM,EAAE,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC;QAClF,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,GAAG,GAAG,CAAC,cAAc,GAAG,MAAM,EAAE,GAAG,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC;QAC/E,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,GAAG,GAAG,CAAC,eAAe,GAAG,MAAM,EAAE,GAAG,CAAC,YAAY,GAAG,CAAC,CAAC,CAAC;KACtF;SACI,IAAI,UAAU,KAAK,WAAW,EAAE;QACjC,6FAA6F;QAC7F,MAAM,oBAAoB,GAAG,GAAG,CAAC,cAAc,GAAG,GAAG,CAAC,eAAe,CAAC;QACtE,MAAM,iBAAiB,GAAG,GAAG,CAAC,WAAW,GAAG,GAAG,CAAC,YAAY,CAAC;QAE7D,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,OAAO,GAAG,CAAC,CAAC;QAChB,IAAI,OAAO,GAAG,CAAC,CAAC;QAChB,IAAI,iBAAiB,GAAG,oBAAoB,EAAE;YAC1C,2DAA2D;YAC3D,KAAK,GAAG,GAAG,CAAC,YAAY,GAAG,GAAG,CAAC,eAAe,CAAC;YAC/C,MAAM,SAAS,GAAG,GAAG,CAAC,WAAW,GAAG,GAAG,CAAC,cAAc,GAAG,KAAK,CAAC;YAC/D,OAAO,GAAG,SAAS,GAAG,CAAC,CAAC;SAC3B;aACI;YACD,6DAA6D;YAC7D,KAAK,GAAG,GAAG,CAAC,WAAW,GAAG,GAAG,CAAC,cAAc,CAAC;YAC7C,MAAM,UAAU,GAAG,GAAG,CAAC,YAAY,GAAG,GAAG,CAAC,eAAe,GAAG,KAAK,CAAC;YAClE,OAAO,GAAG,UAAU,GAAG,CAAC,CAAC;SAC5B;QACD,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,cAAc,GAAG,KAAK,GAAG,OAAO,EAAE,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC;QACzF,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,eAAe,GAAG,KAAK,GAAG,OAAO,EAAE,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC;QAC3F,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,GAAG,GAAG,CAAC,cAAc,GAAG,KAAK,EAAE,GAAG,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC;QAC9E,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,GAAG,GAAG,CAAC,eAAe,GAAG,KAAK,EAAE,GAAG,CAAC,YAAY,GAAG,CAAC,CAAC,CAAC;KACrF;SACI,IAAI,UAAU,KAAK,UAAU,EAAE;QAChC,uFAAuF;QACvF,MAAM,oBAAoB,GAAG,GAAG,CAAC,cAAc,GAAG,GAAG,CAAC,eAAe,CAAC;QACtE,MAAM,iBAAiB,GAAG,GAAG,CAAC,WAAW,GAAG,GAAG,CAAC,YAAY,CAAC;QAC7D,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,IAAI,GAAG,CAAC,CAAC;QACb,IAAI,IAAI,GAAG,CAAC,CAAC;QACb,IAAI,iBAAiB,GAAG,oBAAoB,EAAE;YAC1C,+EAA+E;YAC/E,KAAK,GAAG,GAAG,CAAC,WAAW,GAAG,GAAG,CAAC,cAAc,CAAC;YAC7C,MAAM,SAAS,GAAG,GAAG,CAAC,YAAY,GAAG,GAAG,CAAC,eAAe,GAAG,KAAK,CAAC;YACjE,IAAI,GAAG,SAAS,GAAG,CAAC,CAAC;YACrB,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,cAAc,GAAG,KAAK,EAAE,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC;YAC/E,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,eAAe,GAAG,KAAK,GAAG,IAAI,EAAE,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC;YACxF,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,GAAG,GAAG,CAAC,cAAc,GAAG,KAAK,EAAE,GAAG,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC;YAC9E,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,GAAG,GAAG,CAAC,eAAe,GAAG,KAAK,EAAE,GAAG,CAAC,YAAY,GAAG,CAAC,CAAC,CAAC;SACrF;aACI;YACD,gFAAgF;YAChF,KAAK,GAAG,GAAG,CAAC,YAAY,GAAG,GAAG,CAAC,eAAe,CAAC;YAC/C,MAAM,QAAQ,GAAG,GAAG,CAAC,WAAW,GAAG,GAAG,CAAC,cAAc,GAAG,KAAK,CAAC;YAC9D,IAAI,GAAG,QAAQ,GAAG,CAAC,CAAC;YACpB,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,I
AAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,cAAc,GAAG,KAAK,GAAG,IAAI,EAAE,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC;YACtF,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,eAAe,GAAG,KAAK,EAAE,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC;YACjF,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,GAAG,GAAG,CAAC,cAAc,GAAG,KAAK,EAAE,GAAG,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC;YAC9E,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,GAAG,GAAG,CAAC,eAAe,GAAG,KAAK,EAAE,GAAG,CAAC,YAAY,GAAG,CAAC,CAAC,CAAC;SACrF;KACJ;SACI,IAAI,UAAU,KAAK,MAAM,EAAE;QAC5B,eAAe;QACf,OAAO,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC;KAC9C;IAED,OAAO,EAAE,CAAC,EAAE,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC;AACnC,CAAC,CAAC;AA1FW,QAAA,4BAA4B,gCA0FvC;AAEF,sEAAsE;AAC/D,MAAM,cAAc,GAAG,CAC1B,IAA+B,EAC/B,GAKC,EACH,EAAE;IACA,oEAAoE;IACpE,MAAM,eAAe,GAAG,GAAG,CAAC,cAAc,GAAG,GAAG,CAAC,eAAe,CAAC;IACjE,MAAM,YAAY,GAAG,GAAG,CAAC,WAAW,GAAG,GAAG,CAAC,YAAY,CAAC;IACxD,IAAI,OAAO,GAAG,CAAC,CAAC;IAChB,IAAI,IAAI,KAAK,YAAY,IAAI,YAAY,GAAG,eAAe,EAAE;QACzD,iDAAiD;QACjD,MAAM,YAAY,GAAG,GAAG,CAAC,YAAY,GAAG,eAAe,CAAC;QACxD,OAAO,GAAG,CAAC,CAAC,GAAG,CAAC,WAAW,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,WAAW,GAAG,GAAG,CAAC;KAC5E;SACI,IAAI,IAAI,KAAK,UAAU,IAAI,YAAY,GAAG,eAAe,EAAE;QAC5D,kDAAkD;QAClD,MAAM,aAAa,GAAG,GAAG,CAAC,WAAW,GAAG,eAAe,CAAC;QACxD,OAAO,GAAG,CAAC,CAAC,GAAG,CAAC,YAAY,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,YAAY,GAAG,GAAG,CAAC;KAC/E;IACD,OAAO,OAAO,CAAC;AACnB,CAAC,CAAC;AAxBW,QAAA,cAAc,kBAwBzB;AAEK,MAAM,WAAW,GAAG,CAAC,GAK3B,EAAE,GAKF,EAAE,EAAE;IACD,MAAM,eAAe,GAAG,GAAG,CAAC,cAAc,GAAG,GAAG,CAAC,eAAe,CAAC;IACjE,MAAM,YAAY,GAAG,GAAG,CAAC,WAAW,GAAG,GAAG,CAAC,YAAY,CAAC;IAExD,IAAI,YAAY,GAAG,eAAe,EAAE;QAChC,iDAAiD;QACjD,MAAM,WAAW,GAAG,IAAA,sBAAc,EAAC,YAAY,EAAE,GAAG,CAAC,CAAC;QACtD,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,WAAW,GAAG,GAAG,CAAC;QACzE,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,GAAG,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC;QAC9D,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,GAAG,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;KACnE;SACI,IAAI,YAAY,GAAG,eAAe,EAAE;QACrC,kDAAkD;QAClD,MAAM,WAAW,GAAG,IAAA,sBAAc,EAAC,UAAU,EAAE,GAAG,CAAC,CAAC;QACpD,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC;QAC5D,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,GAAG,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,GAAG,WAAW,GAAG,GAAG,CAAC;QAC3E,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC;KACjE;SACI;QACD,+BAA+B;QAC/B,GAAG,CAAC,QAAQ,CAAC,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC;QAC5D,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,GAAG,GAAG,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC;KACjE;AACL,CAAC,CAAC;AAjCW,QAAA,WAAW,eAiCtB"}
package/cli/linux/webserver/public/assets/mobileclient.css
CHANGED

@@ -4184,6 +4184,7 @@ video {
 #preview-image-container {
     position: relative;
     max-width: 100%;
+    display: inline-block;
 }

 #preview-image-container img {

@@ -4243,3 +4244,43 @@
     font-family: monospace;
     font-size: 0.75rem;
 }
+
+.label-mask-left {
+    position: absolute;
+    border: 0;
+    height: 100%;
+    left: 0;
+    top: 0;
+    background: rgba(0, 0, 0, 0.4);
+    pointer-events: none;
+}
+
+.label-mask-right {
+    position: absolute;
+    border: 0;
+    height: 100%;
+    right: 0;
+    top: 0;
+    background: rgba(0, 0, 0, 0.4);
+    pointer-events: none;
+}
+
+.label-mask-top {
+    position: absolute;
+    border: 0;
+    width: 100%;
+    left: 0;
+    top: 0;
+    background: rgba(0, 0, 0, 0.4);
+    pointer-events: none;
+}
+
+.label-mask-bottom {
+    position: absolute;
+    border: 0;
+    width: 100%;
+    left: 0;
+    bottom: 0;
+    background: rgba(0, 0, 0, 0.4);
+    pointer-events: none;
+}
package/cli/linux/webserver/public/inference-server.js
CHANGED

@@ -10,6 +10,10 @@ window.InferenceServer = async (opts) => {
         previewSectionImgContainer: document.querySelector('#preview-image-container'),
         previewSectionImg: document.querySelector('#preview-section img'),
         features: document.querySelector('#features'),
+        maskLeft: document.querySelector('.label-mask-left'),
+        maskRight: document.querySelector('.label-mask-right'),
+        maskTop: document.querySelector('.label-mask-top'),
+        maskBottom: document.querySelector('.label-mask-bottom')
     };

     const colors = [

@@ -217,6 +221,24 @@ window.InferenceServer = async (opts) => {

             els.previewSectionImgContainer.appendChild(el);
         }
+
+        // Only the fit-shortest resizing mode crops the image
+        if (result.resized && result.resizeMode === 'fit-shortest') {
+            const { originalWidth, originalHeight, newWidth, newHeight } = result.resized;
+            let maskElements = {
+                maskLeft: els.maskLeft,
+                maskRight: els.maskRight,
+                maskTop: els.maskTop,
+                maskBottom: els.maskBottom
+            };
+            const img = {
+                clientWidth: originalWidth,
+                clientHeight: originalHeight,
+                referenceWidth: newWidth,
+                referenceHeight: newHeight,
+            };
+            maskCropped(maskElements, img);
+        }
     };

     els.previewSectionImg.src = dataUrl;

@@ -245,4 +267,50 @@ window.InferenceServer = async (opts) => {
             els.previewSectionImg.onload();
         }
     };
+
+    // Shared from bounding-box-scaling.ts. If we introduce a TypeScript build step we can
+    // share this code directly.
+    const getMaskPercent = (side, img) => {
+        // Use reference aspect ratio to determine mask size as a percentage
+        const referenceAspect = img.referenceWidth / img.referenceHeight;
+        const clientAspect = img.clientWidth / img.clientHeight;
+        let percent = 0;
+        if (side === 'horizontal' && clientAspect > referenceAspect) {
+            // Image is wider than reference: mask left/right
+            const displayWidth = img.clientHeight * referenceAspect;
+            percent = ((img.clientWidth - displayWidth) / 2) / img.clientWidth * 100;
+        }
+        else if (side === 'vertical' && clientAspect < referenceAspect) {
+            // Image is taller than reference: mask top/bottom
+            const displayHeight = img.clientWidth / referenceAspect;
+            percent = ((img.clientHeight - displayHeight) / 2) / img.clientHeight * 100;
+        }
+        return percent;
+    };
+
+    // Shared from bounding-box-scaling.ts
+    const maskCropped = (maskElements, img) => {
+        const referenceAspect = img.referenceWidth / img.referenceHeight;
+        const clientAspect = img.clientWidth / img.clientHeight;
+
+        if (clientAspect > referenceAspect) {
+            // Image is wider than reference: mask left/right
+            const maskPercent = getMaskPercent('horizontal', img);
+            maskElements.maskLeft.style.width = maskElements.maskRight.style.width = maskPercent + '%';
+            maskElements.maskTop.style.height = maskElements.maskBottom.style.height = '0%';
+            maskElements.maskLeft.style.height = maskElements.maskRight.style.height = '100%';
+        }
+        else if (clientAspect < referenceAspect) {
+            // Image is taller than reference: mask top/bottom
+            const maskPercent = getMaskPercent('vertical', img);
+            maskElements.maskLeft.style.width = maskElements.maskRight.style.width = '0%';
+            maskElements.maskTop.style.height = maskElements.maskBottom.style.height = maskPercent + '%';
+            maskElements.maskTop.style.width = maskElements.maskBottom.style.width = '100%';
+        }
+        else {
+            // Aspect ratios match: no mask
+            maskElements.maskLeft.style.width = maskElements.maskRight.style.width = '0%';
+            maskElements.maskTop.style.height = maskElements.maskBottom.style.height = '0%';
+        }
+    };
 };

package/package.json
CHANGED

package/test/gstreamer.test.ts
CHANGED

@@ -5040,6 +5040,7 @@ Freeing pipeline ...
                 }
             }
         });
+        assert.equal(devices.length, 2);
         assert.equal(devices[0].id, '/dev/video2');
         assert.equal(devices[0].name, 'Logitech BRIO (/dev/video2)');
         assert.equal(devices[0].videoSource, 'v4l2src');

@@ -5195,9 +5196,37 @@ Freeing pipeline ...
                 "framerate": 30
             }
         ]));
+
+        assert.equal(devices[1].id, '/dev/video4');
+        assert.equal(devices[1].name, 'Logitech BRIO (/dev/video4)');
+        assert.equal(devices[1].videoSource, 'v4l2src');
+    });
+
+    it("Triple Vision AI Industrial Camera", async () => {
+        const gstMonitorOutput = await fs.readFile('./test/triple-vision-camera-monitor.txt', { encoding: 'utf-8' });
+        const gstInspectOutput = await fs.readFile('./test/qualcomm-rb3-inspect.txt', { encoding: 'utf-8' });
+
+        const devices = await testGetDevices({
+            gstDeviceMonitor: () => gstMonitorOutput,
+            gstInspect: () => gstInspectOutput,
+            modeOverride: 'qualcomm-yupik'
+        });
+        assert.equal(devices.length, 3);
+        assert.equal(devices[0].id, '0');
+        assert.equal(devices[0].name, 'Camera 0 (0)');
+        assert.equal(devices[0].videoSource, 'qtiqmmfsrc');
+
+        assert.equal(devices[1].id, '1');
+        assert.equal(devices[1].name, 'Camera 1 (1)');
+        assert.equal(devices[1].videoSource, 'qtiqmmfsrc');
+
+        assert.equal(devices[2].id, '2');
+        assert.equal(devices[2].name, 'Camera 2 (2)');
+        assert.equal(devices[2].videoSource, 'qtiqmmfsrc');
     });
 });

+
 describe("gstreamer command", () => {
     // https://github.com/edgeimpulse/edgeimpulse/issues/9506
     it("nvidia orin w/ basler camera", async () => {
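The new camera test follows the fixture pattern already used in this file: capture gst-device-monitor-1.0 and gst-inspect-1.0 output into text files under test/ and feed them to the local testGetDevices helper. A hedged sketch of what another case of that shape could look like inside the same describe block; the helper and option names are taken from the hunk above, while the fixture paths and expected values are hypothetical placeholders:

// Sketch only: another fixture-driven case in the style of the diff above.
// `testGetDevices` is the helper already used in test/gstreamer.test.ts;
// the fixture files and expected values here are hypothetical.
it("some other camera (sketch)", async () => {
    const gstMonitorOutput = await fs.readFile('./test/some-camera-monitor.txt', { encoding: 'utf-8' });
    const gstInspectOutput = await fs.readFile('./test/some-camera-inspect.txt', { encoding: 'utf-8' });

    const devices = await testGetDevices({
        gstDeviceMonitor: () => gstMonitorOutput,
        gstInspect: () => gstInspectOutput,
    });

    assert.equal(devices.length, 1);
    assert.equal(devices[0].videoSource, 'v4l2src');
});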