@appium/support 2.55.3 → 2.56.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/lib/env.js +102 -0
- package/build/lib/fs.js +74 -56
- package/build/lib/image-util.js +4 -339
- package/build/lib/index.js +18 -6
- package/build/lib/log-internal.js +2 -4
- package/build/lib/logger.js +2 -4
- package/build/lib/logging.js +2 -4
- package/build/lib/mjpeg.js +2 -4
- package/build/lib/mkdirp.js +7 -11
- package/build/lib/net.js +2 -4
- package/build/lib/node.js +99 -2
- package/build/lib/npm.js +240 -0
- package/build/lib/plist.js +2 -4
- package/build/lib/process.js +2 -4
- package/build/lib/system.js +2 -4
- package/build/lib/tempdir.js +2 -4
- package/build/lib/timing.js +2 -4
- package/build/lib/util.js +6 -8
- package/build/lib/zip.js +4 -8
- package/lib/env.js +162 -0
- package/lib/fs.js +193 -69
- package/lib/image-util.js +3 -578
- package/lib/index.js +8 -2
- package/lib/log-internal.js +2 -2
- package/lib/logging.js +1 -1
- package/lib/mkdirp.js +3 -6
- package/lib/net.js +4 -4
- package/lib/node.js +104 -1
- package/lib/npm.js +335 -0
- package/lib/tempdir.js +6 -6
- package/lib/util.js +28 -24
- package/lib/zip.js +7 -8
- package/package.json +21 -9
- package/build/test/assets/sample_binary.plist +0 -0
- package/build/test/assets/sample_text.plist +0 -28
- package/build/test/fs-specs.js +0 -264
- package/build/test/helpers.js +0 -35
- package/build/test/image-util-e2e-specs.js +0 -227
- package/build/test/index-specs.js +0 -49
- package/build/test/log-internals-specs.js +0 -97
- package/build/test/logger/helpers.js +0 -71
- package/build/test/logger/logger-force-specs.js +0 -41
- package/build/test/logger/logger-normal-specs.js +0 -113
- package/build/test/logger/logger-test-specs.js +0 -40
- package/build/test/mjpeg-e2e-specs.js +0 -96
- package/build/test/net-e2e-specs.js +0 -32
- package/build/test/node-e2e-specs.js +0 -22
- package/build/test/plist-specs.js +0 -54
- package/build/test/process-specs.js +0 -104
- package/build/test/system-specs.js +0 -136
- package/build/test/tempdir-specs.js +0 -86
- package/build/test/timing-specs.js +0 -125
- package/build/test/util-e2e-specs.js +0 -136
- package/build/test/util-specs.js +0 -537
- package/build/test/zip-e2e-specs.js +0 -233
package/lib/image-util.js
CHANGED
@@ -3,92 +3,10 @@ import Jimp from 'jimp';
 import { Buffer } from 'buffer';
 import { PNG } from 'pngjs';
 import B from 'bluebird';
-import { hasValue } from './util';
-import log from './logger';
-import { requirePackage } from './node';
-
-
-const OCV_OLD = 'opencv4nodejs';
-const OCV_NEW = '@u4/opencv4nodejs';
-
-
-const { MIME_JPEG, MIME_PNG, MIME_BMP } = Jimp;
-let cv = null;
-
-/**
- * @typedef {Object} Region
- * @property {number} left - The offset from the left side
- * @property {number} top - The offset from the top
- * @property {number} width - The width
- * @property {number} height - The height
- */
-
-/**
- * @typedef {Object} Point
- * @property {number} x - The x coordinate
- * @property {number} y - The y coordinate
- */
-
-/**
- * @typedef {Object} Rect
- * @property {number} x - The top left coordinate
- * @property {number} y - The bottom right coordinate
- * @property {number} width - The width
- * @property {number} height - The height
- */
 
 const BYTES_IN_PIXEL_BLOCK = 4;
 const SCANLINE_FILTER_METHOD = 4;
-const DEFAULT_MATCH_THRESHOLD = 0.5;
-const MATCH_NEIGHBOUR_THRESHOLD = 10;
-
-const AVAILABLE_DETECTORS = [
-  'AKAZE',
-  'AGAST',
-  'BRISK',
-  'FAST',
-  'GFTT',
-  'KAZE',
-  'MSER',
-  'SIFT',
-  'ORB',
-];
-
-const AVAILABLE_MATCHING_FUNCTIONS = [
-  'FlannBased',
-  'BruteForce',
-  'BruteForceL1',
-  'BruteForceHamming',
-  'BruteForceHammingLut',
-  'BruteForceSL2',
-];
-
-const MATCHING_METHODS = [
-  'TM_CCOEFF',
-  'TM_CCOEFF_NORMED',
-  'TM_CCORR',
-  'TM_CCORR_NORMED',
-  'TM_SQDIFF',
-  'TM_SQDIFF_NORMED',
-];
-const DEFAULT_MATCHING_METHOD = 'TM_CCOEFF_NORMED';
-
-/**
- * Transforms matching method name to the actual
- * constant value from OpenCV library
- *
- * @param {string} name One of supported method names
- * (see MATCHING_METHODS array above)
- * @returns {number} The method value
- * @throws {Error} if an unsupported method name is given
- */
-function toMatchingMethod (name) {
-  if (!MATCHING_METHODS.includes(name)) {
-    throw new Error(`The matching method '${name}' is unknown. ` +
-      `Only the following matching methods are supported: ${MATCHING_METHODS}`);
-  }
-  return cv[name];
-}
+const { MIME_JPEG, MIME_PNG, MIME_BMP } = Jimp;
 
 /**
  * Utility function to get a Jimp image object from buffer or base64 data. Jimp
@@ -122,498 +40,6 @@ async function getJimpImage (data) {
   });
 }
 
-/**
- * @throws {Error} If opencv4nodejs module is not installed or cannot be loaded
- */
-async function initOpenCV () {
-  if (cv) {
-    return;
-  }
-
-  log.debug(`Initializing opencv`);
-  for (const ocvPackage of [OCV_OLD, OCV_NEW]) {
-    try {
-      log.debug(`Attempting to load '${ocvPackage}'`);
-      cv = await requirePackage(ocvPackage);
-      break;
-    } catch (err) {
-      log.warn(`Unable to load '${ocvPackage}': ${err.message}`);
-    }
-  }
-
-  if (!cv) {
-    throw new Error(`An opencv node module is required to use OpenCV features. ` +
-      `Please install one first (e.g., 'npm i -g ${OCV_NEW}') and restart Appium. ` +
-      'Read https://github.com/UrielCh/opencv4nodejs#how-to-install for more details on this topic.');
-  }
-}
-
-/**
- * @typedef {Object} MatchComputationResult
- * @property {cv.DescriptorMatch} desciptor - OpenCV match descriptor
- * @property {Array<cv.KeyPoint>} keyPoints - The array of key points
- */
-
-/**
- * Calculates an OpenCV match descriptor of an image, which can be used
- * for brute-force matching.
- * Read https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_feature2d/py_matcher/py_matcher.html
- * for more details.
- *
- * @param {cv.Mat} img Image data
- * @param {cv.FeatureDetector} detector OpenCV feature detector instance
- *
- * @returns {MatchComputationResult}
- */
-async function detectAndCompute (img, detector) {
-  const keyPoints = await detector.detectAsync(img);
-  const descriptor = await detector.computeAsync(img, keyPoints);
-  return {
-    keyPoints,
-    descriptor
-  };
-}
-
-/**
- * Calculated the bounding rect coordinates for the array of matching points
- *
- * @param {Array<Point>} matchedPoints Array of matching points
- * @returns {Rect} The matching bounding rect or a zero rect if no match
- * can be found.
- */
-function calculateMatchedRect (matchedPoints) {
-  if (matchedPoints.length < 2) {
-    return {
-      x: 0,
-      y: 0,
-      width: 0,
-      height: 0
-    };
-  }
-
-  const pointsSortedByDistance = matchedPoints
-    .map((point) => [Math.sqrt(point.x * point.x + point.y * point.y), point])
-    .sort((pair1, pair2) => pair1[0] >= pair2[0])
-    .map((pair) => pair[1]);
-  const firstPoint = _.head(pointsSortedByDistance);
-  const lastPoint = _.last(pointsSortedByDistance);
-  const topLeftPoint = {
-    x: firstPoint.x <= lastPoint.x ? firstPoint.x : lastPoint.x,
-    y: firstPoint.y <= lastPoint.y ? firstPoint.y : lastPoint.y,
-  };
-  const bottomRightPoint = {
-    x: firstPoint.x >= lastPoint.x ? firstPoint.x : lastPoint.x,
-    y: firstPoint.y >= lastPoint.y ? firstPoint.y : lastPoint.y,
-  };
-  return {
-    x: topLeftPoint.x,
-    y: topLeftPoint.y,
-    width: bottomRightPoint.x - topLeftPoint.x,
-    height: bottomRightPoint.y - topLeftPoint.y
-  };
-}
-
-/**
- * Draws a rectanngle on the given image matrix
- *
- * @param {cv.Mat} mat The source image
- * @param {Rect} region The region to highlight
- *
- * @returns {cv.Mat} The same image with the rectangle on it
- */
-function highlightRegion (mat, region) {
-  if (region.width <= 0 || region.height <= 0) {
-    return;
-  }
-
-  // highlight in red
-  const color = new cv.Vec(0, 0, 255);
-  const thickness = 2;
-  mat.drawRectangle(new cv.Rect(region.x, region.y, region.width, region.height), color, thickness, cv.LINE_8);
-  return mat;
-}
-
-/**
- * @typedef {Object} MatchingOptions
- * @property {?string} detectorName ['ORB'] One of possible OpenCV feature detector names
- * from `AVAILABLE_DETECTORS` array.
- * Some of these methods (FAST, AGAST, GFTT, FAST, SIFT and MSER) are not available
- * in the default OpenCV installation and have to be enabled manually before
- * library compilation.
- * @property {?string} matchFunc ['BruteForce'] The name of the matching function.
- * Should be one of `AVAILABLE_MATCHING_FUNCTIONS` array.
- * @property {?number|Function} goodMatchesFactor The maximum count of "good" matches
- * (e. g. with minimal distances) or a function, which accepts 3 arguments: the current distance,
- * minimal distance, maximum distance and returns true or false to include or exclude the match.
- * @property {?boolean} visualize [false] Whether to return the resulting visalization
- * as an image (useful for debugging purposes)
- */
-
-/**
- * @typedef {Object} MatchingResult
- * @property {number} count The count of matched edges on both images.
- * The more matching edges there are no both images the more similar they are.
- * @property {number} totalCount The total count of matched edges on both images.
- * It is equal to `count` if `goodMatchesFactor` does not limit the matches,
- * otherwise it contains the total count of matches before `goodMatchesFactor` is
- * applied.
- * @property {?Buffer} visualization The visualization of the matching result
- * represented as PNG image buffer. This visualization looks like
- * https://user-images.githubusercontent.com/31125521/29702731-c79e3142-8972-11e7-947e-db109d415469.jpg
- * @property {Array<Point>} points1 The array of matching points on the first image
- * @property {Rect} rect1 The bounding rect for the `matchedPoints1` set or a zero rect
- * if not enough matching points are found
- * @property {Array<Point>} points2 The array of matching points on the second image
- * @property {Rect} rect2 The bounding rect for the `matchedPoints2` set or a zero rect
- * if not enough matching points are found
- */
-
-/**
- * Calculates the count of common edges between two images.
- * The images might be rotated or resized relatively to each other.
- *
- * @param {Buffer} img1Data The data of the first image packed into a NodeJS buffer
- * @param {Buffer} img2Data The data of the second image packed into a NodeJS buffer
- * @param {?MatchingOptions} options [{}] Set of matching options
- *
- * @returns {MatchingResult} Maching result
- * @throws {Error} If `detectorName` value is unknown.
- */
-async function getImagesMatches (img1Data, img2Data, options = {}) {
-  await initOpenCV();
-
-  const {detectorName = 'ORB', visualize = false,
-    goodMatchesFactor, matchFunc = 'BruteForce'} = options;
-  if (!_.includes(AVAILABLE_DETECTORS, detectorName)) {
-    throw new Error(`'${detectorName}' detector is unknown. ` +
-      `Only ${JSON.stringify(AVAILABLE_DETECTORS)} detectors are supported.`);
-  }
-  if (!_.includes(AVAILABLE_MATCHING_FUNCTIONS, matchFunc)) {
-    throw new Error(`'${matchFunc}' matching function is unknown. ` +
-      `Only ${JSON.stringify(AVAILABLE_MATCHING_FUNCTIONS)} matching functions are supported.`);
-  }
-
-  const detector = new cv[`${detectorName}Detector`]();
-  const [img1, img2] = await B.all([
-    cv.imdecodeAsync(img1Data),
-    cv.imdecodeAsync(img2Data)
-  ]);
-  const [result1, result2] = await B.all([
-    detectAndCompute(img1, detector),
-    detectAndCompute(img2, detector)
-  ]);
-  let matches = [];
-  try {
-    matches = await cv[`match${matchFunc}Async`](result1.descriptor, result2.descriptor);
-  } catch (e) {
-    throw new Error(`Cannot find any matches between the given images. Try another detection algorithm. ` +
-      ` Original error: ${e}`);
-  }
-  const totalCount = matches.length;
-  if (hasValue(goodMatchesFactor)) {
-    if (_.isFunction(goodMatchesFactor)) {
-      const distances = matches.map((match) => match.distance);
-      const minDistance = _.min(distances);
-      const maxDistance = _.max(distances);
-      matches = matches
-        .filter((match) => goodMatchesFactor(match.distance, minDistance, maxDistance));
-    } else {
-      if (matches.length > goodMatchesFactor) {
-        matches = matches
-          .sort((match1, match2) => match1.distance - match2.distance)
-          .slice(0, goodMatchesFactor);
-      }
-    }
-  }
-
-  const extractPoint = (keyPoints, indexPropertyName) => (match) => {
-    const {pt, point} = keyPoints[match[indexPropertyName]];
-    // https://github.com/justadudewhohacks/opencv4nodejs/issues/584
-    return (pt || point);
-  };
-  const points1 = matches.map(extractPoint(result1.keyPoints, 'queryIdx'));
-  const rect1 = calculateMatchedRect(points1);
-  const points2 = matches.map(extractPoint(result2.keyPoints, 'trainIdx'));
-  const rect2 = calculateMatchedRect(points2);
-
-  const result = {
-    points1,
-    rect1,
-    points2,
-    rect2,
-    totalCount,
-    count: matches.length,
-  };
-  if (visualize) {
-    const visualization = cv.drawMatches(img1, img2, result1.keyPoints, result2.keyPoints, matches);
-    highlightRegion(visualization, rect1);
-    highlightRegion(visualization, {
-      x: img1.cols + rect2.x,
-      y: rect2.y,
-      width: rect2.width,
-      height: rect2.height
-    });
-    result.visualization = await cv.imencodeAsync('.png', visualization);
-  }
-  return result;
-}
-
-/**
- * @typedef {Object} SimilarityOptions
- * @property {?boolean} visualize [false] Whether to return the resulting visalization
- * as an image (useful for debugging purposes)
- * @property {string} method [TM_CCOEFF_NORMED] The name of the template matching method.
- * Acceptable values are:
- * - TM_CCOEFF
- * - TM_CCOEFF_NORMED (default)
- * - TM_CCORR
- * - TM_CCORR_NORMED
- * - TM_SQDIFF
- * - TM_SQDIFF_NORMED
- * Read https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_imgproc/py_template_matching/py_template_matching.html
- * for more details.
- */
-
-/**
- * @typedef {Object} SimilarityResult
- * @property {number} score The similarity score as a float number in range [0.0, 1.0].
- * 1.0 is the highest score (means both images are totally equal).
- * @property {?Buffer} visualization The visualization of the matching result
- * represented as PNG image buffer. This image includes both input pictures where
- * difference regions are highlighted with rectangles.
- */
-
-/**
- * Calculates the similarity score between two images.
- * It is expected, that both images have the same resolution.
- *
- * @param {Buffer} img1Data The data of the first image packed into a NodeJS buffer
- * @param {Buffer} img2Data The data of the second image packed into a NodeJS buffer
- * @param {?SimilarityOptions} options [{}] Set of similarity calculation options
- *
- * @returns {SimilarityResult} The calculation result
- * @throws {Error} If the given images have different resolution.
- */
-async function getImagesSimilarity (img1Data, img2Data, options = {}) {
-  await initOpenCV();
-
-  const {
-    method = DEFAULT_MATCHING_METHOD,
-    visualize = false,
-  } = options;
-  let [template, reference] = await B.all([
-    cv.imdecodeAsync(img1Data),
-    cv.imdecodeAsync(img2Data)
-  ]);
-  if (template.rows !== reference.rows || template.cols !== reference.cols) {
-    throw new Error('Both images are expected to have the same size in order to ' +
-      'calculate the similarity score.');
-  }
-  [template, reference] = await B.all([
-    template.convertToAsync(cv.CV_8UC3),
-    reference.convertToAsync(cv.CV_8UC3)
-  ]);
-
-  let matched;
-  try {
-    matched = await reference.matchTemplateAsync(template, toMatchingMethod(method));
-  } catch (e) {
-    throw new Error(`The reference image did not match to the template one. Original error: ${e.message}`);
-  }
-  const minMax = await matched.minMaxLocAsync();
-  const result = {
-    score: minMax.maxVal
-  };
-  if (visualize) {
-    const resultMat = new cv.Mat(template.rows, template.cols * 2, cv.CV_8UC3);
-    await B.all([
-      reference.copyToAsync(
-        resultMat.getRegion(new cv.Rect(0, 0, reference.cols, reference.rows))),
-      template.copyToAsync(
-        resultMat.getRegion(new cv.Rect(reference.cols, 0, template.cols, template.rows)))
-    ]);
-    let mask = reference.absdiff(template);
-    mask = await mask.cvtColorAsync(cv.COLOR_BGR2GRAY);
-    let contours = [];
-    try {
-      mask = await mask.thresholdAsync(128, 255, cv.THRESH_BINARY | cv.THRESH_OTSU);
-      contours = await mask.findContoursAsync(cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE);
-    } catch (ign) {
-      // No contours can be found, which means, most likely, that images are equal
-    }
-    for (const contour of contours) {
-      const boundingRect = contour.boundingRect();
-      highlightRegion(resultMat, boundingRect);
-      highlightRegion(resultMat, {
-        x: reference.cols + boundingRect.x,
-        y: boundingRect.y,
-        width: boundingRect.width,
-        height: boundingRect.height
-      });
-    }
-    result.visualization = await cv.imencodeAsync('.png', resultMat);
-  }
-  return result;
-}
-
-/**
- * @typedef {Object} OccurrenceOptions
- * @property {?boolean} visualize [false] Whether to return the resulting visalization
- * as an image (useful for debugging purposes)
- * @property {?float} threshold [0.5] At what normalized threshold to reject
- * a match
- * @property {?float} multiple [false] find multiple matches in the image
- * @property {?number} matchNeighbourThreshold [10] The pixel distance between matches we consider
- * to be part of the same template match
- */
-
-/**
- * @typedef {Object} OccurrenceResult
- * @property {Rect} rect The region of the partial image occurence
- * on the full image
- * @property {?Buffer} visualization The visualization of the matching result
- * represented as PNG image buffer. On this image the matching
- * region is highlighted with a rectangle. If the multiple option is passed,
- * all results are highlighted here.
- * @property {number} score The similarity score as a float number in range [0.0, 1.0].
- * 1.0 is the highest score (means both images are totally equal).
- * @property {Array<OccurrenceResult>} multiple The array of matching OccurenceResults
- * - only when multiple option is passed
- * @property {string} method [TM_CCOEFF_NORMED] The name of the template matching method.
- * Acceptable values are:
- * - TM_CCOEFF
- * - TM_CCOEFF_NORMED (default)
- * - TM_CCORR
- * - TM_CCORR_NORMED
- * - TM_SQDIFF
- * - TM_SQDIFF_NORMED
- * Read https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_imgproc/py_template_matching/py_template_matching.html
- * for more details.
- */
-
-/**
- * Calculates the occurrence position of a partial image in the full
- * image.
- *
- * @param {Buffer} fullImgData The data of the full image packed into a NodeJS buffer
- * @param {Buffer} partialImgData The data of the partial image packed into a NodeJS buffer
- * @param {?OccurrenceOptions} options [{}] Set of occurrence calculation options
- *
- * @returns {OccurrenceResult}
- * @throws {Error} If no occurrences of the partial image can be found in the full image
- */
-async function getImageOccurrence (fullImgData, partialImgData, options = {}) {
-  await initOpenCV();
-
-  const {
-    visualize = false,
-    threshold = DEFAULT_MATCH_THRESHOLD,
-    multiple = false,
-    matchNeighbourThreshold = MATCH_NEIGHBOUR_THRESHOLD,
-    method = DEFAULT_MATCHING_METHOD,
-  } = options;
-
-  const [fullImg, partialImg] = await B.all([
-    cv.imdecodeAsync(fullImgData),
-    cv.imdecodeAsync(partialImgData)
-  ]);
-  const results = [];
-  let visualization = null;
-
-  try {
-    const matched = await fullImg.matchTemplateAsync(partialImg, toMatchingMethod(method));
-    const minMax = await matched.minMaxLocAsync();
-
-    if (multiple) {
-      const nonZeroMatchResults = matched.threshold(threshold, 1, cv.THRESH_BINARY)
-        .convertTo(cv.CV_8U)
-        .findNonZero();
-      const matches = filterNearMatches(nonZeroMatchResults, matchNeighbourThreshold);
-
-      for (const {x, y} of matches) {
-        results.push({
-          score: matched.at(y, x),
-          rect: {
-            x, y,
-            width: partialImg.cols,
-            height: partialImg.rows
-          }
-        });
-      }
-    } else if (minMax.maxVal >= threshold) {
-      const {x, y} = method.includes('SQDIFF') ? minMax.minLoc : minMax.maxLoc;
-      results.push({
-        score: minMax.maxVal,
-        rect: {
-          x, y,
-          width: partialImg.cols,
-          height: partialImg.rows
-        }
-      });
-    }
-
-    if (_.isEmpty(results)) {
-      // Below error message, `Cannot find any occurrences` is referenced in find by image
-      throw new Error(`Match threshold: ${threshold}. Highest match value ` +
-        `found was ${minMax.maxVal}`);
-    }
-  } catch (e) {
-    // Below error message, `Cannot find any occurrences` is referenced in find by image
-    throw new Error(`Cannot find any occurrences of the partial image in the full image. ` +
-      `Original error: ${e.message}`);
-  }
-
-  if (visualize) {
-    const fullHighlightedImage = fullImg.copy();
-
-    for (const result of results) {
-      const singleHighlightedImage = fullImg.copy();
-
-      highlightRegion(singleHighlightedImage, result.rect);
-      highlightRegion(fullHighlightedImage, result.rect);
-      result.visualization = await cv.imencodeAsync('.png', singleHighlightedImage);
-    }
-    visualization = await cv.imencodeAsync('.png', fullHighlightedImage);
-  }
-
-  return {
-    rect: results[0].rect,
-    score: results[0].score,
-    visualization,
-    multiple: results
-  };
-}
-
-/**
- * Filter out match results which have a matched neighbour
- *
- * @param {Array<Point>} nonZeroMatchResults matrix of image match results
- * @param {number} matchNeighbourThreshold The pixel distance within which we
- * consider an element being a neighbour of an existing match
- * @return {Array<Point>} the filtered array of matched points
- */
-function filterNearMatches (nonZeroMatchResults, matchNeighbourThreshold) {
-  return nonZeroMatchResults.reduce((acc, element) => {
-    if (!acc.some((match) => distance(match, element) <= matchNeighbourThreshold)) {
-      acc.push(element);
-    }
-    return acc;
-  }, []);
-}
-
-/**
- * Find the distance between two points
- *
- * @param {Point} point1 The first point
- * @param {Point} point2 The second point
- * @return {number} the distance
- */
-function distance (point1, point2) {
-  const a2 = Math.pow((point1.x - point2.x), 2);
-  const b2 = Math.pow((point1.y - point2.y), 2);
-  return Math.sqrt(a2 + b2);
-}
-
 /**
  * Crop the image by given rectangle (use base64 string as input and output)
  *
@@ -709,7 +135,6 @@ function getRectIntersection (rect, imageSize) {
 }
 
 export {
-  cropBase64Image, base64ToImage, imageToBase64, cropImage,
-
-  MIME_BMP,
+  cropBase64Image, base64ToImage, imageToBase64, cropImage,
+  getJimpImage, MIME_JPEG, MIME_PNG, MIME_BMP
 };
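
The image-util.js change above removes the OpenCV-backed helpers (initOpenCV, getImagesMatches, getImagesSimilarity, getImageOccurrence and their supporting constants), leaving only the Jimp-based helpers in the export list. A minimal consumer-side sketch of the surviving surface, assuming a local PNG file; the file path is illustrative only:

import { imageUtil } from '@appium/support';
import { promises as fsp } from 'fs';

async function inspectScreenshot () {
  // getJimpImage accepts a Buffer or a base64 string (per its JSDoc shown above)
  const pngBuffer = await fsp.readFile('./screenshot.png'); // illustrative path
  const image = await imageUtil.getJimpImage(pngBuffer);
  console.log(`loaded a ${image.bitmap.width}x${image.bitmap.height} image`);
}
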
package/lib/index.js
CHANGED
@@ -12,17 +12,23 @@ import * as imageUtil from './image-util';
 import * as mjpeg from './mjpeg';
 import * as node from './node';
 import * as timing from './timing';
+import * as env from './env';
 
+export { npm } from './npm';
 
 const { fs } = fsIndex;
 const { cancellableDelay } = util;
+/**
+ * Alias for `fs.mkdir(dir, {recursive: true}`). Use `fs.mkdirp` instead.
+ * @deprecated
+ */
 const { mkdirp } = mkdirpIndex;
 
 export {
   tempDir, system, util, fs, cancellableDelay, plist, mkdirp, logger, process,
-  zip, imageUtil, net, mjpeg, node, timing,
+  zip, imageUtil, net, mjpeg, node, timing, env
 };
 export default {
   tempDir, system, util, fs, cancellableDelay, plist, mkdirp, logger, process,
-  zip, imageUtil, net, mjpeg, node, timing,
+  zip, imageUtil, net, mjpeg, node, timing, env
 };
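
The index.js change wires up the two new modules added in this release: `env` joins both the named and the default export, and `npm` is re-exported from the new lib/npm.js. A minimal consumer-side sketch covering only the import surface (the members of `env` and `npm` are not shown in this diff):

import { env, npm, fs } from '@appium/support';

// `env` and `npm` are namespaces introduced in 2.56.x; existing exports such
// as `fs` continue to work unchanged.
console.log(typeof env, typeof npm, typeof fs);
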
package/lib/log-internal.js
CHANGED
@@ -4,7 +4,7 @@ import _ from 'lodash';
 const DEFAULT_REPLACER = '**SECURE**';
 
 /**
- * @typedef
+ * @typedef SecureValuePreprocessingRule
  * @property {RegExp} pattern The parsed pattern which is going to be used for replacement
  * @property {string} replacer [DEFAULT_SECURE_REPLACER] The replacer value to use. By default
  * equals to `DEFAULT_SECURE_REPLACER`
@@ -24,7 +24,7 @@ class SecureValuesPreprocessor {
 }
 
 /**
- * @typedef
+ * @typedef Rule
  * @property {string} pattern A valid RegExp pattern to be replaced
  * @property {string} text A text match to replace. Either this property or the
  * above one must be provided. `pattern` has priority over `text` if both are provided.
package/lib/logging.js
CHANGED
@@ -104,7 +104,7 @@ function getLogger (prefix = null) {
 }
 
 /**
- * @typedef
+ * @typedef LoadResult
  * @property {List<string>} issues The list of rule parsing issues (one item per rule).
  * Rules with issues are skipped. An empty list is returned if no parsing issues exist.
  * @property {List<SecureValuePreprocessingRule>} rules The list of successfully loaded
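
The logging.js hunk only names a previously anonymous `@typedef` (LoadResult); the getLogger API visible in the hunk header is unchanged. For reference, a minimal usage sketch of that API:

import { logger } from '@appium/support';

const log = logger.getLogger('MyModule'); // signature taken from the hunk header above
log.info('prefixed log line');
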
package/lib/mkdirp.js
CHANGED
@@ -1,9 +1,6 @@
-import
-
+import { fs } from './fs';
 /**
- *
- * of fs.mkdir(dir, {recursive: true});
+ * @deprecated Use `fs.mkdirp` instead.
  */
-
-
+const { mkdirp } = fs;
 export { mkdirp };
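
After this change lib/mkdirp.js is just a deprecated re-export of `fs.mkdirp`. A minimal sketch of the migration the deprecation comment suggests:

import { fs, mkdirp } from '@appium/support';

async function ensureDir (dir) {
  await fs.mkdirp(dir);   // preferred: alias for fs.mkdir(dir, {recursive: true})
  // await mkdirp(dir);   // deprecated alias kept for backwards compatibility
}
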
package/lib/net.js
CHANGED
@@ -111,20 +111,20 @@ async function uploadFileToFtp (localFileStream, parsedUri, uploadOptions = {})
 }
 
 /**
- * @typedef
+ * @typedef AuthCredentials
  * @property {string} user - Non-empty user name
  * @property {string} pass - Non-empty password
  */
 
 /**
- * @typedef
+ * @typedef FtpUploadOptions
  * @property {boolean} isMetered [true] - Whether to log the actual upload performance
  * (e.g. timings and speed)
  * @property {AuthCredentials} auth
 */
 
 /**
- * @typedef
+ * @typedef HttpUploadOptions
  * @property {boolean} isMetered [true] - Whether to log the actual upload performance
  * (e.g. timings and speed)
  * @property {string} method [POST] - The HTTP method used for file upload
@@ -183,7 +183,7 @@ async function uploadFile (localPath, remoteUri, uploadOptions = {}) {
 }
 
 /**
- * @typedef
+ * @typedef DownloadOptions
 * @property {boolean} isMetered [true] - Whether to log the actual download performance
 * (e.g. timings and speed)
 * @property {AuthCredentials} auth
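
The net.js hunks likewise only name the option typedefs (AuthCredentials, FtpUploadOptions, HttpUploadOptions, DownloadOptions). A minimal sketch of passing such an options object to the uploadFile signature shown in the second hunk header; the host, path, and credentials are placeholders, and it assumes uploadFile is exported from the module:

import { net } from '@appium/support';

async function pushLog () {
  await net.uploadFile('/tmp/device.log', 'ftp://uploads.example.com/logs/device.log', {
    isMetered: true,                           // FtpUploadOptions.isMetered
    auth: { user: 'ftpuser', pass: 'secret' }, // AuthCredentials
  });
}
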