@appium/support 2.55.0 → 2.55.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/image-util.js CHANGED
@@ -3,88 +3,10 @@ import Jimp from 'jimp';
3
3
  import { Buffer } from 'buffer';
4
4
  import { PNG } from 'pngjs';
5
5
  import B from 'bluebird';
6
- import { hasValue } from './util';
7
- import log from './logger';
8
- import { requirePackage } from './node';
9
-
10
-
11
- const { MIME_JPEG, MIME_PNG, MIME_BMP } = Jimp;
12
- let cv = null;
13
-
14
- /**
15
- * @typedef {Object} Region
16
- * @property {number} left - The offset from the left side
17
- * @property {number} top - The offset from the top
18
- * @property {number} width - The width
19
- * @property {number} height - The height
20
- */
21
-
22
- /**
23
- * @typedef {Object} Point
24
- * @property {number} x - The x coordinate
25
- * @property {number} y - The y coordinate
26
- */
27
-
28
- /**
29
- * @typedef {Object} Rect
30
- * @property {number} x - The left coordinate of the top left corner
31
- * @property {number} y - The top coordinate of the top left corner
32
- * @property {number} width - The width
33
- * @property {number} height - The height
34
- */
35
6
 
36
7
  const BYTES_IN_PIXEL_BLOCK = 4;
37
8
  const SCANLINE_FILTER_METHOD = 4;
38
- const DEFAULT_MATCH_THRESHOLD = 0.5;
39
- const MATCH_NEIGHBOUR_THRESHOLD = 10;
40
-
41
- const AVAILABLE_DETECTORS = [
42
- 'AKAZE',
43
- 'AGAST',
44
- 'BRISK',
45
- 'FAST',
46
- 'GFTT',
47
- 'KAZE',
48
- 'MSER',
49
- 'SIFT',
50
- 'ORB',
51
- ];
52
-
53
- const AVAILABLE_MATCHING_FUNCTIONS = [
54
- 'FlannBased',
55
- 'BruteForce',
56
- 'BruteForceL1',
57
- 'BruteForceHamming',
58
- 'BruteForceHammingLut',
59
- 'BruteForceSL2',
60
- ];
61
-
62
- const MATCHING_METHODS = [
63
- 'TM_CCOEFF',
64
- 'TM_CCOEFF_NORMED',
65
- 'TM_CCORR',
66
- 'TM_CCORR_NORMED',
67
- 'TM_SQDIFF',
68
- 'TM_SQDIFF_NORMED',
69
- ];
70
- const DEFAULT_MATCHING_METHOD = 'TM_CCOEFF_NORMED';
71
-
72
- /**
73
- * Transforms matching method name to the actual
74
- * constant value from OpenCV library
75
- *
76
- * @param {string} name One of supported method names
77
- * (see MATCHING_METHODS array above)
78
- * @returns {number} The method value
79
- * @throws {Error} if an unsupported method name is given
80
- */
81
- function toMatchingMethod (name) {
82
- if (!MATCHING_METHODS.includes(name)) {
83
- throw new Error(`The matching method '${name}' is unknown. ` +
84
- `Only the following matching methods are supported: ${MATCHING_METHODS}`);
85
- }
86
- return cv[name];
87
- }
9
+ const { MIME_JPEG, MIME_PNG, MIME_BMP } = Jimp;
88
10
 
89
11
  /**
90
12
  * Utility function to get a Jimp image object from buffer or base64 data. Jimp
@@ -118,494 +40,6 @@ async function getJimpImage (data) {
118
40
  });
119
41
  }
120
42
 
121
- /**
122
- * @throws {Error} If opencv4nodejs module is not installed or cannot be loaded
123
- */
124
- async function initOpenCV () {
125
- if (cv) {
126
- return;
127
- }
128
-
129
- log.debug(`Initializing opencv`);
130
- try {
131
- cv = await requirePackage('opencv4nodejs');
132
- } catch (err) {
133
- log.warn(`Unable to load 'opencv4nodejs': ${err.message}`);
134
- }
135
-
136
- if (!cv) {
137
- throw new Error(`'opencv4nodejs' module is required to use OpenCV features. ` +
138
- `Please install it first ('npm i -g opencv4nodejs') and restart Appium. ` +
139
- 'Read https://github.com/justadudewhohacks/opencv4nodejs#how-to-install for more details on this topic.');
140
- }
141
- }
142
-
143
- /**
144
- * @typedef {Object} MatchComputationResult
145
- * @property {cv.DescriptorMatch} descriptor - OpenCV match descriptor
146
- * @property {Array<cv.KeyPoint>} keyPoints - The array of key points
147
- */
148
-
149
- /**
150
- * Calculates an OpenCV match descriptor of an image, which can be used
151
- * for brute-force matching.
152
- * Read https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_feature2d/py_matcher/py_matcher.html
153
- * for more details.
154
- *
155
- * @param {cv.Mat} img Image data
156
- * @param {cv.FeatureDetector} detector OpenCV feature detector instance
157
- *
158
- * @returns {MatchComputationResult}
159
- */
160
- async function detectAndCompute (img, detector) {
161
- const keyPoints = await detector.detectAsync(img);
162
- const descriptor = await detector.computeAsync(img, keyPoints);
163
- return {
164
- keyPoints,
165
- descriptor
166
- };
167
- }
168
-
169
- /**
170
- * Calculates the bounding rect coordinates for the array of matching points
171
- *
172
- * @param {Array<Point>} matchedPoints Array of matching points
173
- * @returns {Rect} The matching bounding rect or a zero rect if no match
174
- * can be found.
175
- */
176
- function calculateMatchedRect (matchedPoints) {
177
- if (matchedPoints.length < 2) {
178
- return {
179
- x: 0,
180
- y: 0,
181
- width: 0,
182
- height: 0
183
- };
184
- }
185
-
186
- const pointsSortedByDistance = matchedPoints
187
- .map((point) => [Math.sqrt(point.x * point.x + point.y * point.y), point])
188
- .sort((pair1, pair2) => pair1[0] >= pair2[0])
189
- .map((pair) => pair[1]);
190
- const firstPoint = _.head(pointsSortedByDistance);
191
- const lastPoint = _.last(pointsSortedByDistance);
192
- const topLeftPoint = {
193
- x: firstPoint.x <= lastPoint.x ? firstPoint.x : lastPoint.x,
194
- y: firstPoint.y <= lastPoint.y ? firstPoint.y : lastPoint.y,
195
- };
196
- const bottomRightPoint = {
197
- x: firstPoint.x >= lastPoint.x ? firstPoint.x : lastPoint.x,
198
- y: firstPoint.y >= lastPoint.y ? firstPoint.y : lastPoint.y,
199
- };
200
- return {
201
- x: topLeftPoint.x,
202
- y: topLeftPoint.y,
203
- width: bottomRightPoint.x - topLeftPoint.x,
204
- height: bottomRightPoint.y - topLeftPoint.y
205
- };
206
- }
207
-
208
- /**
209
- * Draws a rectangle on the given image matrix
210
- *
211
- * @param {cv.Mat} mat The source image
212
- * @param {Rect} region The region to highlight
213
- *
214
- * @returns {cv.Mat} The same image with the rectangle on it
215
- */
216
- function highlightRegion (mat, region) {
217
- if (region.width <= 0 || region.height <= 0) {
218
- return;
219
- }
220
-
221
- // highlight in red
222
- const color = new cv.Vec(0, 0, 255);
223
- const thickness = 2;
224
- mat.drawRectangle(new cv.Rect(region.x, region.y, region.width, region.height), color, thickness, cv.LINE_8);
225
- return mat;
226
- }
227
-
228
- /**
229
- * @typedef {Object} MatchingOptions
230
- * @property {?string} detectorName ['ORB'] One of possible OpenCV feature detector names
231
- * from `AVAILABLE_DETECTORS` array.
232
- * Some of these methods (FAST, AGAST, GFTT, FAST, SIFT and MSER) are not available
233
- * in the default OpenCV installation and have to be enabled manually before
234
- * library compilation.
235
- * @property {?string} matchFunc ['BruteForce'] The name of the matching function.
236
- * Should be one of `AVAILABLE_MATCHING_FUNCTIONS` array.
237
- * @property {?number|Function} goodMatchesFactor The maximum count of "good" matches
238
- * (e. g. with minimal distances) or a function, which accepts 3 arguments: the current distance,
239
- * minimal distance, maximum distance and returns true or false to include or exclude the match.
240
- * @property {?boolean} visualize [false] Whether to return the resulting visualization
241
- * as an image (useful for debugging purposes)
242
- */
243
-
244
- /**
245
- * @typedef {Object} MatchingResult
246
- * @property {number} count The count of matched edges on both images.
247
- * The more matching edges there are on both images the more similar they are.
248
- * @property {number} totalCount The total count of matched edges on both images.
249
- * It is equal to `count` if `goodMatchesFactor` does not limit the matches,
250
- * otherwise it contains the total count of matches before `goodMatchesFactor` is
251
- * applied.
252
- * @property {?Buffer} visualization The visualization of the matching result
253
- * represented as PNG image buffer. This visualization looks like
254
- * https://user-images.githubusercontent.com/31125521/29702731-c79e3142-8972-11e7-947e-db109d415469.jpg
255
- * @property {Array<Point>} points1 The array of matching points on the first image
256
- * @property {Rect} rect1 The bounding rect for the `matchedPoints1` set or a zero rect
257
- * if not enough matching points are found
258
- * @property {Array<Point>} points2 The array of matching points on the second image
259
- * @property {Rect} rect2 The bounding rect for the `matchedPoints2` set or a zero rect
260
- * if not enough matching points are found
261
- */
262
-
263
- /**
264
- * Calculates the count of common edges between two images.
265
- * The images might be rotated or resized relatively to each other.
266
- *
267
- * @param {Buffer} img1Data The data of the first image packed into a NodeJS buffer
268
- * @param {Buffer} img2Data The data of the second image packed into a NodeJS buffer
269
- * @param {?MatchingOptions} options [{}] Set of matching options
270
- *
271
- * @returns {MatchingResult} Matching result
272
- * @throws {Error} If `detectorName` value is unknown.
273
- */
274
- async function getImagesMatches (img1Data, img2Data, options = {}) {
275
- await initOpenCV();
276
-
277
- const {detectorName = 'ORB', visualize = false,
278
- goodMatchesFactor, matchFunc = 'BruteForce'} = options;
279
- if (!_.includes(AVAILABLE_DETECTORS, detectorName)) {
280
- throw new Error(`'${detectorName}' detector is unknown. ` +
281
- `Only ${JSON.stringify(AVAILABLE_DETECTORS)} detectors are supported.`);
282
- }
283
- if (!_.includes(AVAILABLE_MATCHING_FUNCTIONS, matchFunc)) {
284
- throw new Error(`'${matchFunc}' matching function is unknown. ` +
285
- `Only ${JSON.stringify(AVAILABLE_MATCHING_FUNCTIONS)} matching functions are supported.`);
286
- }
287
-
288
- const detector = new cv[`${detectorName}Detector`]();
289
- const [img1, img2] = await B.all([
290
- cv.imdecodeAsync(img1Data),
291
- cv.imdecodeAsync(img2Data)
292
- ]);
293
- const [result1, result2] = await B.all([
294
- detectAndCompute(img1, detector),
295
- detectAndCompute(img2, detector)
296
- ]);
297
- let matches = [];
298
- try {
299
- matches = await cv[`match${matchFunc}Async`](result1.descriptor, result2.descriptor);
300
- } catch (e) {
301
- throw new Error(`Cannot find any matches between the given images. Try another detection algorithm. ` +
302
- ` Original error: ${e}`);
303
- }
304
- const totalCount = matches.length;
305
- if (hasValue(goodMatchesFactor)) {
306
- if (_.isFunction(goodMatchesFactor)) {
307
- const distances = matches.map((match) => match.distance);
308
- const minDistance = _.min(distances);
309
- const maxDistance = _.max(distances);
310
- matches = matches
311
- .filter((match) => goodMatchesFactor(match.distance, minDistance, maxDistance));
312
- } else {
313
- if (matches.length > goodMatchesFactor) {
314
- matches = matches
315
- .sort((match1, match2) => match1.distance - match2.distance)
316
- .slice(0, goodMatchesFactor);
317
- }
318
- }
319
- }
320
-
321
- const extractPoint = (keyPoints, indexPropertyName) => (match) => {
322
- const {pt, point} = keyPoints[match[indexPropertyName]];
323
- // https://github.com/justadudewhohacks/opencv4nodejs/issues/584
324
- return (pt || point);
325
- };
326
- const points1 = matches.map(extractPoint(result1.keyPoints, 'queryIdx'));
327
- const rect1 = calculateMatchedRect(points1);
328
- const points2 = matches.map(extractPoint(result2.keyPoints, 'trainIdx'));
329
- const rect2 = calculateMatchedRect(points2);
330
-
331
- const result = {
332
- points1,
333
- rect1,
334
- points2,
335
- rect2,
336
- totalCount,
337
- count: matches.length,
338
- };
339
- if (visualize) {
340
- const visualization = cv.drawMatches(img1, img2, result1.keyPoints, result2.keyPoints, matches);
341
- highlightRegion(visualization, rect1);
342
- highlightRegion(visualization, {
343
- x: img1.cols + rect2.x,
344
- y: rect2.y,
345
- width: rect2.width,
346
- height: rect2.height
347
- });
348
- result.visualization = await cv.imencodeAsync('.png', visualization);
349
- }
350
- return result;
351
- }
352
-
353
- /**
354
- * @typedef {Object} SimilarityOptions
355
- * @property {?boolean} visualize [false] Whether to return the resulting visualization
356
- * as an image (useful for debugging purposes)
357
- * @property {string} method [TM_CCOEFF_NORMED] The name of the template matching method.
358
- * Acceptable values are:
359
- * - TM_CCOEFF
360
- * - TM_CCOEFF_NORMED (default)
361
- * - TM_CCORR
362
- * - TM_CCORR_NORMED
363
- * - TM_SQDIFF
364
- * - TM_SQDIFF_NORMED
365
- * Read https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_imgproc/py_template_matching/py_template_matching.html
366
- * for more details.
367
- */
368
-
369
- /**
370
- * @typedef {Object} SimilarityResult
371
- * @property {number} score The similarity score as a float number in range [0.0, 1.0].
372
- * 1.0 is the highest score (means both images are totally equal).
373
- * @property {?Buffer} visualization The visualization of the matching result
374
- * represented as PNG image buffer. This image includes both input pictures where
375
- * difference regions are highlighted with rectangles.
376
- */
377
-
378
- /**
379
- * Calculates the similarity score between two images.
380
- * It is expected, that both images have the same resolution.
381
- *
382
- * @param {Buffer} img1Data The data of the first image packed into a NodeJS buffer
383
- * @param {Buffer} img2Data The data of the second image packed into a NodeJS buffer
384
- * @param {?SimilarityOptions} options [{}] Set of similarity calculation options
385
- *
386
- * @returns {SimilarityResult} The calculation result
387
- * @throws {Error} If the given images have different resolution.
388
- */
389
- async function getImagesSimilarity (img1Data, img2Data, options = {}) {
390
- await initOpenCV();
391
-
392
- const {
393
- method = DEFAULT_MATCHING_METHOD,
394
- visualize = false,
395
- } = options;
396
- let [template, reference] = await B.all([
397
- cv.imdecodeAsync(img1Data),
398
- cv.imdecodeAsync(img2Data)
399
- ]);
400
- if (template.rows !== reference.rows || template.cols !== reference.cols) {
401
- throw new Error('Both images are expected to have the same size in order to ' +
402
- 'calculate the similarity score.');
403
- }
404
- [template, reference] = await B.all([
405
- template.convertToAsync(cv.CV_8UC3),
406
- reference.convertToAsync(cv.CV_8UC3)
407
- ]);
408
-
409
- let matched;
410
- try {
411
- matched = await reference.matchTemplateAsync(template, toMatchingMethod(method));
412
- } catch (e) {
413
- throw new Error(`The reference image did not match to the template one. Original error: ${e.message}`);
414
- }
415
- const minMax = await matched.minMaxLocAsync();
416
- const result = {
417
- score: minMax.maxVal
418
- };
419
- if (visualize) {
420
- const resultMat = new cv.Mat(template.rows, template.cols * 2, cv.CV_8UC3);
421
- await B.all([
422
- reference.copyToAsync(
423
- resultMat.getRegion(new cv.Rect(0, 0, reference.cols, reference.rows))),
424
- template.copyToAsync(
425
- resultMat.getRegion(new cv.Rect(reference.cols, 0, template.cols, template.rows)))
426
- ]);
427
- let mask = reference.absdiff(template);
428
- mask = await mask.cvtColorAsync(cv.COLOR_BGR2GRAY);
429
- let contours = [];
430
- try {
431
- mask = await mask.thresholdAsync(128, 255, cv.THRESH_BINARY | cv.THRESH_OTSU);
432
- contours = await mask.findContoursAsync(cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE);
433
- } catch (ign) {
434
- // No contours can be found, which means, most likely, that images are equal
435
- }
436
- for (const contour of contours) {
437
- const boundingRect = contour.boundingRect();
438
- highlightRegion(resultMat, boundingRect);
439
- highlightRegion(resultMat, {
440
- x: reference.cols + boundingRect.x,
441
- y: boundingRect.y,
442
- width: boundingRect.width,
443
- height: boundingRect.height
444
- });
445
- }
446
- result.visualization = await cv.imencodeAsync('.png', resultMat);
447
- }
448
- return result;
449
- }
450
-
451
- /**
452
- * @typedef {Object} OccurrenceOptions
453
- * @property {?boolean} visualize [false] Whether to return the resulting visualization
454
- * as an image (useful for debugging purposes)
455
- * @property {?float} threshold [0.5] At what normalized threshold to reject
456
- * a match
457
- * @property {?float} multiple [false] find multiple matches in the image
458
- * @property {?number} matchNeighbourThreshold [10] The pixel distance between matches we consider
459
- * to be part of the same template match
460
- */
461
-
462
- /**
463
- * @typedef {Object} OccurrenceResult
464
- * @property {Rect} rect The region of the partial image occurrence
465
- * on the full image
466
- * @property {?Buffer} visualization The visualization of the matching result
467
- * represented as PNG image buffer. On this image the matching
468
- * region is highlighted with a rectangle. If the multiple option is passed,
469
- * all results are highlighted here.
470
- * @property {number} score The similarity score as a float number in range [0.0, 1.0].
471
- * 1.0 is the highest score (means both images are totally equal).
472
- * @property {Array<OccurrenceResult>} multiple The array of matching OccurrenceResults
473
- * - only when multiple option is passed
474
- * @property {string} method [TM_CCOEFF_NORMED] The name of the template matching method.
475
- * Acceptable values are:
476
- * - TM_CCOEFF
477
- * - TM_CCOEFF_NORMED (default)
478
- * - TM_CCORR
479
- * - TM_CCORR_NORMED
480
- * - TM_SQDIFF
481
- * - TM_SQDIFF_NORMED
482
- * Read https://docs.opencv.org/3.0-beta/doc/py_tutorials/py_imgproc/py_template_matching/py_template_matching.html
483
- * for more details.
484
- */
485
-
486
- /**
487
- * Calculates the occurrence position of a partial image in the full
488
- * image.
489
- *
490
- * @param {Buffer} fullImgData The data of the full image packed into a NodeJS buffer
491
- * @param {Buffer} partialImgData The data of the partial image packed into a NodeJS buffer
492
- * @param {?OccurrenceOptions} options [{}] Set of occurrence calculation options
493
- *
494
- * @returns {OccurrenceResult}
495
- * @throws {Error} If no occurrences of the partial image can be found in the full image
496
- */
497
- async function getImageOccurrence (fullImgData, partialImgData, options = {}) {
498
- await initOpenCV();
499
-
500
- const {
501
- visualize = false,
502
- threshold = DEFAULT_MATCH_THRESHOLD,
503
- multiple = false,
504
- matchNeighbourThreshold = MATCH_NEIGHBOUR_THRESHOLD,
505
- method = DEFAULT_MATCHING_METHOD,
506
- } = options;
507
-
508
- const [fullImg, partialImg] = await B.all([
509
- cv.imdecodeAsync(fullImgData),
510
- cv.imdecodeAsync(partialImgData)
511
- ]);
512
- const results = [];
513
- let visualization = null;
514
-
515
- try {
516
- const matched = await fullImg.matchTemplateAsync(partialImg, toMatchingMethod(method));
517
- const minMax = await matched.minMaxLocAsync();
518
-
519
- if (multiple) {
520
- const nonZeroMatchResults = matched.threshold(threshold, 1, cv.THRESH_BINARY)
521
- .convertTo(cv.CV_8U)
522
- .findNonZero();
523
- const matches = filterNearMatches(nonZeroMatchResults, matchNeighbourThreshold);
524
-
525
- for (const {x, y} of matches) {
526
- results.push({
527
- score: matched.at(y, x),
528
- rect: {
529
- x, y,
530
- width: partialImg.cols,
531
- height: partialImg.rows
532
- }
533
- });
534
- }
535
- } else if (minMax.maxVal >= threshold) {
536
- const {x, y} = method.includes('SQDIFF') ? minMax.minLoc : minMax.maxLoc;
537
- results.push({
538
- score: minMax.maxVal,
539
- rect: {
540
- x, y,
541
- width: partialImg.cols,
542
- height: partialImg.rows
543
- }
544
- });
545
- }
546
-
547
- if (_.isEmpty(results)) {
548
- // Below error message, `Cannot find any occurrences` is referenced in find by image
549
- throw new Error(`Match threshold: ${threshold}. Highest match value ` +
550
- `found was ${minMax.maxVal}`);
551
- }
552
- } catch (e) {
553
- // Below error message, `Cannot find any occurrences` is referenced in find by image
554
- throw new Error(`Cannot find any occurrences of the partial image in the full image. ` +
555
- `Original error: ${e.message}`);
556
- }
557
-
558
- if (visualize) {
559
- const fullHighlightedImage = fullImg.copy();
560
-
561
- for (const result of results) {
562
- const singleHighlightedImage = fullImg.copy();
563
-
564
- highlightRegion(singleHighlightedImage, result.rect);
565
- highlightRegion(fullHighlightedImage, result.rect);
566
- result.visualization = await cv.imencodeAsync('.png', singleHighlightedImage);
567
- }
568
- visualization = await cv.imencodeAsync('.png', fullHighlightedImage);
569
- }
570
-
571
- return {
572
- rect: results[0].rect,
573
- score: results[0].score,
574
- visualization,
575
- multiple: results
576
- };
577
- }
578
-
579
- /**
580
- * Filter out match results which have a matched neighbour
581
- *
582
- * @param {Array<Point>} nonZeroMatchResults matrix of image match results
583
- * @param {number} matchNeighbourThreshold The pixel distance within which we
584
- * consider an element being a neighbour of an existing match
585
- * @return {Array<Point>} the filtered array of matched points
586
- */
587
- function filterNearMatches (nonZeroMatchResults, matchNeighbourThreshold) {
588
- return nonZeroMatchResults.reduce((acc, element) => {
589
- if (!acc.some((match) => distance(match, element) <= matchNeighbourThreshold)) {
590
- acc.push(element);
591
- }
592
- return acc;
593
- }, []);
594
- }
595
-
596
- /**
597
- * Find the distance between two points
598
- *
599
- * @param {Point} point1 The first point
600
- * @param {Point} point2 The second point
601
- * @return {number} the distance
602
- */
603
- function distance (point1, point2) {
604
- const a2 = Math.pow((point1.x - point2.x), 2);
605
- const b2 = Math.pow((point1.y - point2.y), 2);
606
- return Math.sqrt(a2 + b2);
607
- }
608
-
609
43
  /**
610
44
  * Crop the image by given rectangle (use base64 string as input and output)
611
45
  *
@@ -701,7 +135,6 @@ function getRectIntersection (rect, imageSize) {
701
135
  }
702
136
 
703
137
  export {
704
- cropBase64Image, base64ToImage, imageToBase64, cropImage, getImagesMatches,
705
- getImagesSimilarity, getImageOccurrence, getJimpImage, MIME_JPEG, MIME_PNG,
706
- MIME_BMP,
138
+ cropBase64Image, base64ToImage, imageToBase64, cropImage,
139
+ getJimpImage, MIME_JPEG, MIME_PNG, MIME_BMP
707
140
  };
package/lib/index.js ADDED
@@ -0,0 +1,28 @@
1
+ import * as tempDir from './tempdir';
2
+ import * as system from './system';
3
+ import * as util from './util';
4
+ import * as fsIndex from './fs';
5
+ import * as net from './net';
6
+ import * as plist from './plist';
7
+ import * as mkdirpIndex from './mkdirp';
8
+ import * as logger from './logging';
9
+ import * as process from './process';
10
+ import * as zip from './zip';
11
+ import * as imageUtil from './image-util';
12
+ import * as mjpeg from './mjpeg';
13
+ import * as node from './node';
14
+ import * as timing from './timing';
15
+
16
+
17
+ const { fs } = fsIndex;
18
+ const { cancellableDelay } = util;
19
+ const { mkdirp } = mkdirpIndex;
20
+
21
+ export {
22
+ tempDir, system, util, fs, cancellableDelay, plist, mkdirp, logger, process,
23
+ zip, imageUtil, net, mjpeg, node, timing,
24
+ };
25
+ export default {
26
+ tempDir, system, util, fs, cancellableDelay, plist, mkdirp, logger, process,
27
+ zip, imageUtil, net, mjpeg, node, timing,
28
+ };
@@ -103,7 +103,7 @@ class SecureValuesPreprocessor {
103
103
  /**
104
104
  * Loads rules from the given JSON file
105
105
  *
106
- * @param {string|Array<string|Rule>} source The full path to the JSON file containing secure
106
+ * @param {string|string[]|Rule[]} source The full path to the JSON file containing secure
107
107
  * values replacement rules or the rules themselves represented as an array
108
108
  * @throws {Error} If the format of the source file is invalid or
109
109
  * it does not exist
package/lib/logging.js CHANGED
@@ -117,7 +117,7 @@ function getLogger (prefix = null) {
117
117
  * appear in Appium logs.
118
118
  * Each call to this method replaces the previously loaded rules if any existed.
119
119
  *
120
- * @param {string} rulesJsonPath The full path to the JSON file containing
120
+ * @param {string|string[]|Rule[]} rulesJsonPath The full path to the JSON file containing
121
121
  * the replacement rules. Each rule could either be a string to be replaced
122
122
  * or an object with predefined properties. See the `Rule` type definition in
123
123
  * `log-internals.js` to get more details on its format.