@appium/support 7.0.4 → 7.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/lib/console.d.ts +42 -88
- package/build/lib/console.d.ts.map +1 -1
- package/build/lib/console.js +25 -85
- package/build/lib/console.js.map +1 -1
- package/build/lib/doctor.d.ts +6 -18
- package/build/lib/doctor.d.ts.map +1 -1
- package/build/lib/doctor.js +0 -15
- package/build/lib/doctor.js.map +1 -1
- package/build/lib/env.d.ts +14 -20
- package/build/lib/env.d.ts.map +1 -1
- package/build/lib/env.js +24 -61
- package/build/lib/env.js.map +1 -1
- package/build/lib/fs.d.ts +109 -148
- package/build/lib/fs.d.ts.map +1 -1
- package/build/lib/fs.js +130 -230
- package/build/lib/fs.js.map +1 -1
- package/build/lib/image-util.d.ts +7 -6
- package/build/lib/image-util.d.ts.map +1 -1
- package/build/lib/image-util.js +9 -6
- package/build/lib/image-util.js.map +1 -1
- package/build/lib/index.d.ts +19 -17
- package/build/lib/index.d.ts.map +1 -1
- package/build/lib/logger.d.ts +1 -1
- package/build/lib/logger.d.ts.map +1 -1
- package/build/lib/logger.js +1 -1
- package/build/lib/logger.js.map +1 -1
- package/build/lib/logging.d.ts +7 -15
- package/build/lib/logging.d.ts.map +1 -1
- package/build/lib/logging.js +36 -62
- package/build/lib/logging.js.map +1 -1
- package/build/lib/mjpeg.d.ts +19 -56
- package/build/lib/mjpeg.d.ts.map +1 -1
- package/build/lib/mjpeg.js +55 -78
- package/build/lib/mjpeg.js.map +1 -1
- package/build/lib/mkdirp.d.ts +4 -1
- package/build/lib/mkdirp.d.ts.map +1 -1
- package/build/lib/mkdirp.js +1 -2
- package/build/lib/mkdirp.js.map +1 -1
- package/build/lib/net.d.ts +52 -90
- package/build/lib/net.d.ts.map +1 -1
- package/build/lib/net.js +104 -193
- package/build/lib/net.js.map +1 -1
- package/build/lib/node.d.ts +16 -17
- package/build/lib/node.d.ts.map +1 -1
- package/build/lib/node.js +115 -120
- package/build/lib/node.js.map +1 -1
- package/build/lib/npm.d.ts +65 -86
- package/build/lib/npm.d.ts.map +1 -1
- package/build/lib/npm.js +64 -122
- package/build/lib/npm.js.map +1 -1
- package/build/lib/plist.d.ts +36 -29
- package/build/lib/plist.d.ts.map +1 -1
- package/build/lib/plist.js +62 -59
- package/build/lib/plist.js.map +1 -1
- package/build/lib/process.d.ts +19 -2
- package/build/lib/process.d.ts.map +1 -1
- package/build/lib/process.js +24 -7
- package/build/lib/process.js.map +1 -1
- package/build/lib/system.d.ts +41 -6
- package/build/lib/system.d.ts.map +1 -1
- package/build/lib/system.js +49 -14
- package/build/lib/system.js.map +1 -1
- package/build/lib/tempdir.d.ts +26 -49
- package/build/lib/tempdir.d.ts.map +1 -1
- package/build/lib/tempdir.js +46 -78
- package/build/lib/tempdir.js.map +1 -1
- package/build/lib/timing.d.ts +28 -22
- package/build/lib/timing.d.ts.map +1 -1
- package/build/lib/timing.js +16 -17
- package/build/lib/timing.js.map +1 -1
- package/build/lib/util.d.ts +164 -181
- package/build/lib/util.d.ts.map +1 -1
- package/build/lib/util.js +198 -253
- package/build/lib/util.js.map +1 -1
- package/build/lib/zip.d.ts +81 -139
- package/build/lib/zip.d.ts.map +1 -1
- package/build/lib/zip.js +235 -283
- package/build/lib/zip.js.map +1 -1
- package/lib/console.ts +139 -0
- package/lib/{doctor.js → doctor.ts} +6 -20
- package/lib/{env.js → env.ts} +34 -62
- package/lib/fs.ts +453 -0
- package/lib/image-util.ts +40 -0
- package/lib/index.ts +1 -0
- package/lib/{logger.js → logger.ts} +1 -1
- package/lib/logging.ts +157 -0
- package/lib/mjpeg.ts +186 -0
- package/lib/{mkdirp.js → mkdirp.ts} +2 -2
- package/lib/net.ts +305 -0
- package/lib/{node.js → node.ts} +136 -135
- package/lib/npm.ts +291 -0
- package/lib/plist.ts +187 -0
- package/lib/process.ts +62 -0
- package/lib/system.ts +95 -0
- package/lib/tempdir.ts +115 -0
- package/lib/{timing.js → timing.ts} +28 -33
- package/lib/util.ts +561 -0
- package/lib/{zip.js → zip.ts} +344 -299
- package/package.json +24 -26
- package/tsconfig.json +3 -5
- package/index.js +0 -1
- package/lib/console.js +0 -173
- package/lib/fs.js +0 -496
- package/lib/image-util.js +0 -32
- package/lib/logging.js +0 -145
- package/lib/mjpeg.js +0 -207
- package/lib/net.js +0 -336
- package/lib/npm.js +0 -310
- package/lib/plist.js +0 -182
- package/lib/process.js +0 -46
- package/lib/system.js +0 -48
- package/lib/tempdir.js +0 -131
- package/lib/util.js +0 -585
package/lib/mjpeg.js
DELETED
|
@@ -1,207 +0,0 @@
|
|
|
1
|
-
import _ from 'lodash';
|
|
2
|
-
import log from './logger';
|
|
3
|
-
import B from 'bluebird';
|
|
4
|
-
import {requireSharp} from './image-util';
|
|
5
|
-
import {Writable} from 'stream';
|
|
6
|
-
import {requirePackage} from './node';
|
|
7
|
-
import axios from 'axios';
|
|
8
|
-
|
|
9
|
-
// lazy load this, as it might not be available
let MJpegConsumer = null;

/**
 * Lazily loads the optional `mjpeg-consumer` package and caches its
 * constructor in the module-level `MJpegConsumer` variable.
 *
 * @throws {Error} If `mjpeg-consumer` module is not installed or cannot be loaded.
 * The original load failure (if any) is appended to the error message instead of
 * being silently discarded, so users can distinguish "not installed" from
 * "installed but broken".
 */
async function initMJpegConsumer() {
  if (!MJpegConsumer) {
    let loadError;
    try {
      MJpegConsumer = await requirePackage('mjpeg-consumer');
    } catch (e) {
      // remember why loading failed so we can surface it below
      loadError = e;
    }
    if (!MJpegConsumer) {
      throw new Error(
        'mjpeg-consumer module is required to use MJPEG-over-HTTP features. ' +
          'Please install it first (npm i -g mjpeg-consumer) and restart Appium.' +
          (loadError ? ` Original error: ${loadError.message}` : '')
      );
    }
  }
}
|
|
28
|
-
|
|
29
|
-
// amount of time to wait for the first image in the stream
const MJPEG_SERVER_TIMEOUT_MS = 10000;

/**
 * Class which stores the last bit of data streamed into it.
 *
 * It is wired as the final destination of the pipeline
 * `HTTP response stream -> MJpegConsumer -> MJpegStream`, so each chunk
 * written to it is one complete JPEG frame produced by the consumer.
 */
class MJpegStream extends Writable {
  /**
   * Number of chunks (frames) written so far; reset to 0 by {@linkcode clear}.
   * @type {number}
   */
  updateCount = 0;

  /**
   * Create an MJpegStream
   * @param {string} mJpegUrl - URL of MJPEG-over-HTTP stream
   * @param {function} [errorHandler=noop] - additional function that will be
   * called in the case of any errors.
   * @param {object} [options={}] - Options to pass to the Writable constructor
   */
  constructor(mJpegUrl, errorHandler = _.noop, options = {}) {
    super(options);

    this.errorHandler = errorHandler;
    this.url = mJpegUrl;
    // initializes all mutable state (lastChunk, consumer, deferred handles, ...)
    this.clear();
  }

  /**
   * Get the base64-encoded version of the JPEG
   *
   * @returns {?string} base64-encoded JPEG image data
   * or `null` if no image can be parsed
   */
  get lastChunkBase64() {
    const lastChunk = /** @type {Buffer} */ (this.lastChunk);
    return !_.isEmpty(this.lastChunk) && _.isBuffer(this.lastChunk)
      ? lastChunk.toString('base64')
      : null;
  }

  /**
   * Get the PNG version of the JPEG buffer
   *
   * @returns {Promise<Buffer?>} PNG image data or `null` if no PNG
   * image can be parsed
   */
  async lastChunkPNG() {
    const lastChunk = /** @type {Buffer} */ (this.lastChunk);
    if (_.isEmpty(lastChunk) || !_.isBuffer(lastChunk)) {
      return null;
    }

    try {
      // sharp is loaded lazily; any conversion failure yields `null` by design
      return await requireSharp()(lastChunk).png().toBuffer();
    } catch {
      return null;
    }
  }

  /**
   * Get the base64-encoded version of the PNG
   *
   * @returns {Promise<string?>} base64-encoded PNG image data
   * or `null` if no image can be parsed
   */
  async lastChunkPNGBase64() {
    const png = await this.lastChunkPNG();
    return png ? png.toString('base64') : null;
  }

  /**
   * Reset internal state
   */
  clear() {
    // deferred-promise handles set up by start(); resolved/rejected from write()/onErr
    this.registerStartSuccess = null;
    this.registerStartFailure = null;
    // the axios HTTP response stream (source of the pipeline)
    this.responseStream = null;
    // the MJpegConsumer transform stream
    this.consumer = null;
    // the most recently received JPEG frame
    this.lastChunk = null;
    this.updateCount = 0;
  }

  /**
   * Start reading the MJpeg stream and storing the last image.
   * Resolves once the first frame has arrived, or rejects after
   * `serverTimeout` ms if no frame was received.
   */
  async start(serverTimeout = MJPEG_SERVER_TIMEOUT_MS) {
    // ensure we're not started already
    this.stop();

    await initMJpegConsumer();

    this.consumer = new MJpegConsumer();
    const url = this.url;
    try {
      this.responseStream = (
        await axios({
          url,
          responseType: 'stream',
          timeout: serverTimeout,
        })
      ).data;
    } catch (e) {
      throw new Error(
        `Cannot connect to the MJPEG stream at ${url}. ` +
          `Original error: ${_.has(e, 'response') ? JSON.stringify(e.response) : /** @type {Error} */ (e).message}`
      );
    }

    const onErr = (/** @type {Error} */ err) => {
      // Make sure we don't get an outdated screenshot if there was an error
      this.lastChunk = null;

      log.error(`Error getting MJpeg screenshot chunk: ${err.message}`);
      this.errorHandler(err);
      if (this.registerStartFailure) {
        this.registerStartFailure(err);
      }
    };
    const onClose = () => {
      log.debug(`The connection to MJPEG server at ${url} has been closed`);
      this.lastChunk = null;
    };
    // use the deferred pattern so we can wait for the start of the stream
    // based on what comes in from an external pipe
    const startPromise = new B((res, rej) => {
      this.registerStartSuccess = res;
      this.registerStartFailure = rej;
    })
      // start a timeout so that if the server does not return data, we don't
      // block forever.
      .timeout(
        serverTimeout,
        `Waited ${serverTimeout}ms but the MJPEG server never sent any images`
      );

    this.responseStream
      .once('close', onClose)
      .on('error', onErr) // ensure we do something with errors
      .pipe(this.consumer) // allow chunking and transforming of jpeg data
      .pipe(this); // send the actual jpegs to ourself

    await startPromise;
  }

  /**
   * Stop reading the MJpeg stream. Ensure we disconnect all the pipes and stop
   * the HTTP request itself. Then reset the state.
   */
  stop() {
    // unpipe in reverse pipeline order before destroying the HTTP stream
    if (this.consumer) {
      this.consumer.unpipe(this);
    }
    if (this.responseStream) {
      if (this.consumer) {
        this.responseStream.unpipe(this.consumer);
      }
      this.responseStream.destroy();
    }
    this.clear();
  }

  /**
   * Override the Writable write() method in order to save the last image and
   * log the number of images we have received
   *
   * NOTE(review): this intentionally replaces Writable's own write()
   * (including its callback/backpressure contract) rather than implementing
   * _write(); always returns `true`, i.e. no backpressure is signalled.
   * @override
   * @param {Buffer} data - binary data streamed from the MJpeg consumer
   */
  write(data) {
    this.lastChunk = data;
    this.updateCount++;

    // resolve the deferred created in start() on the very first frame
    if (this.registerStartSuccess) {
      this.registerStartSuccess();
      this.registerStartSuccess = null;
    }

    return true;
  }
}

export {MJpegStream};
|
package/lib/net.js
DELETED
|
@@ -1,336 +0,0 @@
|
|
|
1
|
-
import _ from 'lodash';
|
|
2
|
-
import {fs} from './fs';
|
|
3
|
-
import B from 'bluebird';
|
|
4
|
-
import {toReadableSizeString} from './util';
|
|
5
|
-
import log from './logger';
|
|
6
|
-
import Ftp from 'jsftp';
|
|
7
|
-
import {Timer} from './timing';
|
|
8
|
-
import axios from 'axios';
|
|
9
|
-
import FormData from 'form-data';
|
|
10
|
-
|
|
11
|
-
// Default network operation timeout: 4 minutes
const DEFAULT_TIMEOUT_MS = 4 * 60 * 1000;

/**
 * Type guard for param to {@linkcode toAxiosAuth}: any plain object is
 * considered a potential credentials holder.
 * @param {any} value
 * @returns {value is AuthCredentials | import('axios').AxiosBasicCredentials}
 */
function isAxiosAuth(value) {
  const looksLikeCredentials = _.isPlainObject(value);
  return looksLikeCredentials;
}
|
|
21
|
-
|
|
22
|
-
/**
 * Converts {@linkcode AuthCredentials} to credentials understood by {@linkcode axios}.
 * Accepts either `{user, pass}` or axios-native `{username, password}` shapes.
 * @param {AuthCredentials | import('axios').AxiosBasicCredentials} [auth]
 * @returns {import('axios').AxiosBasicCredentials?} `null` if either credential part is missing/empty
 */
function toAxiosAuth(auth) {
  if (!isAxiosAuth(auth)) {
    return null;
  }

  const username = 'username' in auth ? auth.username : auth.user;
  const password = 'password' in auth ? auth.password : auth.pass;
  if (!username || !password) {
    return null;
  }
  return {username, password};
}
|
|
38
|
-
|
|
39
|
-
/**
 * Streams a local file to a remote HTTP(S) endpoint, either as a multipart
 * form upload (when `fileFieldName` is set) or as a raw request body.
 *
 * @param {NodeJS.ReadableStream} localFileStream
 * @param {URL} parsedUri
 * @param {HttpUploadOptions & NetOptions} [uploadOptions]
 */
async function uploadFileToHttp(
  localFileStream,
  parsedUri,
  uploadOptions = /** @type {HttpUploadOptions & NetOptions} */ ({})
) {
  const {
    method = 'POST',
    timeout = DEFAULT_TIMEOUT_MS,
    headers,
    auth,
    fileFieldName = 'file',
    formFields,
  } = uploadOptions;
  const {href} = parsedUri;

  /** @type {import('axios').RawAxiosRequestConfig} */
  const requestOpts = {
    url: href,
    method,
    timeout,
    // uploads may be arbitrarily large; disable axios size caps
    maxContentLength: Infinity,
    maxBodyLength: Infinity,
  };
  const axiosAuth = toAxiosAuth(auth);
  if (axiosAuth) {
    requestOpts.auth = axiosAuth;
  }
  if (fileFieldName) {
    // multipart/form-data upload
    const form = new FormData();
    // normalize form fields into [key, value] pairs whatever shape was given
    const fieldPairs = _.isArray(formFields)
      ? formFields
      : _.isPlainObject(formFields)
        ? _.toPairs(formFields)
        : [];
    for (const [fieldName, fieldValue] of fieldPairs) {
      // the file field itself is appended separately below
      if (_.toLower(fieldName) !== _.toLower(fileFieldName)) {
        form.append(fieldName, fieldValue);
      }
    }
    form.append(fileFieldName, localFileStream); // AWS S3 POST upload requires this to be the last field
    requestOpts.headers = {
      ...(_.isPlainObject(headers) ? headers : {}),
      ...form.getHeaders(),
    };
    requestOpts.data = form;
  } else {
    // raw-body upload
    if (_.isPlainObject(headers)) {
      requestOpts.headers = headers;
    }
    requestOpts.data = localFileStream;
  }
  log.debug(
    `Performing ${method} to ${href} with options (excluding data): ` +
      JSON.stringify(_.omit(requestOpts, ['data']))
  );

  const {status, statusText} = await axios(requestOpts);
  log.info(`Server response: ${status} ${statusText}`);
}
|
|
106
|
-
|
|
107
|
-
/**
 * Streams a local file to a remote FTP server.
 *
 * @param {string | Buffer | NodeJS.ReadableStream} localFileStream
 * @param {URL} parsedUri
 * @param {NotHttpUploadOptions & NetOptions} [uploadOptions]
 * @returns {Promise<void>} Resolves when the FTP `put` completes; rejects on any FTP error
 */
async function uploadFileToFtp(
  localFileStream,
  parsedUri,
  uploadOptions = /** @type {NotHttpUploadOptions & NetOptions} */ ({})
) {
  const {auth} = uploadOptions;
  const {hostname, port, protocol, pathname} = parsedUri;

  const ftpOpts = {
    host: hostname,
    // URL#port is an empty string (never `undefined`) when the URI carries no
    // explicit port, so a truthiness check is required here. The previous
    // `!_.isUndefined(port)` check always took the parse branch and produced
    // NaN for portless URIs, making the default of 21 unreachable.
    port: port ? _.parseInt(port) : 21,
  };
  if (auth?.user && auth?.pass) {
    ftpOpts.user = auth.user;
    ftpOpts.pass = auth.pass;
  }
  log.debug(`${protocol} upload options: ${JSON.stringify(ftpOpts)}`);
  // jsftp's put() is callback-based; adapt it to a promise
  return await new B((resolve, reject) => {
    new Ftp(ftpOpts).put(localFileStream, pathname, (err) => {
      if (err) {
        reject(err);
      } else {
        resolve();
      }
    });
  });
}
|
|
139
|
-
|
|
140
|
-
/**
 * Returns `true` if params are valid for {@linkcode uploadFileToHttp},
 * i.e. the destination URL uses the `http` or `https` scheme.
 * @param {any} opts
 * @param {URL} url
 * @returns {opts is HttpUploadOptions & NetOptions}
 */
function isHttpUploadOptions(opts, url) {
  try {
    // accessing .protocol on a non-URL value may throw; treat that as "not HTTP"
    return ['http:', 'https:'].includes(url.protocol);
  } catch {
    return false;
  }
}
|
|
154
|
-
|
|
155
|
-
/**
 * Returns `true` if params are valid for {@linkcode uploadFileToFtp},
 * i.e. the destination URL uses the `ftp` scheme.
 *
 * NOTE(review): only plain `ftp:` is matched, although the error text in
 * `uploadFile` mentions "ftp/ftps" — confirm whether `ftps:` should also be
 * accepted (the underlying FTP client may not support TLS).
 * @param {any} opts
 * @param {URL} url
 * @returns {opts is NotHttpUploadOptions & NetOptions}
 */
function isNotHttpUploadOptions(opts, url) {
  try {
    // accessing .protocol on a non-URL value may throw; treat that as "not FTP"
    return url.protocol === 'ftp:';
  } catch {
    return false;
  }
}
|
|
169
|
-
/**
 * Uploads the given file to a remote location. HTTP(S) and FTP
 * protocols are supported.
 *
 * @param {string} localPath - The path to a file on the local storage.
 * @param {string} remoteUri - The remote URI to upload the file to.
 * @param {(HttpUploadOptions|NotHttpUploadOptions) & NetOptions} [uploadOptions]
 * @returns {Promise<void>}
 * @throws {Error} If `localPath` is not accessible or the protocol of
 * `remoteUri` is unsupported
 */
async function uploadFile(
  localPath,
  remoteUri,
  uploadOptions = /** @type {(HttpUploadOptions|NotHttpUploadOptions) & NetOptions} */ ({})
) {
  if (!(await fs.exists(localPath))) {
    // fixed grammar in the user-facing error message ("does not exists" -> "does not exist")
    throw new Error(`'${localPath}' does not exist or is not accessible`);
  }

  const {isMetered = true} = uploadOptions;
  const url = new URL(remoteUri);
  const {size} = await fs.stat(localPath);
  if (isMetered) {
    log.info(`Uploading '${localPath}' of ${toReadableSizeString(size)} size to '${remoteUri}'`);
  }
  const timer = new Timer().start();
  if (isHttpUploadOptions(uploadOptions, url)) {
    if (!uploadOptions.fileFieldName) {
      // non-multipart upload sends the raw stream as the request body,
      // so the content length must be provided explicitly
      uploadOptions.headers = {
        ...(_.isPlainObject(uploadOptions.headers) ? uploadOptions.headers : {}),
        'Content-Length': size,
      };
    }
    await uploadFileToHttp(fs.createReadStream(localPath), url, uploadOptions);
  } else if (isNotHttpUploadOptions(uploadOptions, url)) {
    await uploadFileToFtp(fs.createReadStream(localPath), url, uploadOptions);
  } else {
    throw new Error(
      `Cannot upload the file at '${localPath}' to '${remoteUri}'. ` +
        `Unsupported remote protocol '${url.protocol}'. ` +
        `Only http/https and ftp/ftps protocols are supported.`
    );
  }
  if (isMetered) {
    log.info(
      `Uploaded '${localPath}' of ${toReadableSizeString(size)} size in ` +
        `${timer.getDuration().asSeconds.toFixed(3)}s`
    );
  }
}
|
|
218
|
-
|
|
219
|
-
/**
|
|
220
|
-
* Downloads the given file via HTTP(S)
|
|
221
|
-
*
|
|
222
|
-
* @param {string} remoteUrl - The remote url
|
|
223
|
-
* @param {string} dstPath - The local path to download the file to
|
|
224
|
-
* @param {DownloadOptions & NetOptions} [downloadOptions]
|
|
225
|
-
* @throws {Error} If download operation fails
|
|
226
|
-
*/
|
|
227
|
-
async function downloadFile(
|
|
228
|
-
remoteUrl,
|
|
229
|
-
dstPath,
|
|
230
|
-
downloadOptions = /** @type {DownloadOptions & NetOptions} */ ({})
|
|
231
|
-
) {
|
|
232
|
-
const {isMetered = true, auth, timeout = DEFAULT_TIMEOUT_MS, headers} = downloadOptions;
|
|
233
|
-
|
|
234
|
-
/**
|
|
235
|
-
* @type {import('axios').RawAxiosRequestConfig}
|
|
236
|
-
*/
|
|
237
|
-
const requestOpts = {
|
|
238
|
-
url: remoteUrl,
|
|
239
|
-
responseType: 'stream',
|
|
240
|
-
timeout,
|
|
241
|
-
};
|
|
242
|
-
const axiosAuth = toAxiosAuth(auth);
|
|
243
|
-
if (axiosAuth) {
|
|
244
|
-
requestOpts.auth = axiosAuth;
|
|
245
|
-
}
|
|
246
|
-
if (_.isPlainObject(headers)) {
|
|
247
|
-
requestOpts.headers = headers;
|
|
248
|
-
}
|
|
249
|
-
|
|
250
|
-
const timer = new Timer().start();
|
|
251
|
-
let responseLength;
|
|
252
|
-
try {
|
|
253
|
-
const writer = fs.createWriteStream(dstPath);
|
|
254
|
-
const {data: responseStream, headers: responseHeaders} = await axios(requestOpts);
|
|
255
|
-
responseLength = parseInt(responseHeaders['content-length'] || '0', 10);
|
|
256
|
-
responseStream.pipe(writer);
|
|
257
|
-
|
|
258
|
-
await new B((resolve, reject) => {
|
|
259
|
-
responseStream.once('error', reject);
|
|
260
|
-
writer.once('finish', resolve);
|
|
261
|
-
writer.once('error', (e) => {
|
|
262
|
-
responseStream.unpipe(writer);
|
|
263
|
-
reject(e);
|
|
264
|
-
});
|
|
265
|
-
});
|
|
266
|
-
} catch (err) {
|
|
267
|
-
throw new Error(`Cannot download the file from ${remoteUrl}: ${err.message}`);
|
|
268
|
-
}
|
|
269
|
-
|
|
270
|
-
const {size} = await fs.stat(dstPath);
|
|
271
|
-
if (responseLength && size !== responseLength) {
|
|
272
|
-
await fs.rimraf(dstPath);
|
|
273
|
-
throw new Error(
|
|
274
|
-
`The size of the file downloaded from ${remoteUrl} (${size} bytes) ` +
|
|
275
|
-
`differs from the one in Content-Length response header (${responseLength} bytes)`
|
|
276
|
-
);
|
|
277
|
-
}
|
|
278
|
-
if (isMetered) {
|
|
279
|
-
const secondsElapsed = timer.getDuration().asSeconds;
|
|
280
|
-
log.debug(
|
|
281
|
-
`${remoteUrl} (${toReadableSizeString(size)}) ` +
|
|
282
|
-
`has been downloaded to '${dstPath}' in ${secondsElapsed.toFixed(3)}s`
|
|
283
|
-
);
|
|
284
|
-
if (secondsElapsed >= 2) {
|
|
285
|
-
const bytesPerSec = Math.floor(size / secondsElapsed);
|
|
286
|
-
log.debug(`Approximate download speed: ${toReadableSizeString(bytesPerSec)}/s`);
|
|
287
|
-
}
|
|
288
|
-
}
|
|
289
|
-
}
|
|
290
|
-
|
|
291
|
-
export {uploadFile, downloadFile};
|
|
292
|
-
|
|
293
|
-
/**
|
|
294
|
-
* Common options for {@linkcode uploadFile} and {@linkcode downloadFile}.
|
|
295
|
-
* @typedef NetOptions
|
|
296
|
-
* @property {boolean} [isMetered=true] - Whether to log the actual download performance
|
|
297
|
-
* (e.g. timings and speed)
|
|
298
|
-
* @property {AuthCredentials} [auth] - Authentication credentials
|
|
299
|
-
*/
|
|
300
|
-
|
|
301
|
-
/**
|
|
302
|
-
* Specific options for {@linkcode downloadFile}.
|
|
303
|
-
* @typedef DownloadOptions
|
|
304
|
-
* @property {number} [timeout] - The actual request timeout in milliseconds; defaults to {@linkcode DEFAULT_TIMEOUT_MS}
|
|
305
|
-
* @property {Record<string,any>} [headers] - Request headers mapping
|
|
306
|
-
*/
|
|
307
|
-
|
|
308
|
-
/**
|
|
309
|
-
* Basic auth credentials; used by {@linkcode NetOptions}.
|
|
310
|
-
* @typedef AuthCredentials
|
|
311
|
-
* @property {string} user - Non-empty user name
|
|
312
|
-
* @property {string} pass - Non-empty password
|
|
313
|
-
*/
|
|
314
|
-
|
|
315
|
-
/**
|
|
316
|
-
* This type is used in {@linkcode uploadFile} if the remote location uses the `ftp` protocol, and distinguishes the type from {@linkcode HttpUploadOptions}.
|
|
317
|
-
* @typedef NotHttpUploadOptions
|
|
318
|
-
* @property {never} headers
|
|
319
|
-
* @property {never} method
|
|
320
|
-
* @property {never} timeout
|
|
321
|
-
* @property {never} fileFieldName
|
|
322
|
-
* @property {never} formFields
|
|
323
|
-
*/
|
|
324
|
-
|
|
325
|
-
/**
|
|
326
|
-
* Specific options for {@linkcode uploadFile} if the remote location uses the `http(s)` protocol
|
|
327
|
-
* @typedef HttpUploadOptions
|
|
328
|
-
* @property {import('@appium/types').HTTPHeaders} [headers] - Additional request headers mapping
|
|
329
|
-
* @property {import('axios').Method} [method='POST'] - The HTTP method used for file upload
|
|
330
|
-
* @property {number} [timeout] - The actual request timeout in milliseconds; defaults to {@linkcode DEFAULT_TIMEOUT_MS}
|
|
331
|
-
* @property {string} [fileFieldName='file'] - The name of the form field containing the file
|
|
332
|
-
* content to be uploaded. Any falsy value make the request to use non-multipart upload
|
|
333
|
-
* @property {Record<string, any> | [string, any][]} [formFields] - The additional form fields
|
|
334
|
-
* to be included into the upload request. This property is only considered if
|
|
335
|
-
* `fileFieldName` is set
|
|
336
|
-
*/
|