@vercel/client 13.0.1 → 13.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/upload.js CHANGED
@@ -1,199 +1,221 @@
  "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
  };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.upload = void 0;
- const http_1 = __importDefault(require("http"));
- const https_1 = __importDefault(require("https"));
- const stream_1 = require("stream");
- const events_1 = require("events");
- const async_retry_1 = __importDefault(require("async-retry"));
- const async_sema_1 = require("async-sema");
- const utils_1 = require("./utils");
- const errors_1 = require("./errors");
- const deploy_1 = require("./deploy");
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var upload_exports = {};
+ __export(upload_exports, {
+ upload: () => upload
+ });
+ module.exports = __toCommonJS(upload_exports);
+ var import_http = __toESM(require("http"));
+ var import_https = __toESM(require("https"));
+ var import_stream = require("stream");
+ var import_events = require("events");
+ var import_async_retry = __toESM(require("async-retry"));
+ var import_async_sema = require("async-sema");
+ var import_utils = require("./utils");
+ var import_errors = require("./errors");
+ var import_deploy = require("./deploy");
  const isClientNetworkError = (err) => {
- if (err.message) {
- // These are common network errors that may happen occasionally and we should retry if we encounter these
- return (err.message.includes('ETIMEDOUT') ||
- err.message.includes('ECONNREFUSED') ||
- err.message.includes('ENOTFOUND') ||
- err.message.includes('ECONNRESET') ||
- err.message.includes('EAI_FAIL') ||
- err.message.includes('socket hang up') ||
- err.message.includes('network socket disconnected'));
- }
- return false;
+ if (err.message) {
+ return err.message.includes("ETIMEDOUT") || err.message.includes("ECONNREFUSED") || err.message.includes("ENOTFOUND") || err.message.includes("ECONNRESET") || err.message.includes("EAI_FAIL") || err.message.includes("socket hang up") || err.message.includes("network socket disconnected");
+ }
+ return false;
  };
  async function* upload(files, clientOptions, deploymentOptions) {
- const { token, teamId, apiUrl, userAgent } = clientOptions;
- const debug = (0, utils_1.createDebug)(clientOptions.debug);
- if (!files && !token && !teamId) {
- debug(`Neither 'files', 'token' nor 'teamId are present. Exiting`);
- return;
+ const { token, teamId, apiUrl, userAgent } = clientOptions;
+ const debug = (0, import_utils.createDebug)(clientOptions.debug);
+ if (!files && !token && !teamId) {
+ debug(`Neither 'files', 'token' nor 'teamId are present. Exiting`);
+ return;
+ }
+ let shas = [];
+ debug("Determining necessary files for upload...");
+ for await (const event of (0, import_deploy.deploy)(files, clientOptions, deploymentOptions)) {
+ if (event.type === "error") {
+ if (event.payload.code === "missing_files") {
+ shas = event.payload.missing;
+ debug(`${shas.length} files are required to upload`);
+ } else {
+ return yield event;
+ }
+ } else {
+ if (event.type === "alias-assigned") {
+ debug("Deployment succeeded on file check");
+ return yield event;
+ }
+ yield event;
  }
- let shas = [];
- debug('Determining necessary files for upload...');
- for await (const event of (0, deploy_1.deploy)(files, clientOptions, deploymentOptions)) {
- if (event.type === 'error') {
- if (event.payload.code === 'missing_files') {
- shas = event.payload.missing;
- debug(`${shas.length} files are required to upload`);
- }
- else {
- return yield event;
- }
+ }
+ const uploads = shas.map((sha) => {
+ return new UploadProgress(sha, files.get(sha));
+ });
+ yield {
+ type: "file-count",
+ payload: { total: files, missing: shas, uploads }
+ };
+ const uploadList = {};
+ debug("Building an upload list...");
+ const semaphore = new import_async_sema.Sema(50, { capacity: 50 });
+ const defaultAgent = apiUrl?.startsWith("https://") ? new import_https.default.Agent({ keepAlive: true }) : new import_http.default.Agent({ keepAlive: true });
+ shas.forEach((sha, index) => {
+ const uploadProgress = uploads[index];
+ uploadList[sha] = (0, import_async_retry.default)(
+ async (bail) => {
+ const file = files.get(sha);
+ if (!file) {
+ debug(`File ${sha} is undefined. Bailing`);
+ return bail(new Error(`File ${sha} is undefined`));
  }
- else {
- // If the deployment has succeeded here, don't continue
- if (event.type === 'alias-assigned') {
- debug('Deployment succeeded on file check');
- return yield event;
- }
- yield event;
+ await semaphore.acquire();
+ const { data } = file;
+ if (typeof data === "undefined") {
+ return;
  }
- }
- const uploads = shas.map(sha => {
- return new UploadProgress(sha, files.get(sha));
- });
- yield {
- type: 'file-count',
- payload: { total: files, missing: shas, uploads },
- };
- const uploadList = {};
- debug('Building an upload list...');
- const semaphore = new async_sema_1.Sema(50, { capacity: 50 });
- const defaultAgent = apiUrl?.startsWith('https://')
- ? new https_1.default.Agent({ keepAlive: true })
- : new http_1.default.Agent({ keepAlive: true });
- shas.forEach((sha, index) => {
- const uploadProgress = uploads[index];
- uploadList[sha] = (0, async_retry_1.default)(async (bail) => {
- const file = files.get(sha);
- if (!file) {
- debug(`File ${sha} is undefined. Bailing`);
- return bail(new Error(`File ${sha} is undefined`));
- }
- await semaphore.acquire();
- const { data } = file;
- if (typeof data === 'undefined') {
- // Directories don't need to be uploaded
- return;
- }
- uploadProgress.bytesUploaded = 0;
- // Split out into chunks
- const body = new stream_1.Readable();
- const originalRead = body.read.bind(body);
- body.read = function (...args) {
- const chunk = originalRead(...args);
- if (chunk) {
- uploadProgress.bytesUploaded += chunk.length;
- uploadProgress.emit('progress');
- }
- return chunk;
- };
- const chunkSize = 16384; /* 16kb - default Node.js `highWaterMark` */
- for (let i = 0; i < data.length; i += chunkSize) {
- const chunk = data.slice(i, i + chunkSize);
- body.push(chunk);
- }
- body.push(null);
- let err;
- let result;
- try {
- const res = await (0, utils_1.fetch)(utils_1.API_FILES, token, {
- agent: clientOptions.agent || defaultAgent,
- method: 'POST',
- headers: {
- 'Content-Type': 'application/octet-stream',
- 'Content-Length': data.length,
- 'x-now-digest': sha,
- 'x-now-size': data.length,
- },
- body,
- teamId,
- apiUrl,
- userAgent,
- }, clientOptions.debug, true);
- if (res.status === 200) {
- debug(`File ${sha} (${file.names[0]}${file.names.length > 1 ? ` +${file.names.length}` : ''}) uploaded`);
- result = {
- type: 'file-uploaded',
- payload: { sha, file },
- };
- }
- else if (res.status > 200 && res.status < 500) {
- // If something is wrong with our request, we don't retry
- debug(`An internal error occurred in upload request. Not retrying...`);
- const { error } = await res.json();
- err = new errors_1.DeploymentError(error);
- }
- else {
- // If something is wrong with the server, we retry
- debug(`A server error occurred in upload request. Retrying...`);
- const { error } = await res.json();
- throw new errors_1.DeploymentError(error);
- }
- }
- catch (e) {
- debug(`An unexpected error occurred in upload promise:\n${e}`);
- err = new Error(e);
- }
- semaphore.release();
- if (err) {
- if (isClientNetworkError(err)) {
- debug('Network error, retrying: ' + err.message);
- // If it's a network error, we retry
- throw err;
- }
- else {
- debug('Other error, bailing: ' + err.message);
- // Otherwise we bail
- return bail(err);
- }
- }
- return result;
- }, {
- retries: 5,
- factor: 6,
- minTimeout: 10,
- });
- });
- debug('Starting upload');
- while (Object.keys(uploadList).length > 0) {
+ uploadProgress.bytesUploaded = 0;
+ const body = new import_stream.Readable();
+ const originalRead = body.read.bind(body);
+ body.read = function(...args) {
+ const chunk = originalRead(...args);
+ if (chunk) {
+ uploadProgress.bytesUploaded += chunk.length;
+ uploadProgress.emit("progress");
+ }
+ return chunk;
+ };
+ const chunkSize = 16384;
+ for (let i = 0; i < data.length; i += chunkSize) {
+ const chunk = data.slice(i, i + chunkSize);
+ body.push(chunk);
+ }
+ body.push(null);
+ let err;
+ let result;
  try {
- const event = await Promise.race(Object.keys(uploadList).map((key) => uploadList[key]));
- delete uploadList[event.payload.sha];
- yield event;
+ const res = await (0, import_utils.fetch)(
+ import_utils.API_FILES,
+ token,
+ {
+ agent: clientOptions.agent || defaultAgent,
+ method: "POST",
+ headers: {
+ "Content-Type": "application/octet-stream",
+ "Content-Length": data.length,
+ "x-now-digest": sha,
+ "x-now-size": data.length
+ },
+ body,
+ teamId,
+ apiUrl,
+ userAgent
+ },
+ clientOptions.debug,
+ true
+ );
+ if (res.status === 200) {
+ debug(
+ `File ${sha} (${file.names[0]}${file.names.length > 1 ? ` +${file.names.length}` : ""}) uploaded`
+ );
+ result = {
+ type: "file-uploaded",
+ payload: { sha, file }
+ };
+ } else if (res.status > 200 && res.status < 500) {
+ debug(
+ `An internal error occurred in upload request. Not retrying...`
+ );
+ const { error } = await res.json();
+ err = new import_errors.DeploymentError(error);
+ } else {
+ debug(`A server error occurred in upload request. Retrying...`);
+ const { error } = await res.json();
+ throw new import_errors.DeploymentError(error);
+ }
+ } catch (e) {
+ debug(`An unexpected error occurred in upload promise:
+ ${e}`);
+ err = new Error(e);
  }
- catch (e) {
- return yield { type: 'error', payload: e };
+ semaphore.release();
+ if (err) {
+ if (isClientNetworkError(err)) {
+ debug("Network error, retrying: " + err.message);
+ throw err;
+ } else {
+ debug("Other error, bailing: " + err.message);
+ return bail(err);
+ }
  }
- }
- debug('All files uploaded');
- yield { type: 'all-files-uploaded', payload: files };
+ return result;
+ },
+ {
+ retries: 5,
+ factor: 6,
+ minTimeout: 10
+ }
+ );
+ });
+ debug("Starting upload");
+ while (Object.keys(uploadList).length > 0) {
  try {
- debug('Starting deployment creation');
- for await (const event of (0, deploy_1.deploy)(files, clientOptions, deploymentOptions)) {
- if (event.type === 'alias-assigned') {
- debug('Deployment is ready');
- return yield event;
- }
- yield event;
- }
+ const event = await Promise.race(
+ Object.keys(uploadList).map((key) => uploadList[key])
+ );
+ delete uploadList[event.payload.sha];
+ yield event;
+ } catch (e) {
+ return yield { type: "error", payload: e };
  }
- catch (e) {
- debug('An unexpected error occurred when starting deployment creation');
- yield { type: 'error', payload: e };
+ }
+ debug("All files uploaded");
+ yield { type: "all-files-uploaded", payload: files };
+ try {
+ debug("Starting deployment creation");
+ for await (const event of (0, import_deploy.deploy)(files, clientOptions, deploymentOptions)) {
+ if (event.type === "alias-assigned") {
+ debug("Deployment is ready");
+ return yield event;
+ }
+ yield event;
  }
+ } catch (e) {
+ debug("An unexpected error occurred when starting deployment creation");
+ yield { type: "error", payload: e };
+ }
  }
- exports.upload = upload;
- class UploadProgress extends events_1.EventEmitter {
- constructor(sha, file) {
- super();
- this.sha = sha;
- this.file = file;
- this.bytesUploaded = 0;
- }
+ class UploadProgress extends import_events.EventEmitter {
+ constructor(sha, file) {
+ super();
+ this.sha = sha;
+ this.file = file;
+ this.bytesUploaded = 0;
+ }
  }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ upload
+ });
@@ -1,11 +1,42 @@
  "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
  };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.nodeFetch = exports.zeitFetch = void 0;
- const node_fetch_1 = __importDefault(require("node-fetch"));
- exports.nodeFetch = node_fetch_1.default;
- const fetch_1 = __importDefault(require("@zeit/fetch"));
- const zeitFetch = (0, fetch_1.default)(node_fetch_1.default);
- exports.zeitFetch = zeitFetch;
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var fetch_exports = {};
+ __export(fetch_exports, {
+ nodeFetch: () => import_node_fetch.default,
+ zeitFetch: () => zeitFetch
+ });
+ module.exports = __toCommonJS(fetch_exports);
+ var import_node_fetch = __toESM(require("node-fetch"));
+ var import_fetch = __toESM(require("@zeit/fetch"));
+ const zeitFetch = (0, import_fetch.default)(import_node_fetch.default);
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ nodeFetch,
+ zeitFetch
+ });
@@ -1,20 +1,50 @@
  "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
  };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getPollingDelay = void 0;
- const ms_1 = __importDefault(require("ms"));
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var get_polling_delay_exports = {};
+ __export(get_polling_delay_exports, {
+ getPollingDelay: () => getPollingDelay
+ });
+ module.exports = __toCommonJS(get_polling_delay_exports);
+ var import_ms = __toESM(require("ms"));
  function getPollingDelay(elapsed) {
- if (elapsed <= (0, ms_1.default)('15s')) {
- return (0, ms_1.default)('1s');
- }
- if (elapsed <= (0, ms_1.default)('1m')) {
- return (0, ms_1.default)('5s');
- }
- if (elapsed <= (0, ms_1.default)('5m')) {
- return (0, ms_1.default)('15s');
- }
- return (0, ms_1.default)('30s');
+ if (elapsed <= (0, import_ms.default)("15s")) {
+ return (0, import_ms.default)("1s");
+ }
+ if (elapsed <= (0, import_ms.default)("1m")) {
+ return (0, import_ms.default)("5s");
+ }
+ if (elapsed <= (0, import_ms.default)("5m")) {
+ return (0, import_ms.default)("15s");
+ }
+ return (0, import_ms.default)("30s");
  }
- exports.getPollingDelay = getPollingDelay;
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ getPollingDelay
+ });
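
For orientation, a minimal sketch of how the `upload` async generator diffed above can be consumed. It is illustrative only: the deep require path is assumed from the `package/dist/upload.js` layout shown in this diff (the package's documented entry point may differ), and `files`, `clientOptions`, and `deploymentOptions` are placeholders for the values the client normally prepares. The event types handled are the ones the generator yields above.

const { upload } = require('@vercel/client/dist/upload');

async function consume(files, clientOptions, deploymentOptions) {
  // `upload` is an async generator, so events are pulled with `for await`.
  for await (const event of upload(files, clientOptions, deploymentOptions)) {
    if (event.type === 'file-count') {
      // payload: { total, missing, uploads }; each upload is an UploadProgress emitter
      for (const progress of event.payload.uploads) {
        progress.on('progress', () => {
          // progress.bytesUploaded grows as each 16 KB chunk of the body is read
        });
      }
    } else if (event.type === 'file-uploaded') {
      // payload: { sha, file }
    } else if (event.type === 'error') {
      throw event.payload;
    } else if (event.type === 'alias-assigned') {
      // deployment is ready; the generator returns after yielding this event
      return event;
    }
    // other deployment events from `deploy` are passed through unchanged
  }
}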