@vercel/client 13.0.0 → 13.0.2
This diff shows the changes between publicly released versions of the package as published to its public registry, and is provided for informational purposes only.
- package/dist/check-deployment-status.js +130 -107
- package/dist/create-deployment.js +130 -109
- package/dist/deploy.js +144 -116
- package/dist/errors.js +31 -8
- package/dist/index.js +49 -26
- package/dist/pkg.js +28 -5
- package/dist/types.js +33 -4
- package/dist/upload.js +205 -183
- package/dist/utils/fetch.js +40 -9
- package/dist/utils/get-polling-delay.js +46 -16
- package/dist/utils/hashes.js +80 -66
- package/dist/utils/index.js +249 -221
- package/dist/utils/query-string.js +41 -18
- package/dist/utils/readdir-recursive.js +85 -91
- package/dist/utils/ready-state.js +50 -18
- package/package.json +3 -3
package/dist/utils/hashes.js
CHANGED
@@ -1,74 +1,88 @@
 "use strict";
-var
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
 };
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var hashes_exports = {};
+__export(hashes_exports, {
+  hash: () => hash,
+  hashes: () => hashes,
+  mapToObject: () => mapToObject
+});
+module.exports = __toCommonJS(hashes_exports);
+var import_crypto = require("crypto");
+var import_fs_extra = __toESM(require("fs-extra"));
+var import_async_sema = require("async-sema");
 function hash(buf) {
+  return (0, import_crypto.createHash)("sha1").update(buf).digest("hex");
 }
-exports.hash = hash;
-/**
- * Transforms map to object
- * @param map with hashed files
- * @return {object}
- */
 const mapToObject = (map) => {
+  const obj = {};
+  for (const [key, value] of map) {
+    if (typeof key === "undefined")
+      continue;
+    obj[key] = value;
+  }
+  return obj;
 };
-      let h;
-      if (!isDirectory) {
-        if (stat.isSymbolicLink()) {
-          const link = await fs_extra_1.default.readlink(name);
-          data = Buffer.from(link, 'utf8');
-        }
-        else {
-          data = await fs_extra_1.default.readFile(name);
-        }
-        h = hash(data);
-      }
-      const entry = map.get(h);
-      if (entry) {
-        const names = new Set(entry.names);
-        names.add(name);
-        entry.names = [...names];
-      }
-      else {
-        map.set(h, { names: [name], data, mode });
+async function hashes(files, map = /* @__PURE__ */ new Map()) {
+  const semaphore = new import_async_sema.Sema(100);
+  await Promise.all(
+    files.map(async (name) => {
+      await semaphore.acquire();
+      const stat = await import_fs_extra.default.lstat(name);
+      const mode = stat.mode;
+      let data;
+      const isDirectory = stat.isDirectory();
+      let h;
+      if (!isDirectory) {
+        if (stat.isSymbolicLink()) {
+          const link = await import_fs_extra.default.readlink(name);
+          data = Buffer.from(link, "utf8");
+        } else {
+          data = await import_fs_extra.default.readFile(name);
         }
+        h = hash(data);
+      }
+      const entry = map.get(h);
+      if (entry) {
+        const names = new Set(entry.names);
+        names.add(name);
+        entry.names = [...names];
+      } else {
+        map.set(h, { names: [name], data, mode });
+      }
+      semaphore.release();
+    })
+  );
+  return map;
 }
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  hash,
+  hashes,
+  mapToObject
+});
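The new output above appears to come from esbuild-style bundling (the `__toCommonJS`/`__export` prelude) rather than the previous tsc-style `exports.*` assignments, while the exported surface stays the same. A minimal usage sketch based only on the signatures visible in this diff; the require path and file names are illustrative, not documented API:

```js
// Sketch only: the require path assumes the compiled dist layout shown above.
const { hash, hashes, mapToObject } = require('@vercel/client/dist/utils/hashes');

async function main() {
  // `hashes` lstats each path (up to 100 at a time via async-sema), SHA1-hashes
  // file contents (or symlink targets) and groups paths that share a digest.
  const map = await hashes(['package.json', 'README.md']);
  // map is a Map<sha, { names: string[], data: Buffer, mode: number }>
  for (const [sha, { names, mode }] of map) {
    console.log(sha, names, mode.toString(8));
  }
  // `mapToObject` flattens the Map to a plain object, skipping the undefined
  // key that directory entries produce (directories are never hashed).
  console.log(Object.keys(mapToObject(map)).length, 'unique blobs');
  // `hash` is the underlying SHA1 helper.
  console.log(hash(Buffer.from('hello')));
}

main().catch(console.error);
```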
package/dist/utils/index.js
CHANGED
@@ -1,242 +1,270 @@
 "use strict";
-var
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
 };
-exports
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var utils_exports = {};
+__export(utils_exports, {
+  API_FILES: () => API_FILES,
+  EVENTS: () => EVENTS,
+  buildFileTree: () => buildFileTree,
+  createDebug: () => createDebug,
+  fetch: () => fetch,
+  getApiDeploymentsUrl: () => getApiDeploymentsUrl,
+  getVercelIgnore: () => getVercelIgnore,
+  parseVercelConfig: () => parseVercelConfig,
+  prepareFiles: () => prepareFiles
+});
+module.exports = __toCommonJS(utils_exports);
+var import_fetch2 = require("./fetch");
+var import_path = require("path");
+var import_url = require("url");
+var import_ignore = __toESM(require("ignore"));
+var import_pkg = require("../pkg");
+var import_build_utils = require("@vercel/build-utils");
+var import_async_sema = require("async-sema");
+var import_fs_extra = require("fs-extra");
+var import_readdir_recursive = __toESM(require("./readdir-recursive"));
+const semaphore = new import_async_sema.Sema(10);
+const API_FILES = "/v2/files";
 const EVENTS_ARRAY = [
+  // File events
+  "hashes-calculated",
+  "file-count",
+  "file-uploaded",
+  "all-files-uploaded",
+  // Deployment events
+  "created",
+  "building",
+  "ready",
+  "alias-assigned",
+  "warning",
+  "error",
+  "notice",
+  "tip",
+  "canceled",
+  // Checks events
+  "checks-registered",
+  "checks-completed",
+  "checks-running",
+  "checks-conclusion-succeeded",
+  "checks-conclusion-failed",
+  "checks-conclusion-skipped",
+  "checks-conclusion-canceled"
 ];
+const EVENTS = new Set(EVENTS_ARRAY);
 function getApiDeploymentsUrl(metadata) {
+  if (metadata && metadata.builds && !metadata.functions) {
+    return "/v10/deployments";
+  }
+  return "/v13/deployments";
 }
-exports.getApiDeploymentsUrl = getApiDeploymentsUrl;
 async function parseVercelConfig(filePath) {
-  return {};
-}
+  if (!filePath) {
+    return {};
+  }
+  try {
+    const jsonString = await (0, import_fs_extra.readFile)(filePath, "utf8");
+    return JSON.parse(jsonString);
+  } catch (e) {
+    console.error(e);
+    return {};
+  }
 }
-  return default_;
-}
+const maybeRead = async function(path, default_) {
+  try {
+    return await (0, import_fs_extra.readFile)(path, "utf8");
+  } catch (err) {
+    return default_;
+  }
 };
-async function buildFileTree(path, {
-}
-  return { fileList, ignoreList };
+async function buildFileTree(path, {
+  isDirectory,
+  prebuilt
+}, debug) {
+  const ignoreList = [];
+  let fileList;
+  let { ig, ignores } = await getVercelIgnore(path, prebuilt);
+  debug(`Found ${ignores.length} rules in .vercelignore`);
+  debug("Building file tree...");
+  if (isDirectory && !Array.isArray(path)) {
+    const ignores2 = (absPath) => {
+      const rel = (0, import_path.relative)(path, absPath);
+      const ignored = ig.ignores(rel);
+      if (ignored) {
+        ignoreList.push(rel);
+      }
+      return ignored;
+    };
+    fileList = await (0, import_readdir_recursive.default)(path, [ignores2]);
+    debug(`Found ${fileList.length} files in the specified directory`);
+  } else if (Array.isArray(path)) {
+    fileList = path;
+    debug(`Assigned ${fileList.length} files provided explicitly`);
+  } else {
+    fileList = [path];
+    debug(`Deploying the provided path as single file`);
+  }
+  return { fileList, ignoreList };
 }
-exports.buildFileTree = buildFileTree;
 async function getVercelIgnore(cwd, prebuilt) {
-}
+  const ig = (0, import_ignore.default)();
+  let ignores;
+  if (prebuilt) {
+    const outputDir = ".vercel/output";
+    ignores = ["*"];
+    const parts = outputDir.split("/");
+    parts.forEach((_, i) => {
+      const level = parts.slice(0, i + 1).join("/");
+      ignores.push(`!${level}`);
+    });
+    ignores.push(`!${outputDir}/**`);
+    ig.add(ignores.join("\n"));
+  } else {
+    ignores = [
+      ".hg",
+      ".git",
+      ".gitmodules",
+      ".svn",
+      ".cache",
+      ".next",
+      ".now",
+      ".vercel",
+      ".npmignore",
+      ".dockerignore",
+      ".gitignore",
+      ".*.swp",
+      ".DS_Store",
+      ".wafpicke-*",
+      ".lock-wscript",
+      ".env.local",
+      ".env.*.local",
+      ".venv",
+      "npm-debug.log",
+      "config.gypi",
+      "node_modules",
+      "__pycache__",
+      "venv",
+      "CVS"
+    ];
+    const cwds = Array.isArray(cwd) ? cwd : [cwd];
+    const files = await Promise.all(
+      cwds.map(async (cwd2) => {
+        const [vercelignore, nowignore] = await Promise.all([
+          maybeRead((0, import_path.join)(cwd2, ".vercelignore"), ""),
+          maybeRead((0, import_path.join)(cwd2, ".nowignore"), "")
+        ]);
+        if (vercelignore && nowignore) {
+          throw new import_build_utils.NowBuildError({
+            code: "CONFLICTING_IGNORE_FILES",
+            message: "Cannot use both a `.vercelignore` and `.nowignore` file. Please delete the `.nowignore` file.",
+            link: "https://vercel.link/combining-old-and-new-config"
+          });
+        }
+        return vercelignore || nowignore;
+      })
+    );
+    const ignoreFile = files.join("\n");
+    ig.add(`${ignores.join("\n")}
+${clearRelative(ignoreFile)}`);
+  }
+  return { ig, ignores };
 }
-exports.getVercelIgnore = getVercelIgnore;
-/**
- * Remove leading `./` from the beginning of ignores
- * because ignore doesn't like them :|
- */
 function clearRelative(str) {
+  return str.replace(/(\n|^)\.\//g, "$1");
 }
 const fetch = async (url, token, opts = {}, debugEnabled, useNodeFetch) => {
-  semaphore.release();
-  return res;
+  semaphore.acquire();
+  const debug = createDebug(debugEnabled);
+  let time;
+  url = `${opts.apiUrl || "https://api.vercel.com"}${url}`;
+  delete opts.apiUrl;
+  if (opts.teamId) {
+    const parsedUrl = new import_url.URL(url);
+    parsedUrl.searchParams.set("teamId", opts.teamId);
+    url = parsedUrl.toString();
+    delete opts.teamId;
+  }
+  const userAgent = opts.userAgent || `client-v${import_pkg.pkgVersion}`;
+  delete opts.userAgent;
+  opts.headers = {
+    ...opts.headers,
+    authorization: `Bearer ${token}`,
+    accept: "application/json",
+    "user-agent": userAgent
+  };
+  debug(`${opts.method || "GET"} ${url}`);
+  time = Date.now();
+  const res = useNodeFetch ? await (0, import_fetch2.nodeFetch)(url, opts) : await (0, import_fetch2.zeitFetch)(url, opts);
+  debug(`DONE in ${Date.now() - time}ms: ${opts.method || "GET"} ${url}`);
+  semaphore.release();
+  return res;
 };
-const isWin = process.platform.includes('win');
+const isWin = process.platform.includes("win");
 const prepareFiles = (files, clientOptions) => {
-    preparedFiles.push({
-      file: isWin ? fileName.replace(/\\/g, '/') : fileName,
-      size: file.data?.byteLength || file.data?.length,
-      mode: file.mode,
-      sha: sha || undefined,
-    });
-  }
+  const preparedFiles = [];
+  for (const [sha, file] of files) {
+    for (const name of file.names) {
+      let fileName;
+      if (clientOptions.isDirectory) {
+        fileName = typeof clientOptions.path === "string" ? (0, import_path.relative)(clientOptions.path, name) : name;
+      } else {
+        const segments = name.split(import_path.sep);
+        fileName = segments[segments.length - 1];
+      }
+      preparedFiles.push({
+        file: isWin ? fileName.replace(/\\/g, "/") : fileName,
+        size: file.data?.byteLength || file.data?.length,
+        mode: file.mode,
+        sha: sha || void 0
+      });
    }
+  }
+  return preparedFiles;
 };
-exports.prepareFiles = prepareFiles;
 function createDebug(debug) {
-}
+  if (debug) {
+    return (...logs) => {
+      process.stderr.write(
+        [`[client-debug] ${(/* @__PURE__ */ new Date()).toISOString()}`, ...logs].join(" ") + "\n"
+      );
+    };
+  }
+  return () => {
+  };
 }
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  API_FILES,
+  EVENTS,
+  buildFileTree,
+  createDebug,
+  fetch,
+  getApiDeploymentsUrl,
+  getVercelIgnore,
+  parseVercelConfig,
+  prepareFiles
+});
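As with `hashes.js`, the rewrite changes the module scaffolding rather than the exported helpers. A minimal sketch of how `createDebug`, `buildFileTree`, and `prepareFiles` fit together, based only on the signatures visible in the diff above; the require paths and options are illustrative, not the package's documented public API:

```js
// Sketch only: require paths and options are illustrative.
const { buildFileTree, createDebug, prepareFiles } = require('@vercel/client/dist/utils');
const { hashes } = require('@vercel/client/dist/utils/hashes');

async function collect(cwd) {
  const debug = createDebug(true); // logs to stderr with a [client-debug] prefix
  // Walk `cwd`, applying the .vercelignore/.nowignore rules assembled by
  // getVercelIgnore, and get back the files to upload plus what was skipped.
  const { fileList, ignoreList } = await buildFileTree(
    cwd,
    { isDirectory: true, prebuilt: false },
    debug
  );
  debug(`${ignoreList.length} paths ignored`);
  // Group the files by SHA1, then shape them into the { file, size, mode, sha }
  // records that prepareFiles produces above.
  const fileMap = await hashes(fileList);
  return prepareFiles(fileMap, { isDirectory: true, path: cwd });
}

collect(process.cwd()).then((files) => console.log(files.length, 'files prepared'));
```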