firebase-tools 11.29.1 → 12.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api.js +4 -2
- package/lib/commands/database-import.js +2 -2
- package/lib/commands/ext-configure.js +2 -1
- package/lib/commands/ext-dev-deprecate.js +24 -20
- package/lib/commands/ext-dev-list.js +12 -11
- package/lib/commands/ext-dev-publish.js +13 -47
- package/lib/commands/ext-dev-register.js +8 -5
- package/lib/commands/ext-dev-undeprecate.js +4 -4
- package/lib/commands/ext-dev-upload.js +88 -0
- package/lib/commands/ext-dev-usage.js +3 -3
- package/lib/commands/ext-install.js +5 -10
- package/lib/commands/ext-uninstall.js +0 -1
- package/lib/commands/ext-update.js +4 -10
- package/lib/commands/hosting-channel-deploy.js +3 -0
- package/lib/commands/index.js +9 -19
- package/lib/database/import.js +113 -18
- package/lib/deploy/extensions/planner.js +13 -7
- package/lib/deploy/extensions/prepare.js +16 -32
- package/lib/deploy/functions/ensure.js +7 -1
- package/lib/deploy/functions/release/fabricator.js +2 -0
- package/lib/deploy/functions/runtimes/discovery/index.js +1 -1
- package/lib/deploy/functions/runtimes/index.js +11 -3
- package/lib/deploy/functions/runtimes/node/parseRuntimeAndValidateSDK.js +3 -3
- package/lib/deploy/functions/runtimes/python/index.js +41 -13
- package/lib/deploy/hosting/convertConfig.js +8 -4
- package/lib/deploy/hosting/prepare.js +64 -6
- package/lib/deploy/index.js +24 -8
- package/lib/emulator/adminSdkConfig.js +8 -0
- package/lib/emulator/controller.js +7 -9
- package/lib/emulator/download.js +3 -12
- package/lib/emulator/downloadableEmulators.js +5 -5
- package/lib/emulator/functionsEmulator.js +57 -7
- package/lib/emulator/functionsEmulatorRuntime.js +4 -1
- package/lib/emulator/functionsEmulatorShared.js +1 -0
- package/lib/emulator/functionsRuntimeWorker.js +12 -4
- package/lib/emulator/storage/rules/config.js +17 -7
- package/lib/experiments.js +22 -8
- package/lib/extensions/extensionsApi.js +24 -151
- package/lib/extensions/extensionsHelper.js +283 -146
- package/lib/extensions/manifest.js +1 -8
- package/lib/extensions/publisherApi.js +215 -0
- package/lib/extensions/refs.js +1 -1
- package/lib/extensions/resolveSource.js +1 -18
- package/lib/extensions/tos.js +78 -0
- package/lib/extensions/warnings.js +21 -41
- package/lib/frameworks/angular/index.js +74 -192
- package/lib/frameworks/angular/interfaces.js +2 -0
- package/lib/frameworks/angular/utils.js +274 -0
- package/lib/frameworks/astro/index.js +3 -4
- package/lib/frameworks/constants.js +45 -0
- package/lib/frameworks/express/index.js +3 -2
- package/lib/frameworks/flutter/index.js +39 -0
- package/lib/frameworks/flutter/utils.js +11 -0
- package/lib/frameworks/index.js +104 -145
- package/lib/frameworks/interfaces.js +2 -0
- package/lib/frameworks/next/constants.js +2 -1
- package/lib/frameworks/next/index.js +197 -114
- package/lib/frameworks/next/utils.js +97 -15
- package/lib/frameworks/nuxt/index.js +4 -5
- package/lib/frameworks/nuxt/utils.js +2 -2
- package/lib/frameworks/nuxt2/index.js +5 -5
- package/lib/frameworks/utils.js +108 -1
- package/lib/frameworks/vite/index.js +5 -6
- package/lib/functions/ensureTargeted.js +4 -4
- package/lib/functions/python.js +12 -5
- package/lib/gcp/resourceManager.js +1 -0
- package/lib/hosting/api.js +32 -1
- package/lib/hosting/config.js +4 -8
- package/lib/init/features/functions/index.js +4 -7
- package/lib/init/features/hosting/github.js +7 -2
- package/lib/init/features/hosting/index.js +3 -2
- package/lib/serve/index.js +2 -1
- package/lib/unzip.js +126 -0
- package/package.json +2 -3
- package/schema/firebase-config.json +1 -1
- package/templates/extensions/POSTINSTALL.md +2 -2
- package/templates/extensions/PREINSTALL.md +1 -1
- package/templates/extensions/extension.yaml +10 -6
- package/templates/extensions/javascript/WELCOME.md +1 -1
- package/templates/extensions/typescript/WELCOME.md +1 -1
- package/templates/extensions/typescript/index.ts +1 -1
- package/templates/init/functions/javascript/index.js +16 -6
- package/templates/init/functions/javascript/package.lint.json +4 -4
- package/templates/init/functions/javascript/package.nolint.json +4 -4
- package/templates/init/functions/python/requirements.txt +1 -1
- package/templates/init/functions/typescript/index.ts +16 -6
- package/templates/init/functions/typescript/package.lint.json +4 -4
- package/templates/init/functions/typescript/package.nolint.json +4 -4
- package/lib/commands/ext-dev-emulators-exec.js +0 -27
- package/lib/commands/ext-dev-emulators-start.js +0 -24
- package/lib/commands/ext-dev-extension-delete.js +0 -45
- package/lib/commands/ext-dev-unpublish.js +0 -49
- package/lib/commands/ext-sources-create.js +0 -24
- package/lib/extensions/askUserForConsent.js +0 -33
- package/npm-shrinkwrap.json +0 -12649
package/lib/database/import.js
CHANGED
|
@@ -6,14 +6,91 @@ const Filter = require("stream-json/filters/Filter");
|
|
|
6
6
|
const stream = require("stream");
|
|
7
7
|
const StreamObject = require("stream-json/streamers/StreamObject");
|
|
8
8
|
const apiv2_1 = require("../apiv2");
|
|
9
|
+
const node_fetch_1 = require("node-fetch");
|
|
9
10
|
const error_1 = require("../error");
|
|
10
11
|
const pLimit = require("p-limit");
|
|
12
|
+
class BatchChunks extends stream.Transform {
|
|
13
|
+
constructor(maxSize, opts) {
|
|
14
|
+
super(Object.assign(Object.assign({}, opts), { objectMode: true }));
|
|
15
|
+
this.maxSize = maxSize;
|
|
16
|
+
this.batch = [];
|
|
17
|
+
this.size = 0;
|
|
18
|
+
}
|
|
19
|
+
_transform(chunk, _, callback) {
|
|
20
|
+
const totalChunkSize = chunk.size + chunk.pathname.length;
|
|
21
|
+
if (this.size + totalChunkSize > this.maxSize) {
|
|
22
|
+
this.push(this.transformBatchToPatchData(this.batch));
|
|
23
|
+
this.batch = [];
|
|
24
|
+
this.size = 0;
|
|
25
|
+
}
|
|
26
|
+
this.batch.push(chunk);
|
|
27
|
+
this.size += totalChunkSize;
|
|
28
|
+
callback(null);
|
|
29
|
+
}
|
|
30
|
+
transformBatchToPatchData(batch) {
|
|
31
|
+
return this.sanitizePatchData(this.compactData(batch));
|
|
32
|
+
}
|
|
33
|
+
compactData(batch) {
|
|
34
|
+
if (batch.length === 1) {
|
|
35
|
+
return batch[0];
|
|
36
|
+
}
|
|
37
|
+
const pathname = this.findLongestCommonPrefixArray(batch.map((d) => d.pathname));
|
|
38
|
+
let json = {};
|
|
39
|
+
let size = 0;
|
|
40
|
+
for (const chunk of batch) {
|
|
41
|
+
const truncatedPath = chunk.pathname.substring(pathname.length + 1);
|
|
42
|
+
json = Object.assign({}, json, { [truncatedPath]: chunk.json });
|
|
43
|
+
size += chunk.size;
|
|
44
|
+
}
|
|
45
|
+
return { json, pathname, size };
|
|
46
|
+
}
|
|
47
|
+
sanitizePatchData({ json, pathname, size }) {
|
|
48
|
+
if (typeof json === "string" || typeof json === "number" || typeof json === "boolean") {
|
|
49
|
+
const tokens = pathname.split("/");
|
|
50
|
+
const lastToken = tokens.pop();
|
|
51
|
+
return { json: { [lastToken]: json }, pathname: tokens.join("/"), size };
|
|
52
|
+
}
|
|
53
|
+
if (Array.isArray(json)) {
|
|
54
|
+
return { json: Object.assign({}, json), pathname, size };
|
|
55
|
+
}
|
|
56
|
+
return { json, pathname, size };
|
|
57
|
+
}
|
|
58
|
+
findLongestCommonPrefixArray(paths) {
|
|
59
|
+
const findLongestCommonPrefixPair = (p, q) => {
|
|
60
|
+
const pTokens = p.split("/");
|
|
61
|
+
const qTokens = q.split("/");
|
|
62
|
+
let prefix = pTokens.slice(0, qTokens.length);
|
|
63
|
+
for (let i = 0; i < prefix.length; i++) {
|
|
64
|
+
if (prefix[i] !== qTokens[i]) {
|
|
65
|
+
prefix = prefix.slice(0, i);
|
|
66
|
+
break;
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
return prefix.join("/");
|
|
70
|
+
};
|
|
71
|
+
if (paths.length === 0) {
|
|
72
|
+
return "";
|
|
73
|
+
}
|
|
74
|
+
let prefix = paths[0];
|
|
75
|
+
for (let i = 1; i < paths.length; i++) {
|
|
76
|
+
prefix = findLongestCommonPrefixPair(prefix, paths[i]);
|
|
77
|
+
}
|
|
78
|
+
return prefix;
|
|
79
|
+
}
|
|
80
|
+
_flush(callback) {
|
|
81
|
+
if (this.size > 0) {
|
|
82
|
+
this.push(this.transformBatchToPatchData(this.batch));
|
|
83
|
+
}
|
|
84
|
+
callback(null);
|
|
85
|
+
}
|
|
86
|
+
}
|
|
11
87
|
class DatabaseImporter {
|
|
12
|
-
constructor(dbUrl, inStream, dataPath,
|
|
88
|
+
constructor(dbUrl, inStream, dataPath, payloadSize, concurrency) {
|
|
13
89
|
this.dbUrl = dbUrl;
|
|
14
90
|
this.inStream = inStream;
|
|
15
91
|
this.dataPath = dataPath;
|
|
16
|
-
this.
|
|
92
|
+
this.payloadSize = payloadSize;
|
|
93
|
+
this.nonFatalRetryTimeout = 1000;
|
|
17
94
|
this.client = new apiv2_1.Client({ urlPrefix: dbUrl.origin, auth: true });
|
|
18
95
|
this.limit = pLimit(concurrency);
|
|
19
96
|
}
|
|
@@ -34,23 +111,23 @@ class DatabaseImporter {
|
|
|
34
111
|
}
|
|
35
112
|
}
|
|
36
113
|
readAndWriteChunks() {
|
|
37
|
-
const { dbUrl } = this;
|
|
114
|
+
const { dbUrl, payloadSize } = this;
|
|
38
115
|
const chunkData = this.chunkData.bind(this);
|
|
39
|
-
const
|
|
116
|
+
const doWriteBatch = this.doWriteBatch.bind(this);
|
|
40
117
|
const getJoinedPath = this.joinPath.bind(this);
|
|
41
118
|
const readChunks = new stream.Transform({ objectMode: true });
|
|
42
119
|
readChunks._transform = function (chunk, _, done) {
|
|
43
120
|
const data = { json: chunk.value, pathname: getJoinedPath(dbUrl.pathname, chunk.key) };
|
|
44
121
|
const chunkedData = chunkData(data);
|
|
45
|
-
const chunks = chunkedData.chunks || [data];
|
|
122
|
+
const chunks = chunkedData.chunks || [Object.assign(Object.assign({}, data), { size: JSON.stringify(data.json).length })];
|
|
46
123
|
for (const chunk of chunks) {
|
|
47
124
|
this.push(chunk);
|
|
48
125
|
}
|
|
49
126
|
done();
|
|
50
127
|
};
|
|
51
|
-
const
|
|
52
|
-
|
|
53
|
-
const res = await
|
|
128
|
+
const writeBatch = new stream.Transform({ objectMode: true });
|
|
129
|
+
writeBatch._transform = async function (batch, _, done) {
|
|
130
|
+
const res = await doWriteBatch(batch);
|
|
54
131
|
this.push(res);
|
|
55
132
|
done();
|
|
56
133
|
};
|
|
@@ -70,19 +147,37 @@ class DatabaseImporter {
|
|
|
70
147
|
exit: 2,
|
|
71
148
|
})))
|
|
72
149
|
.pipe(readChunks)
|
|
73
|
-
.pipe(
|
|
150
|
+
.pipe(new BatchChunks(payloadSize))
|
|
151
|
+
.pipe(writeBatch)
|
|
74
152
|
.on("data", (res) => responses.push(res))
|
|
75
153
|
.on("error", reject)
|
|
76
154
|
.once("end", () => resolve(responses));
|
|
77
155
|
});
|
|
78
156
|
}
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
157
|
+
doWriteBatch(batch) {
|
|
158
|
+
const doRequest = () => {
|
|
159
|
+
return this.client.request({
|
|
160
|
+
method: "PATCH",
|
|
161
|
+
path: `${batch.pathname}.json`,
|
|
162
|
+
body: batch.json,
|
|
163
|
+
queryParams: this.dbUrl.searchParams,
|
|
164
|
+
});
|
|
165
|
+
};
|
|
166
|
+
return this.limit(async () => {
|
|
167
|
+
try {
|
|
168
|
+
return await doRequest();
|
|
169
|
+
}
|
|
170
|
+
catch (err) {
|
|
171
|
+
const isTimeoutErr = err instanceof error_1.FirebaseError &&
|
|
172
|
+
err.original instanceof node_fetch_1.FetchError &&
|
|
173
|
+
err.original.code === "ETIMEDOUT";
|
|
174
|
+
if (isTimeoutErr) {
|
|
175
|
+
await new Promise((res) => setTimeout(res, this.nonFatalRetryTimeout));
|
|
176
|
+
return await doRequest();
|
|
177
|
+
}
|
|
178
|
+
throw err;
|
|
179
|
+
}
|
|
180
|
+
});
|
|
86
181
|
}
|
|
87
182
|
chunkData({ json, pathname }) {
|
|
88
183
|
if (typeof json === "string" || typeof json === "number" || typeof json === "boolean") {
|
|
@@ -102,10 +197,10 @@ class DatabaseImporter {
|
|
|
102
197
|
chunks.push(...childChunks.chunks);
|
|
103
198
|
}
|
|
104
199
|
else {
|
|
105
|
-
chunks.push(child);
|
|
200
|
+
chunks.push(Object.assign(Object.assign({}, child), { size: childChunks.size }));
|
|
106
201
|
}
|
|
107
202
|
}
|
|
108
|
-
if (hasChunkedChild || size >= this.
|
|
203
|
+
if (hasChunkedChild || size >= this.payloadSize) {
|
|
109
204
|
return { chunks, size };
|
|
110
205
|
}
|
|
111
206
|
else {
|
|
@@ -129,17 +129,23 @@ async function want(args) {
|
|
|
129
129
|
exports.want = want;
|
|
130
130
|
async function resolveVersion(ref) {
|
|
131
131
|
const extensionRef = refs.toExtensionRef(ref);
|
|
132
|
+
const extension = await extensionsApi.getExtension(extensionRef);
|
|
133
|
+
if (!ref.version || ref.version === "latest-approved") {
|
|
134
|
+
if (!extension.latestApprovedVersion) {
|
|
135
|
+
throw new error_1.FirebaseError(`${extensionRef} has not been published to Extensions Hub (https://extensions.dev). To install it, you must specify the version you want to install.`);
|
|
136
|
+
}
|
|
137
|
+
return extension.latestApprovedVersion;
|
|
138
|
+
}
|
|
139
|
+
if (ref.version === "latest") {
|
|
140
|
+
if (!extension.latestVersion) {
|
|
141
|
+
throw new error_1.FirebaseError(`${extensionRef} has no stable non-deprecated versions. If you wish to install a prerelease version, you must specify the version you want to install.`);
|
|
142
|
+
}
|
|
143
|
+
return extension.latestVersion;
|
|
144
|
+
}
|
|
132
145
|
const versions = await extensionsApi.listExtensionVersions(extensionRef, undefined, true);
|
|
133
146
|
if (versions.length === 0) {
|
|
134
147
|
throw new error_1.FirebaseError(`No versions found for ${extensionRef}`);
|
|
135
148
|
}
|
|
136
|
-
if (!ref.version || ref.version === "latest") {
|
|
137
|
-
return versions
|
|
138
|
-
.filter((ev) => ev.spec.version !== undefined)
|
|
139
|
-
.map((ev) => ev.spec.version)
|
|
140
|
-
.sort(semver.compare)
|
|
141
|
-
.pop();
|
|
142
|
-
}
|
|
143
149
|
const maxSatisfying = semver.maxSatisfying(versions.map((ev) => ev.spec.version), ref.version);
|
|
144
150
|
if (!maxSatisfying) {
|
|
145
151
|
throw new error_1.FirebaseError(`No version of ${extensionRef} matches requested version ${ref.version}`);
|
|
@@ -15,6 +15,7 @@ const secrets_1 = require("./secrets");
|
|
|
15
15
|
const warnings_1 = require("../../extensions/warnings");
|
|
16
16
|
const etags_1 = require("../../extensions/etags");
|
|
17
17
|
const v2FunctionHelper_1 = require("./v2FunctionHelper");
|
|
18
|
+
const tos_1 = require("../../extensions/tos");
|
|
18
19
|
async function prepare(context, options, payload) {
|
|
19
20
|
var _a, _b;
|
|
20
21
|
const projectId = (0, projectUtils_1.needProjectId)(options);
|
|
@@ -33,16 +34,10 @@ async function prepare(context, options, payload) {
|
|
|
33
34
|
const etagsChanged = (0, etags_1.detectEtagChanges)(options.rc, projectId, context.have);
|
|
34
35
|
if (etagsChanged.length) {
|
|
35
36
|
(0, warnings_1.outOfBandChangesWarning)(etagsChanged);
|
|
36
|
-
if (!
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
!options.nonInteractive &&
|
|
41
|
-
!(await prompt.promptOnce({
|
|
42
|
-
type: "confirm",
|
|
43
|
-
message: `Do you wish to continue deploying these extension instances?`,
|
|
44
|
-
default: false,
|
|
45
|
-
}))) {
|
|
37
|
+
if (!(await prompt.confirm({
|
|
38
|
+
message: `Do you wish to continue deploying these extension instances?`,
|
|
39
|
+
default: false,
|
|
40
|
+
}))) {
|
|
46
41
|
throw new error_1.FirebaseError("Deployment cancelled");
|
|
47
42
|
}
|
|
48
43
|
}
|
|
@@ -59,16 +54,10 @@ async function prepare(context, options, payload) {
|
|
|
59
54
|
payload.instancesToUpdate = context.want.filter((i) => { var _a; return (_a = context.have) === null || _a === void 0 ? void 0 : _a.some(isUpdate(i)); });
|
|
60
55
|
payload.instancesToDelete = context.have.filter((i) => { var _a; return !((_a = context.want) === null || _a === void 0 ? void 0 : _a.some(matchesInstanceId(i))); });
|
|
61
56
|
if (await (0, warnings_1.displayWarningsForDeploy)(payload.instancesToCreate)) {
|
|
62
|
-
if (!
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
!options.nonInteractive &&
|
|
67
|
-
!(await prompt.promptOnce({
|
|
68
|
-
type: "confirm",
|
|
69
|
-
message: `Do you wish to continue deploying these extension instances?`,
|
|
70
|
-
default: true,
|
|
71
|
-
}))) {
|
|
57
|
+
if (!(await prompt.confirm({
|
|
58
|
+
message: `Do you wish to continue deploying these extension instances?`,
|
|
59
|
+
default: true,
|
|
60
|
+
}))) {
|
|
72
61
|
throw new error_1.FirebaseError("Deployment cancelled");
|
|
73
62
|
}
|
|
74
63
|
}
|
|
@@ -87,18 +76,12 @@ async function prepare(context, options, payload) {
|
|
|
87
76
|
}
|
|
88
77
|
if (payload.instancesToDelete.length) {
|
|
89
78
|
logger_1.logger.info(deploymentSummary.deletesSummary(payload.instancesToDelete));
|
|
90
|
-
if (!
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
type: "confirm",
|
|
97
|
-
message: `Would you like to delete ${payload.instancesToDelete
|
|
98
|
-
.map((i) => i.instanceId)
|
|
99
|
-
.join(", ")}?`,
|
|
100
|
-
default: false,
|
|
101
|
-
}))) {
|
|
79
|
+
if (!(await prompt.confirm({
|
|
80
|
+
message: `Would you like to delete ${payload.instancesToDelete
|
|
81
|
+
.map((i) => i.instanceId)
|
|
82
|
+
.join(", ")}?`,
|
|
83
|
+
default: false,
|
|
84
|
+
}))) {
|
|
102
85
|
payload.instancesToDelete = [];
|
|
103
86
|
}
|
|
104
87
|
else {
|
|
@@ -106,6 +89,7 @@ async function prepare(context, options, payload) {
|
|
|
106
89
|
}
|
|
107
90
|
}
|
|
108
91
|
await (0, requirePermissions_1.requirePermissions)(options, permissionsNeeded);
|
|
92
|
+
await (0, tos_1.acceptLatestAppDeveloperTOS)(options, projectId, context.want.map((i) => i.instanceId));
|
|
109
93
|
}
|
|
110
94
|
exports.prepare = prepare;
|
|
111
95
|
const matchesInstanceId = (dep) => (test) => {
|
|
@@ -12,8 +12,14 @@ const track_1 = require("../../track");
|
|
|
12
12
|
const backend = require("./backend");
|
|
13
13
|
const FAQ_URL = "https://firebase.google.com/support/faq#functions-runtime";
|
|
14
14
|
const CLOUD_BUILD_API = "cloudbuild.googleapis.com";
|
|
15
|
+
const metadataCallCache = new Map();
|
|
15
16
|
async function defaultServiceAccount(e) {
|
|
16
|
-
|
|
17
|
+
let metadataCall = metadataCallCache.get(e.project);
|
|
18
|
+
if (!metadataCall) {
|
|
19
|
+
metadataCall = (0, projects_1.getFirebaseProject)(e.project);
|
|
20
|
+
metadataCallCache.set(e.project, metadataCall);
|
|
21
|
+
}
|
|
22
|
+
const metadata = await metadataCall;
|
|
17
23
|
if (e.platform === "gcfv1") {
|
|
18
24
|
return `${metadata.projectId}@appspot.gserviceaccount.com`;
|
|
19
25
|
}
|
|
@@ -276,6 +276,7 @@ class Fabricator {
|
|
|
276
276
|
.catch(rethrowAs(endpoint, "create"));
|
|
277
277
|
endpoint.uri = (_d = resultFunction.serviceConfig) === null || _d === void 0 ? void 0 : _d.uri;
|
|
278
278
|
const serviceName = (_e = resultFunction.serviceConfig) === null || _e === void 0 ? void 0 : _e.service;
|
|
279
|
+
endpoint.runServiceId = utils.last(serviceName === null || serviceName === void 0 ? void 0 : serviceName.split("/"));
|
|
279
280
|
if (!serviceName) {
|
|
280
281
|
logger_1.logger.debug("Result function unexpectedly didn't have a service name.");
|
|
281
282
|
utils.logLabeledWarning("functions", "Updated function is not associated with a service. This deployment is in an unexpected state - please re-deploy your functions.");
|
|
@@ -369,6 +370,7 @@ class Fabricator {
|
|
|
369
370
|
.catch(rethrowAs(endpoint, "update"));
|
|
370
371
|
endpoint.uri = (_c = resultFunction.serviceConfig) === null || _c === void 0 ? void 0 : _c.uri;
|
|
371
372
|
const serviceName = (_d = resultFunction.serviceConfig) === null || _d === void 0 ? void 0 : _d.service;
|
|
373
|
+
endpoint.runServiceId = utils.last(serviceName === null || serviceName === void 0 ? void 0 : serviceName.split("/"));
|
|
372
374
|
if (!serviceName) {
|
|
373
375
|
logger_1.logger.debug("Result function unexpectedly didn't have a service name.");
|
|
374
376
|
utils.logLabeledWarning("functions", "Updated function is not associated with a service. This deployment is in an unexpected state - please re-deploy your functions.");
|
|
@@ -45,7 +45,7 @@ async function detectFromYaml(directory, project, runtime) {
|
|
|
45
45
|
return yamlToBuild(parsed, project, api.functionsDefaultRegion, runtime);
|
|
46
46
|
}
|
|
47
47
|
exports.detectFromYaml = detectFromYaml;
|
|
48
|
-
async function detectFromPort(port, project, runtime, timeout =
|
|
48
|
+
async function detectFromPort(port, project, runtime, timeout = 10000) {
|
|
49
49
|
let res;
|
|
50
50
|
const timedOut = new Promise((resolve, reject) => {
|
|
51
51
|
setTimeout(() => {
|
|
@@ -5,8 +5,16 @@ const node = require("./node");
|
|
|
5
5
|
const python = require("./python");
|
|
6
6
|
const validate = require("../validate");
|
|
7
7
|
const error_1 = require("../../../error");
|
|
8
|
-
const RUNTIMES = [
|
|
9
|
-
|
|
8
|
+
const RUNTIMES = [
|
|
9
|
+
"nodejs10",
|
|
10
|
+
"nodejs12",
|
|
11
|
+
"nodejs14",
|
|
12
|
+
"nodejs16",
|
|
13
|
+
"nodejs18",
|
|
14
|
+
"python310",
|
|
15
|
+
"python311",
|
|
16
|
+
];
|
|
17
|
+
const EXPERIMENTAL_RUNTIMES = [];
|
|
10
18
|
const DEPRECATED_RUNTIMES = ["nodejs6", "nodejs8"];
|
|
11
19
|
function isDeprecatedRuntime(runtime) {
|
|
12
20
|
return DEPRECATED_RUNTIMES.includes(runtime);
|
|
@@ -25,7 +33,7 @@ const MESSAGE_FRIENDLY_RUNTIMES = {
|
|
|
25
33
|
nodejs16: "Node.js 16",
|
|
26
34
|
nodejs18: "Node.js 18",
|
|
27
35
|
python310: "Python 3.10",
|
|
28
|
-
python311: "Python 3.11
|
|
36
|
+
python311: "Python 3.11",
|
|
29
37
|
};
|
|
30
38
|
function getHumanFriendlyRuntimeName(runtime) {
|
|
31
39
|
return MESSAGE_FRIENDLY_RUNTIMES[runtime] || runtime;
|
|
@@ -19,11 +19,11 @@ const ENGINE_RUNTIMES = {
|
|
|
19
19
|
const ENGINE_RUNTIMES_NAMES = Object.values(ENGINE_RUNTIMES);
|
|
20
20
|
exports.RUNTIME_NOT_SET = "`runtime` field is required but was not found in firebase.json.\n" +
|
|
21
21
|
"To fix this, add the following lines to the `functions` section of your firebase.json:\n" +
|
|
22
|
-
'"runtime": "
|
|
22
|
+
'"runtime": "nodejs18"\n';
|
|
23
23
|
exports.UNSUPPORTED_NODE_VERSION_FIREBASE_JSON_MSG = clc.bold(`functions.runtime value is unsupported. ` +
|
|
24
|
-
`Valid choices are: ${clc.bold("nodejs{10|12|14|16}")}.`);
|
|
24
|
+
`Valid choices are: ${clc.bold("nodejs{10|12|14|16|18}")}.`);
|
|
25
25
|
exports.UNSUPPORTED_NODE_VERSION_PACKAGE_JSON_MSG = clc.bold(`package.json in functions directory has an engines field which is unsupported. ` +
|
|
26
|
-
`Valid choices are: ${clc.bold('{"node": 10|12|14|16}')}`);
|
|
26
|
+
`Valid choices are: ${clc.bold('{"node": 10|12|14|16|18}')}`);
|
|
27
27
|
exports.DEPRECATED_NODE_VERSION_INFO = `\n\nDeploys to runtimes below Node.js 10 are now disabled in the Firebase CLI. ` +
|
|
28
28
|
`${clc.bold(`Existing Node.js 8 functions ${clc.underline("will stop executing at a future date")}`)}. Update existing functions to Node.js 10 or greater as soon as possible.`;
|
|
29
29
|
function getRuntimeChoiceFromPackageJson(sourceDir) {
|
|
@@ -11,7 +11,8 @@ const discovery = require("../discovery");
|
|
|
11
11
|
const logger_1 = require("../../../../logger");
|
|
12
12
|
const python_1 = require("../../../../functions/python");
|
|
13
13
|
const error_1 = require("../../../../error");
|
|
14
|
-
|
|
14
|
+
const utils_1 = require("../../../../utils");
|
|
15
|
+
exports.LATEST_VERSION = "python311";
|
|
15
16
|
async function tryCreateDelegate(context) {
|
|
16
17
|
const requirementsTextPath = path.join(context.sourceDir, "requirements.txt");
|
|
17
18
|
if (!(await (0, util_1.promisify)(fs.exists)(requirementsTextPath))) {
|
|
@@ -54,15 +55,21 @@ class Delegate {
|
|
|
54
55
|
return this._bin;
|
|
55
56
|
}
|
|
56
57
|
async modulesDir() {
|
|
57
|
-
var _a;
|
|
58
|
+
var _a, _b;
|
|
58
59
|
if (!this._modulesDir) {
|
|
60
|
+
let out = "";
|
|
61
|
+
let stderr = "";
|
|
59
62
|
const child = (0, python_1.runWithVirtualEnv)([
|
|
60
63
|
this.bin,
|
|
61
64
|
"-c",
|
|
62
65
|
'"import firebase_functions; import os; print(os.path.dirname(firebase_functions.__file__))"',
|
|
63
66
|
], this.sourceDir, {});
|
|
64
|
-
|
|
65
|
-
|
|
67
|
+
(_a = child.stderr) === null || _a === void 0 ? void 0 : _a.on("data", (chunk) => {
|
|
68
|
+
const chunkString = chunk.toString();
|
|
69
|
+
stderr = stderr + chunkString;
|
|
70
|
+
logger_1.logger.debug(`stderr: ${chunkString}`);
|
|
71
|
+
});
|
|
72
|
+
(_b = child.stdout) === null || _b === void 0 ? void 0 : _b.on("data", (chunk) => {
|
|
66
73
|
const chunkString = chunk.toString();
|
|
67
74
|
out = out + chunkString;
|
|
68
75
|
logger_1.logger.debug(`stdout: ${chunkString}`);
|
|
@@ -72,6 +79,15 @@ class Delegate {
|
|
|
72
79
|
child.on("error", reject);
|
|
73
80
|
});
|
|
74
81
|
this._modulesDir = out.trim();
|
|
82
|
+
if (this._modulesDir === "") {
|
|
83
|
+
if (stderr.includes("venv") && stderr.includes("activate")) {
|
|
84
|
+
throw new error_1.FirebaseError("Failed to find location of Firebase Functions SDK: Missing virtual environment at venv directory. " +
|
|
85
|
+
`Did you forget to run '${this.bin} -m venv venv'?`);
|
|
86
|
+
}
|
|
87
|
+
const { command, args } = (0, python_1.virtualEnvCmd)(this.sourceDir, python_1.DEFAULT_VENV_DIR);
|
|
88
|
+
throw new error_1.FirebaseError("Failed to find location of Firebase Functions SDK. " +
|
|
89
|
+
`Did you forget to run '${command} ${args.join(" ")} && ${this.bin} -m pip install -r requirements.txt'?`);
|
|
90
|
+
}
|
|
75
91
|
}
|
|
76
92
|
return this._modulesDir;
|
|
77
93
|
}
|
|
@@ -92,24 +108,32 @@ class Delegate {
|
|
|
92
108
|
const modulesDir = await this.modulesDir();
|
|
93
109
|
const envWithAdminPort = Object.assign(Object.assign({}, envs), { ADMIN_PORT: port.toString() });
|
|
94
110
|
const args = [this.bin, path.join(modulesDir, "private", "serving.py")];
|
|
111
|
+
const stdout = [];
|
|
112
|
+
const stderr = [];
|
|
95
113
|
logger_1.logger.debug(`Running admin server with args: ${JSON.stringify(args)} and env: ${JSON.stringify(envWithAdminPort)} in ${this.sourceDir}`);
|
|
96
114
|
const childProcess = (0, python_1.runWithVirtualEnv)(args, this.sourceDir, envWithAdminPort);
|
|
97
115
|
(_a = childProcess.stdout) === null || _a === void 0 ? void 0 : _a.on("data", (chunk) => {
|
|
98
116
|
const chunkString = chunk.toString();
|
|
117
|
+
stdout.push(chunkString);
|
|
99
118
|
logger_1.logger.debug(`stdout: ${chunkString}`);
|
|
100
119
|
});
|
|
101
120
|
(_b = childProcess.stderr) === null || _b === void 0 ? void 0 : _b.on("data", (chunk) => {
|
|
102
121
|
const chunkString = chunk.toString();
|
|
122
|
+
stderr.push(chunkString);
|
|
103
123
|
logger_1.logger.debug(`stderr: ${chunkString}`);
|
|
104
124
|
});
|
|
105
|
-
return Promise.resolve(
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
125
|
+
return Promise.resolve({
|
|
126
|
+
stderr,
|
|
127
|
+
stdout,
|
|
128
|
+
killProcess: async () => {
|
|
129
|
+
await (0, node_fetch_1.default)(`http://127.0.0.1:${port}/__/quitquitquit`);
|
|
130
|
+
const quitTimeout = setTimeout(() => {
|
|
131
|
+
if (!childProcess.killed) {
|
|
132
|
+
childProcess.kill("SIGKILL");
|
|
133
|
+
}
|
|
134
|
+
}, 10000);
|
|
135
|
+
clearTimeout(quitTimeout);
|
|
136
|
+
},
|
|
113
137
|
});
|
|
114
138
|
}
|
|
115
139
|
async discoverBuild(_configValues, envs) {
|
|
@@ -118,10 +142,14 @@ class Delegate {
|
|
|
118
142
|
const adminPort = await portfinder.getPortPromise({
|
|
119
143
|
port: 8081,
|
|
120
144
|
});
|
|
121
|
-
const killProcess = await this.serveAdmin(adminPort, envs);
|
|
145
|
+
const { killProcess, stderr } = await this.serveAdmin(adminPort, envs);
|
|
122
146
|
try {
|
|
123
147
|
discovered = await discovery.detectFromPort(adminPort, this.projectId, this.runtime);
|
|
124
148
|
}
|
|
149
|
+
catch (e) {
|
|
150
|
+
(0, utils_1.logLabeledWarning)("functions", `Failed to detect functions from source ${e}.\nstderr:${stderr.join("\n")}`);
|
|
151
|
+
throw e;
|
|
152
|
+
}
|
|
125
153
|
finally {
|
|
126
154
|
await killProcess();
|
|
127
155
|
}
|
|
@@ -113,10 +113,10 @@ async function convertConfig(context, functionsPayload, deploy) {
|
|
|
113
113
|
}
|
|
114
114
|
if (endpoint.platform === "gcfv1") {
|
|
115
115
|
if (!backend.isHttpsTriggered(endpoint) && !backend.isCallableTriggered(endpoint)) {
|
|
116
|
-
throw new error_1.FirebaseError(`Function ${endpoint.id} is a gen
|
|
116
|
+
throw new error_1.FirebaseError(`Function ${endpoint.id} is a 1st gen function and therefore must be an https function type`);
|
|
117
117
|
}
|
|
118
118
|
if (rewrite.function.pinTag) {
|
|
119
|
-
throw new error_1.FirebaseError(`Function ${endpoint.id} is a gen
|
|
119
|
+
throw new error_1.FirebaseError(`Function ${endpoint.id} is a 1st gen function and therefore does not support the ${(0, colorette_1.bold)("pinTag")} option`);
|
|
120
120
|
}
|
|
121
121
|
return Object.assign(Object.assign({}, target), { function: endpoint.id, functionRegion: endpoint.region });
|
|
122
122
|
}
|
|
@@ -137,9 +137,13 @@ async function convertConfig(context, functionsPayload, deploy) {
|
|
|
137
137
|
return Object.assign(Object.assign({}, target), { dynamicLinks: true });
|
|
138
138
|
}
|
|
139
139
|
if ("run" in rewrite) {
|
|
140
|
-
const apiRewrite = Object.assign(Object.assign({}, target), { run:
|
|
141
|
-
|
|
140
|
+
const apiRewrite = Object.assign(Object.assign({}, target), { run: {
|
|
141
|
+
serviceId: rewrite.run.serviceId,
|
|
142
|
+
region: rewrite.run.region || "us-central1",
|
|
143
|
+
} });
|
|
144
|
+
if (rewrite.run.pinTag) {
|
|
142
145
|
experiments.assertEnabled("pintags", "pin to a run service revision");
|
|
146
|
+
apiRewrite.run.tag = runTags.TODO_TAG_NAME;
|
|
143
147
|
}
|
|
144
148
|
return apiRewrite;
|
|
145
149
|
}
|
|
@@ -1,36 +1,94 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.unsafePins = exports.prepare = void 0;
|
|
3
|
+
exports.unsafePins = exports.prepare = exports.addPinnedFunctionsToOnlyString = exports.hasPinnedFunctions = void 0;
|
|
4
4
|
const error_1 = require("../../error");
|
|
5
5
|
const api = require("../../hosting/api");
|
|
6
6
|
const config = require("../../hosting/config");
|
|
7
7
|
const deploymentTool = require("../../deploymentTool");
|
|
8
|
+
const clc = require("colorette");
|
|
8
9
|
const functional_1 = require("../../functional");
|
|
9
10
|
const track_1 = require("../../track");
|
|
10
11
|
const utils = require("../../utils");
|
|
11
12
|
const backend = require("../functions/backend");
|
|
12
|
-
|
|
13
|
+
const ensureTargeted_1 = require("../../functions/ensureTargeted");
|
|
14
|
+
function handlePublicDirectoryFlag(options) {
|
|
13
15
|
if (options.public) {
|
|
14
16
|
if (Array.isArray(options.config.get("hosting"))) {
|
|
15
17
|
throw new error_1.FirebaseError("Cannot specify --public option with multi-site configuration.");
|
|
16
18
|
}
|
|
17
19
|
options.config.set("hosting.public", options.public);
|
|
18
20
|
}
|
|
21
|
+
}
|
|
22
|
+
function hasPinnedFunctions(options) {
|
|
23
|
+
handlePublicDirectoryFlag(options);
|
|
24
|
+
for (const c of config.hostingConfig(options)) {
|
|
25
|
+
for (const r of c.rewrites || []) {
|
|
26
|
+
if ("function" in r && typeof r.function === "object" && r.function.pinTag) {
|
|
27
|
+
return true;
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
return false;
|
|
32
|
+
}
|
|
33
|
+
exports.hasPinnedFunctions = hasPinnedFunctions;
|
|
34
|
+
/**
 * Ensures every function pinned by a Hosting rewrite is also targeted by the
 * --only string, so pinned functions are deployed alongside Hosting.
 * @param {*} context deploy context, passed to backend.existingBackend to look
 *     up already-deployed function endpoints.
 * @param {*} options CLI options; `options.only` is mutated in place to add
 *     targets for each pinned function.
 * @return {Promise<boolean>} true if at least one function was added to `options.only`.
 */
async function addPinnedFunctionsToOnlyString(context, options) {
    var _a;
    if (!options.only) {
        return false;
    }
    handlePublicDirectoryFlag(options);
    // Fix: the original re-awaited backend.existingBackend(context) inside the
    // inner rewrite loop for every pinned rewrite. Fetch it lazily (only when a
    // pinned rewrite is actually found, preserving the original "no fetch when
    // nothing is pinned" behavior) and at most once per call.
    let existingEndpoints;
    const addedFunctions = [];
    for (const c of config.hostingConfig(options)) {
        const addedFunctionsPerSite = [];
        for (const r of c.rewrites || []) {
            if (!("function" in r) || typeof r.function !== "object" || !r.function.pinTag) {
                continue;
            }
            if (!existingEndpoints) {
                existingEndpoints = (await backend.existingBackend(context)).endpoints;
            }
            const endpoint = (_a = existingEndpoints[r.function.region || "us-central1"]) === null || _a === void 0 ? void 0 : _a[r.function.functionId];
            if (endpoint) {
                options.only = (0, ensureTargeted_1.ensureTargeted)(options.only, endpoint.codebase || "default", endpoint.id);
            }
            else {
                // The function may not be deployed yet; target it by id so a
                // first-time deploy still picks it up.
                options.only = (0, ensureTargeted_1.ensureTargeted)(options.only, r.function.functionId);
            }
            addedFunctionsPerSite.push(r.function.functionId);
        }
        if (addedFunctionsPerSite.length) {
            utils.logLabeledBullet("hosting", "The following function(s) are pinned to site " +
                `${clc.bold(c.site)} and will be deployed as well: ` +
                addedFunctionsPerSite.map(clc.bold).join(","));
            addedFunctions.push(...addedFunctionsPerSite);
        }
    }
    return addedFunctions.length !== 0;
}
|
|
65
|
+
exports.addPinnedFunctionsToOnlyString = addPinnedFunctionsToOnlyString;
|
|
66
|
+
async function prepare(context, options) {
|
|
67
|
+
handlePublicDirectoryFlag(options);
|
|
19
68
|
const configs = config.hostingConfig(options);
|
|
20
69
|
if (configs.length === 0) {
|
|
21
70
|
return Promise.resolve();
|
|
22
71
|
}
|
|
23
72
|
const versions = await Promise.all(configs.map(async (config) => {
|
|
73
|
+
var _a, _b;
|
|
24
74
|
const labels = Object.assign({}, deploymentTool.labels());
|
|
25
75
|
if (config.webFramework) {
|
|
26
76
|
labels["firebase-web-framework"] = config.webFramework;
|
|
27
77
|
}
|
|
28
78
|
const unsafe = await unsafePins(context, config);
|
|
29
79
|
if (unsafe.length) {
|
|
30
|
-
const msg = `Cannot deploy site ${config.site} to channel
|
|
80
|
+
const msg = `Cannot deploy site ${clc.bold(config.site)} to channel ` +
|
|
81
|
+
`${clc.bold(context.hostingChannel)} because it would modify one or ` +
|
|
82
|
+
`more rewrites in "live" that are not pinned, breaking production. ` +
|
|
83
|
+
`Please pin "live" before pinning other channels.`;
|
|
31
84
|
utils.logLabeledError("Hosting", msg);
|
|
32
85
|
throw new Error(msg);
|
|
33
86
|
}
|
|
87
|
+
const runPins = (_b = (_a = config.rewrites) === null || _a === void 0 ? void 0 : _a.filter((r) => "run" in r && r.run.pinTag)) === null || _b === void 0 ? void 0 : _b.map((r) => r.run.serviceId);
|
|
88
|
+
if (runPins === null || runPins === void 0 ? void 0 : runPins.length) {
|
|
89
|
+
utils.logLabeledBullet("hosting", `The site ${clc.bold(config.site)} will pin rewrites to the current ` +
|
|
90
|
+
`latest revision of service(s) ${runPins.map(clc.bold).join(",")}`);
|
|
91
|
+
}
|
|
34
92
|
const version = {
|
|
35
93
|
status: "CREATED",
|
|
36
94
|
labels,
|
|
@@ -64,7 +122,7 @@ function rewriteTarget(source) {
|
|
|
64
122
|
}
|
|
65
123
|
}
|
|
66
124
|
async function unsafePins(context, config) {
|
|
67
|
-
var _a, _b, _c;
|
|
125
|
+
var _a, _b, _c, _d;
|
|
68
126
|
if ((context.hostingChannel || "live") === "live") {
|
|
69
127
|
return [];
|
|
70
128
|
}
|
|
@@ -76,7 +134,7 @@ async function unsafePins(context, config) {
|
|
|
76
134
|
}
|
|
77
135
|
if ("function" in rewrite && typeof rewrite.function === "object" && rewrite.function.pinTag) {
|
|
78
136
|
const region = rewrite.function.region || "us-central1";
|
|
79
|
-
const endpoint = (await backend.existingBackend(context)).endpoints[region][rewrite.function.functionId];
|
|
137
|
+
const endpoint = (_a = (await backend.existingBackend(context)).endpoints[region]) === null || _a === void 0 ? void 0 : _a[rewrite.function.functionId];
|
|
80
138
|
if (!endpoint) {
|
|
81
139
|
continue;
|
|
82
140
|
}
|
|
@@ -88,7 +146,7 @@ async function unsafePins(context, config) {
|
|
|
88
146
|
}
|
|
89
147
|
const channelConfig = await api.getChannel(context.projectId, config.site, "live");
|
|
90
148
|
const existingUntaggedRewrites = {};
|
|
91
|
-
for (const rewrite of ((
|
|
149
|
+
for (const rewrite of ((_d = (_c = (_b = channelConfig === null || channelConfig === void 0 ? void 0 : channelConfig.release) === null || _b === void 0 ? void 0 : _b.version) === null || _c === void 0 ? void 0 : _c.config) === null || _d === void 0 ? void 0 : _d.rewrites) || []) {
|
|
92
150
|
if ("run" in rewrite && !rewrite.run.tag) {
|
|
93
151
|
existingUntaggedRewrites[rewriteTarget(rewrite)] = `${rewrite.run.region}/${rewrite.run.serviceId}`;
|
|
94
152
|
}
|