firebase-tools 10.7.0 → 10.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/commands/ext-configure.js +26 -15
- package/lib/commands/ext-export.js +14 -5
- package/lib/commands/ext-install.js +31 -2
- package/lib/commands/ext-update.js +17 -10
- package/lib/commands/functions-delete.js +9 -2
- package/lib/commands/functions-secrets-set.js +1 -13
- package/lib/deploy/extensions/planner.js +12 -0
- package/lib/deploy/extensions/tasks.js +13 -0
- package/lib/deploy/functions/backend.js +67 -10
- package/lib/deploy/functions/build.js +28 -9
- package/lib/deploy/functions/checkIam.js +71 -56
- package/lib/deploy/functions/containerCleaner.js +8 -7
- package/lib/deploy/functions/deploy.js +49 -27
- package/lib/deploy/functions/functionsDeployHelper.js +48 -4
- package/lib/deploy/functions/prepare.js +125 -74
- package/lib/deploy/functions/pricing.js +2 -2
- package/lib/deploy/functions/release/executor.js +1 -1
- package/lib/deploy/functions/release/fabricator.js +94 -36
- package/lib/deploy/functions/release/index.js +16 -27
- package/lib/deploy/functions/release/planner.js +12 -7
- package/lib/deploy/functions/runtimes/discovery/v1alpha1.js +13 -1
- package/lib/deploy/functions/runtimes/golang/index.js +3 -0
- package/lib/deploy/functions/runtimes/node/index.js +7 -0
- package/lib/deploy/functions/runtimes/node/parseTriggers.js +108 -1
- package/lib/deploy/functions/services/storage.js +6 -12
- package/lib/deploy/functions/validate.js +58 -8
- package/lib/deploy/hosting/convertConfig.js +6 -4
- package/lib/emulator/auth/cloudFunctions.js +6 -2
- package/lib/emulator/auth/operations.js +0 -1
- package/lib/emulator/auth/server.js +8 -1
- package/lib/emulator/auth/state.js +27 -24
- package/lib/emulator/controller.js +12 -9
- package/lib/emulator/databaseEmulator.js +36 -3
- package/lib/emulator/downloadableEmulators.js +7 -7
- package/lib/emulator/extensionsEmulator.js +3 -0
- package/lib/emulator/functionsEmulator.js +11 -9
- package/lib/emulator/functionsEmulatorRuntime.js +1 -1
- package/lib/emulator/functionsEmulatorShared.js +5 -1
- package/lib/emulator/functionsEmulatorShell.js +2 -3
- package/lib/emulator/functionsEmulatorUtils.js +5 -1
- package/lib/emulator/pubsubEmulator.js +13 -9
- package/lib/emulator/storage/apis/firebase.js +26 -4
- package/lib/ensureApiEnabled.js +1 -1
- package/lib/extensions/askUserForEventsConfig.js +97 -0
- package/lib/extensions/export.js +7 -0
- package/lib/extensions/extensionsApi.js +47 -7
- package/lib/extensions/manifest.js +1 -1
- package/lib/extensions/paramHelper.js +2 -0
- package/lib/extensions/updateHelper.js +7 -1
- package/lib/extensions/warnings.js +11 -4
- package/lib/functions/projectConfig.js +13 -8
- package/lib/functionsShellCommandAction.js +1 -1
- package/lib/gcp/cloudfunctions.js +9 -2
- package/lib/gcp/cloudfunctionsv2.js +28 -10
- package/lib/gcp/serviceusage.js +24 -0
- package/lib/previews.js +1 -1
- package/lib/serve/functions.js +16 -19
- package/lib/throttler/throttler.js +2 -1
- package/npm-shrinkwrap.json +214 -527
- package/package.json +3 -3
- package/templates/extensions/typescript/package.lint.json +2 -1
- package/templates/extensions/typescript/package.nolint.json +2 -1
- package/templates/init/functions/typescript/package.lint.json +1 -0
- package/templates/init/functions/typescript/package.nolint.json +1 -0
|
@@ -1,8 +1,9 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.inferBlockingDetails = exports.inferDetailsFromExisting = exports.prepare = void 0;
|
|
3
|
+
exports.resolveCpu = exports.inferBlockingDetails = exports.inferDetailsFromExisting = exports.prepare = void 0;
|
|
4
4
|
const clc = require("cli-color");
|
|
5
5
|
const backend = require("./backend");
|
|
6
|
+
const build = require("./build");
|
|
6
7
|
const ensureApiEnabled = require("../../ensureApiEnabled");
|
|
7
8
|
const functionsConfig = require("../../functionsConfig");
|
|
8
9
|
const functionsEnv = require("../../functions/env");
|
|
@@ -22,19 +23,17 @@ const error_1 = require("../../error");
|
|
|
22
23
|
const projectConfig_1 = require("../../functions/projectConfig");
|
|
23
24
|
const previews_1 = require("../../previews");
|
|
24
25
|
const v1_1 = require("../../functions/events/v1");
|
|
26
|
+
const serviceusage_1 = require("../../gcp/serviceusage");
|
|
25
27
|
function hasUserConfig(config) {
|
|
26
28
|
return Object.keys(config).length > 1;
|
|
27
29
|
}
|
|
28
|
-
function hasDotenv(opts) {
|
|
29
|
-
return functionsEnv.hasUserEnvs(opts);
|
|
30
|
-
}
|
|
31
30
|
async function prepare(context, options, payload) {
|
|
32
31
|
const projectId = (0, projectUtils_1.needProjectId)(options);
|
|
33
32
|
const projectNumber = await (0, projectUtils_1.needProjectNumber)(options);
|
|
34
|
-
context.config = (0, projectConfig_1.normalizeAndValidate)(options.config.src.functions)
|
|
33
|
+
context.config = (0, projectConfig_1.normalizeAndValidate)(options.config.src.functions);
|
|
35
34
|
context.filters = (0, functionsDeployHelper_1.getEndpointFilters)(options);
|
|
36
|
-
|
|
37
|
-
|
|
35
|
+
const codebases = (0, functionsDeployHelper_1.targetCodebases)(context.config, context.filters);
|
|
36
|
+
if (codebases.length === 0) {
|
|
38
37
|
throw new error_1.FirebaseError("No function matches given --only filters. Aborting deployment.");
|
|
39
38
|
}
|
|
40
39
|
const checkAPIsEnabled = await Promise.all([
|
|
@@ -50,79 +49,106 @@ async function prepare(context, options, payload) {
|
|
|
50
49
|
if (checkAPIsEnabled[1]) {
|
|
51
50
|
runtimeConfig = Object.assign(Object.assign({}, runtimeConfig), (await (0, prepareFunctionsUpload_1.getFunctionsConfig)(projectId)));
|
|
52
51
|
}
|
|
53
|
-
|
|
54
|
-
const
|
|
55
|
-
|
|
56
|
-
|
|
52
|
+
context.sources = {};
|
|
53
|
+
const codebaseUsesEnvs = [];
|
|
54
|
+
const wantBackends = {};
|
|
55
|
+
for (const codebase of codebases) {
|
|
56
|
+
(0, utils_1.logLabeledBullet)("functions", `preparing codebase ${clc.bold(codebase)} for deployment`);
|
|
57
|
+
const config = (0, projectConfig_1.configForCodebase)(context.config, codebase);
|
|
58
|
+
const sourceDirName = config.source;
|
|
59
|
+
if (!sourceDirName) {
|
|
60
|
+
throw new error_1.FirebaseError(`No functions code detected at default location (./functions), and no functions source defined in firebase.json`);
|
|
61
|
+
}
|
|
62
|
+
const sourceDir = options.config.path(sourceDirName);
|
|
63
|
+
const delegateContext = {
|
|
64
|
+
projectId,
|
|
65
|
+
sourceDir,
|
|
66
|
+
projectDir: options.config.projectDir,
|
|
67
|
+
runtime: config.runtime || "",
|
|
68
|
+
};
|
|
69
|
+
const runtimeDelegate = await runtimes.getRuntimeDelegate(delegateContext);
|
|
70
|
+
logger_1.logger.debug(`Validating ${runtimeDelegate.name} source`);
|
|
71
|
+
await runtimeDelegate.validate();
|
|
72
|
+
logger_1.logger.debug(`Building ${runtimeDelegate.name} source`);
|
|
73
|
+
await runtimeDelegate.build();
|
|
74
|
+
const firebaseEnvs = functionsEnv.loadFirebaseEnvs(firebaseConfig, projectId);
|
|
75
|
+
const userEnvOpt = {
|
|
76
|
+
functionsSource: sourceDir,
|
|
77
|
+
projectId: projectId,
|
|
78
|
+
projectAlias: options.projectAlias,
|
|
79
|
+
};
|
|
80
|
+
const userEnvs = functionsEnv.loadUserEnvs(userEnvOpt);
|
|
81
|
+
const envs = Object.assign(Object.assign({}, userEnvs), firebaseEnvs);
|
|
82
|
+
let wantBackend;
|
|
83
|
+
if (previews_1.previews.functionsparams) {
|
|
84
|
+
const wantBuild = await runtimeDelegate.discoverBuild(runtimeConfig, firebaseEnvs);
|
|
85
|
+
wantBackend = build.resolveBackend(wantBuild, userEnvs);
|
|
86
|
+
}
|
|
87
|
+
else {
|
|
88
|
+
logger_1.logger.debug(`Analyzing ${runtimeDelegate.name} backend spec`);
|
|
89
|
+
wantBackend = await runtimeDelegate.discoverSpec(runtimeConfig, firebaseEnvs);
|
|
90
|
+
}
|
|
91
|
+
wantBackend.environmentVariables = envs;
|
|
92
|
+
for (const endpoint of backend.allEndpoints(wantBackend)) {
|
|
93
|
+
endpoint.environmentVariables = wantBackend.environmentVariables;
|
|
94
|
+
endpoint.codebase = codebase;
|
|
95
|
+
}
|
|
96
|
+
wantBackends[codebase] = wantBackend;
|
|
97
|
+
if (functionsEnv.hasUserEnvs(userEnvOpt)) {
|
|
98
|
+
codebaseUsesEnvs.push(codebase);
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
validate.endpointsAreUnique(wantBackends);
|
|
102
|
+
for (const [codebase, wantBackend] of Object.entries(wantBackends)) {
|
|
103
|
+
const config = (0, projectConfig_1.configForCodebase)(context.config, codebase);
|
|
104
|
+
const sourceDirName = config.source;
|
|
105
|
+
const sourceDir = options.config.path(sourceDirName);
|
|
106
|
+
const source = {};
|
|
107
|
+
if (backend.someEndpoint(wantBackend, () => true)) {
|
|
108
|
+
(0, utils_1.logLabeledBullet)("functions", `preparing ${clc.bold(sourceDirName)} directory for uploading...`);
|
|
109
|
+
}
|
|
110
|
+
if (backend.someEndpoint(wantBackend, (e) => e.platform === "gcfv2")) {
|
|
111
|
+
if (!previews_1.previews.functionsv2) {
|
|
112
|
+
throw new error_1.FirebaseError("This version of firebase-tools does not support Google Cloud " +
|
|
113
|
+
"Functions gen 2\n" +
|
|
114
|
+
"If Cloud Functions for Firebase gen 2 is still in alpha, sign up " +
|
|
115
|
+
"for the alpha program at " +
|
|
116
|
+
"https://services.google.com/fb/forms/firebasealphaprogram/\n" +
|
|
117
|
+
"If Cloud Functions for Firebase gen 2 is in beta, get the latest " +
|
|
118
|
+
"version of Firebse Tools with `npm i -g firebase-tools@latest`");
|
|
119
|
+
}
|
|
120
|
+
source.functionsSourceV2 = await (0, prepareFunctionsUpload_1.prepareFunctionsUpload)(sourceDir, config);
|
|
121
|
+
}
|
|
122
|
+
if (backend.someEndpoint(wantBackend, (e) => e.platform === "gcfv1")) {
|
|
123
|
+
source.functionsSourceV1 = await (0, prepareFunctionsUpload_1.prepareFunctionsUpload)(sourceDir, config, runtimeConfig);
|
|
124
|
+
}
|
|
125
|
+
context.sources[codebase] = source;
|
|
126
|
+
}
|
|
127
|
+
payload.functions = {};
|
|
128
|
+
const haveBackends = (0, functionsDeployHelper_1.groupEndpointsByCodebase)(wantBackends, backend.allEndpoints(await backend.existingBackend(context)));
|
|
129
|
+
for (const [codebase, wantBackend] of Object.entries(wantBackends)) {
|
|
130
|
+
const haveBackend = haveBackends[codebase] || backend.empty();
|
|
131
|
+
payload.functions[codebase] = { wantBackend, haveBackend };
|
|
132
|
+
}
|
|
133
|
+
for (const [codebase, { wantBackend, haveBackend }] of Object.entries(payload.functions)) {
|
|
134
|
+
inferDetailsFromExisting(wantBackend, haveBackend, codebaseUsesEnvs.includes(codebase));
|
|
135
|
+
await (0, triggerRegionHelper_1.ensureTriggerRegions)(wantBackend);
|
|
136
|
+
resolveCpu(wantBackend);
|
|
137
|
+
validate.endpointsAreValid(wantBackend);
|
|
138
|
+
inferBlockingDetails(wantBackend);
|
|
57
139
|
}
|
|
58
|
-
const sourceDir = options.config.path(sourceDirName);
|
|
59
|
-
const delegateContext = {
|
|
60
|
-
projectId,
|
|
61
|
-
sourceDir,
|
|
62
|
-
projectDir: options.config.projectDir,
|
|
63
|
-
runtime: context.config.runtime || "",
|
|
64
|
-
};
|
|
65
|
-
const runtimeDelegate = await runtimes.getRuntimeDelegate(delegateContext);
|
|
66
|
-
logger_1.logger.debug(`Validating ${runtimeDelegate.name} source`);
|
|
67
|
-
await runtimeDelegate.validate();
|
|
68
|
-
logger_1.logger.debug(`Building ${runtimeDelegate.name} source`);
|
|
69
|
-
await runtimeDelegate.build();
|
|
70
|
-
const firebaseEnvs = functionsEnv.loadFirebaseEnvs(firebaseConfig, projectId);
|
|
71
|
-
const userEnvOpt = {
|
|
72
|
-
functionsSource: sourceDir,
|
|
73
|
-
projectId: projectId,
|
|
74
|
-
projectAlias: options.projectAlias,
|
|
75
|
-
};
|
|
76
|
-
const userEnvs = functionsEnv.loadUserEnvs(userEnvOpt);
|
|
77
|
-
const usedDotenv = hasDotenv(userEnvOpt);
|
|
78
140
|
const tag = hasUserConfig(runtimeConfig)
|
|
79
|
-
?
|
|
141
|
+
? codebaseUsesEnvs.length > 0
|
|
80
142
|
? "mixed"
|
|
81
143
|
: "runtime_config"
|
|
82
|
-
:
|
|
144
|
+
: codebaseUsesEnvs.length > 0
|
|
83
145
|
? "dotenv"
|
|
84
146
|
: "none";
|
|
85
147
|
void (0, track_1.track)("functions_codebase_deploy_env_method", tag);
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
wantBackend
|
|
89
|
-
|
|
90
|
-
endpoint.environmentVariables = wantBackend.environmentVariables;
|
|
91
|
-
endpoint.codebase = context.config.codebase;
|
|
92
|
-
}
|
|
93
|
-
const source = {};
|
|
94
|
-
if (backend.someEndpoint(wantBackend, () => true)) {
|
|
95
|
-
(0, utils_1.logLabeledBullet)("functions", `preparing ${clc.bold(sourceDirName)} directory for uploading...`);
|
|
96
|
-
}
|
|
97
|
-
if (backend.someEndpoint(wantBackend, (e) => e.platform === "gcfv2")) {
|
|
98
|
-
if (!previews_1.previews.functionsv2) {
|
|
99
|
-
throw new error_1.FirebaseError("This version of firebase-tools does not support Google Cloud " +
|
|
100
|
-
"Functions gen 2\n" +
|
|
101
|
-
"If Cloud Functions for Firebase gen 2 is still in alpha, sign up " +
|
|
102
|
-
"for the alpha program at " +
|
|
103
|
-
"https://services.google.com/fb/forms/firebasealphaprogram/\n" +
|
|
104
|
-
"If Cloud Functions for Firebase gen 2 is in beta, get the latest " +
|
|
105
|
-
"version of Firebse Tools with `npm i -g firebase-tools@latest`");
|
|
106
|
-
}
|
|
107
|
-
source.functionsSourceV2 = await (0, prepareFunctionsUpload_1.prepareFunctionsUpload)(sourceDir, context.config);
|
|
108
|
-
}
|
|
109
|
-
if (backend.someEndpoint(wantBackend, (e) => e.platform === "gcfv1")) {
|
|
110
|
-
source.functionsSourceV1 = await (0, prepareFunctionsUpload_1.prepareFunctionsUpload)(sourceDir, context.config, runtimeConfig);
|
|
111
|
-
}
|
|
112
|
-
context.source = source;
|
|
113
|
-
const wantEndpointNames = backend.allEndpoints(wantBackend).map((e) => backend.functionName(e));
|
|
114
|
-
const haveBackend = backend.matchingBackend(await backend.existingBackend(context), (endpoint) => {
|
|
115
|
-
var _a;
|
|
116
|
-
if (endpoint.codebase === ((_a = context.config) === null || _a === void 0 ? void 0 : _a.codebase)) {
|
|
117
|
-
return true;
|
|
118
|
-
}
|
|
119
|
-
return wantEndpointNames.includes(backend.functionName(endpoint));
|
|
120
|
-
});
|
|
121
|
-
inferDetailsFromExisting(wantBackend, haveBackend, usedDotenv);
|
|
122
|
-
await (0, triggerRegionHelper_1.ensureTriggerRegions)(wantBackend);
|
|
123
|
-
validate.endpointsAreValid(wantBackend);
|
|
124
|
-
inferBlockingDetails(wantBackend);
|
|
125
|
-
payload.functions = { wantBackend: wantBackend, haveBackend: haveBackend };
|
|
148
|
+
const codebaseCnt = Object.keys(payload.functions).length;
|
|
149
|
+
void (0, track_1.track)("functions_codebase_deploy_count", codebaseCnt >= 5 ? "5+" : codebaseCnt.toString());
|
|
150
|
+
const wantBackend = backend.merge(...Object.values(wantBackends));
|
|
151
|
+
const haveBackend = backend.merge(...Object.values(haveBackends));
|
|
126
152
|
await Promise.all(Object.values(wantBackend.requiredAPIs).map(({ api }) => {
|
|
127
153
|
return ensureApiEnabled.ensure(projectId, api, "functions", false);
|
|
128
154
|
}));
|
|
@@ -138,6 +164,11 @@ async function prepare(context, options, payload) {
|
|
|
138
164
|
return ensureApiEnabled.ensure(context.projectId, api, "functions");
|
|
139
165
|
});
|
|
140
166
|
await Promise.all(enablements);
|
|
167
|
+
const services = ["pubsub.googleapis.com", "eventarc.googleapis.com"];
|
|
168
|
+
const generateServiceAccounts = services.map((service) => {
|
|
169
|
+
return (0, serviceusage_1.generateServiceIdentity)(projectNumber, service, "functions");
|
|
170
|
+
});
|
|
171
|
+
await Promise.all(generateServiceAccounts);
|
|
141
172
|
}
|
|
142
173
|
const matchingBackend = backend.matchingBackend(wantBackend, (endpoint) => {
|
|
143
174
|
return (0, functionsDeployHelper_1.endpointMatchesAnyFilter)(endpoint, context.filters);
|
|
@@ -145,7 +176,7 @@ async function prepare(context, options, payload) {
|
|
|
145
176
|
await (0, prompts_1.promptForFailurePolicies)(options, matchingBackend, haveBackend);
|
|
146
177
|
await (0, prompts_1.promptForMinInstances)(options, matchingBackend, haveBackend);
|
|
147
178
|
await backend.checkAvailability(context, matchingBackend);
|
|
148
|
-
await (0, checkIam_1.ensureServiceAgentRoles)(projectNumber, matchingBackend, haveBackend);
|
|
179
|
+
await (0, checkIam_1.ensureServiceAgentRoles)(projectId, projectNumber, matchingBackend, haveBackend);
|
|
149
180
|
await validate.secretsAreValid(projectId, matchingBackend);
|
|
150
181
|
await ensure.secretAccess(projectId, matchingBackend, haveBackend);
|
|
151
182
|
}
|
|
@@ -163,6 +194,12 @@ function inferDetailsFromExisting(want, have, usedDotenv) {
|
|
|
163
194
|
if (!wantE.availableMemoryMb && haveE.availableMemoryMb) {
|
|
164
195
|
wantE.availableMemoryMb = haveE.availableMemoryMb;
|
|
165
196
|
}
|
|
197
|
+
if (!wantE.concurrency && haveE.concurrency) {
|
|
198
|
+
wantE.concurrency = haveE.concurrency;
|
|
199
|
+
}
|
|
200
|
+
if (!wantE.cpu && haveE.cpu) {
|
|
201
|
+
wantE.cpu = haveE.cpu;
|
|
202
|
+
}
|
|
166
203
|
wantE.securityLevel = haveE.securityLevel ? haveE.securityLevel : "SECURE_ALWAYS";
|
|
167
204
|
maybeCopyTriggerRegion(wantE, haveE);
|
|
168
205
|
}
|
|
@@ -208,3 +245,17 @@ function inferBlockingDetails(want) {
|
|
|
208
245
|
}
|
|
209
246
|
}
|
|
210
247
|
exports.inferBlockingDetails = inferBlockingDetails;
|
|
248
|
+
function resolveCpu(want) {
|
|
249
|
+
for (const e of backend.allEndpoints(want)) {
|
|
250
|
+
if (e.platform === "gcfv1") {
|
|
251
|
+
continue;
|
|
252
|
+
}
|
|
253
|
+
if (e.cpu === "gcf_gen1") {
|
|
254
|
+
e.cpu = backend.memoryToGen1Cpu(e.availableMemoryMb || backend.DEFAULT_MEMORY);
|
|
255
|
+
}
|
|
256
|
+
else if (!e.cpu) {
|
|
257
|
+
e.cpu = backend.memoryToGen2Cpu(e.availableMemoryMb || backend.DEFAULT_MEMORY);
|
|
258
|
+
}
|
|
259
|
+
}
|
|
260
|
+
}
|
|
261
|
+
exports.resolveCpu = resolveCpu;
|
|
@@ -145,12 +145,12 @@ function monthlyMinInstanceCost(endpoints) {
|
|
|
145
145
|
usage["gcfv1"][tier].cpu + cpu * SECONDS_PER_MONTH * endpoint.minInstances;
|
|
146
146
|
}
|
|
147
147
|
else {
|
|
148
|
-
const cpu = 1;
|
|
149
148
|
const tier = V2_REGION_TO_TIER[endpoint.region];
|
|
150
149
|
usage["gcfv2"][tier].ram =
|
|
151
150
|
usage["gcfv2"][tier].ram + ramGb * SECONDS_PER_MONTH * endpoint.minInstances;
|
|
152
151
|
usage["gcfv2"][tier].cpu =
|
|
153
|
-
usage["gcfv2"][tier].cpu +
|
|
152
|
+
usage["gcfv2"][tier].cpu +
|
|
153
|
+
endpoint.cpu * SECONDS_PER_MONTH * endpoint.minInstances;
|
|
154
154
|
}
|
|
155
155
|
}
|
|
156
156
|
let v1MemoryBill = usage["gcfv1"][1].ram * exports.V1_RATES.memoryGb[1] + usage["gcfv1"][2].ram * exports.V1_RATES.memoryGb[2];
|
|
@@ -13,7 +13,7 @@ async function handler(op) {
|
|
|
13
13
|
((_b = (_a = err.context) === null || _a === void 0 ? void 0 : _a.response) === null || _b === void 0 ? void 0 : _b.statusCode) ||
|
|
14
14
|
((_c = err.original) === null || _c === void 0 ? void 0 : _c.code) ||
|
|
15
15
|
((_f = (_e = (_d = err.original) === null || _d === void 0 ? void 0 : _d.context) === null || _e === void 0 ? void 0 : _e.response) === null || _f === void 0 ? void 0 : _f.statusCode);
|
|
16
|
-
if (code === 429 || code === 409) {
|
|
16
|
+
if (code === 429 || code === 409 || code === 503) {
|
|
17
17
|
throw err;
|
|
18
18
|
}
|
|
19
19
|
op.error = err;
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.Fabricator = void 0;
|
|
3
|
+
exports.serviceIsResolved = exports.Fabricator = void 0;
|
|
4
4
|
const clc = require("cli-color");
|
|
5
5
|
const error_1 = require("../../../error");
|
|
6
6
|
const sourceTokenScraper_1 = require("./sourceTokenScraper");
|
|
@@ -23,6 +23,7 @@ const scheduler = require("../../../gcp/cloudscheduler");
|
|
|
23
23
|
const utils = require("../../../utils");
|
|
24
24
|
const services = require("../services");
|
|
25
25
|
const v1_1 = require("../../../functions/events/v1");
|
|
26
|
+
const throttler_1 = require("../../../throttler/throttler");
|
|
26
27
|
const gcfV1PollerOptions = {
|
|
27
28
|
apiOrigin: api_1.functionsOrigin,
|
|
28
29
|
apiVersion: gcf.API_VERSION,
|
|
@@ -44,8 +45,7 @@ class Fabricator {
|
|
|
44
45
|
constructor(args) {
|
|
45
46
|
this.executor = args.executor;
|
|
46
47
|
this.functionExecutor = args.functionExecutor;
|
|
47
|
-
this.
|
|
48
|
-
this.storage = args.storage;
|
|
48
|
+
this.sources = args.sources;
|
|
49
49
|
this.appEngineLocation = args.appEngineLocation;
|
|
50
50
|
}
|
|
51
51
|
async applyPlan(plan) {
|
|
@@ -154,12 +154,13 @@ class Fabricator {
|
|
|
154
154
|
}
|
|
155
155
|
}
|
|
156
156
|
async createV1Function(endpoint, scraper) {
|
|
157
|
-
var _a;
|
|
158
|
-
|
|
157
|
+
var _a, _b;
|
|
158
|
+
const sourceUrl = (_a = this.sources[endpoint.codebase]) === null || _a === void 0 ? void 0 : _a.sourceUrl;
|
|
159
|
+
if (!sourceUrl) {
|
|
159
160
|
logger_1.logger.debug("Precondition failed. Cannot create a GCF function without sourceUrl");
|
|
160
161
|
throw new Error("Precondition failed");
|
|
161
162
|
}
|
|
162
|
-
const apiFunction = gcf.functionFromEndpoint(endpoint,
|
|
163
|
+
const apiFunction = gcf.functionFromEndpoint(endpoint, sourceUrl);
|
|
163
164
|
if (apiFunction.httpsTrigger) {
|
|
164
165
|
apiFunction.httpsTrigger.securityLevel = "SECURE_ALWAYS";
|
|
165
166
|
}
|
|
@@ -167,10 +168,10 @@ class Fabricator {
|
|
|
167
168
|
const resultFunction = await this.functionExecutor
|
|
168
169
|
.run(async () => {
|
|
169
170
|
const op = await gcf.createFunction(apiFunction);
|
|
170
|
-
return poller.pollOperation(Object.assign(Object.assign({}, gcfV1PollerOptions), { pollerName: `create-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name, onPoll: scraper.poller }));
|
|
171
|
+
return poller.pollOperation(Object.assign(Object.assign({}, gcfV1PollerOptions), { pollerName: `create-${endpoint.codebase}-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name, onPoll: scraper.poller }));
|
|
171
172
|
})
|
|
172
173
|
.catch(rethrowAs(endpoint, "create"));
|
|
173
|
-
endpoint.uri = (
|
|
174
|
+
endpoint.uri = (_b = resultFunction === null || resultFunction === void 0 ? void 0 : resultFunction.httpsTrigger) === null || _b === void 0 ? void 0 : _b.url;
|
|
174
175
|
if (backend.isHttpsTriggered(endpoint)) {
|
|
175
176
|
const invoker = endpoint.httpsTrigger.invoker || ["public"];
|
|
176
177
|
if (!invoker.includes("private")) {
|
|
@@ -208,13 +209,14 @@ class Fabricator {
|
|
|
208
209
|
}
|
|
209
210
|
}
|
|
210
211
|
async createV2Function(endpoint) {
|
|
211
|
-
var _a;
|
|
212
|
-
|
|
212
|
+
var _a, _b;
|
|
213
|
+
const storage = (_a = this.sources[endpoint.codebase]) === null || _a === void 0 ? void 0 : _a.storage;
|
|
214
|
+
if (!storage) {
|
|
213
215
|
logger_1.logger.debug("Precondition failed. Cannot create a GCFv2 function without storage");
|
|
214
216
|
throw new Error("Precondition failed");
|
|
215
217
|
}
|
|
216
|
-
const apiFunction = gcfV2.functionFromEndpoint(endpoint,
|
|
217
|
-
const topic = (
|
|
218
|
+
const apiFunction = gcfV2.functionFromEndpoint(endpoint, storage);
|
|
219
|
+
const topic = (_b = apiFunction.eventTrigger) === null || _b === void 0 ? void 0 : _b.pubsubTopic;
|
|
218
220
|
if (topic) {
|
|
219
221
|
await this.executor
|
|
220
222
|
.run(async () => {
|
|
@@ -235,7 +237,7 @@ class Fabricator {
|
|
|
235
237
|
const resultFunction = await this.functionExecutor
|
|
236
238
|
.run(async () => {
|
|
237
239
|
const op = await gcfV2.createFunction(apiFunction);
|
|
238
|
-
return await poller.pollOperation(Object.assign(Object.assign({}, gcfV2PollerOptions), { pollerName: `create-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name }));
|
|
240
|
+
return await poller.pollOperation(Object.assign(Object.assign({}, gcfV2PollerOptions), { pollerName: `create-${endpoint.codebase}-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name }));
|
|
239
241
|
})
|
|
240
242
|
.catch(rethrowAs(endpoint, "create"));
|
|
241
243
|
endpoint.uri = resultFunction.serviceConfig.uri;
|
|
@@ -270,25 +272,34 @@ class Fabricator {
|
|
|
270
272
|
.catch(rethrowAs(endpoint, "set invoker"));
|
|
271
273
|
}
|
|
272
274
|
const mem = endpoint.availableMemoryMb || backend.DEFAULT_MEMORY;
|
|
273
|
-
|
|
274
|
-
|
|
275
|
+
const hasCustomCPU = endpoint.cpu !== backend.memoryToGen1Cpu(mem);
|
|
276
|
+
if (!endpoint.concurrency) {
|
|
277
|
+
endpoint.concurrency =
|
|
278
|
+
endpoint.cpu >= backend.MIN_CPU_FOR_CONCURRENCY
|
|
279
|
+
? backend.DEFAULT_CONCURRENCY
|
|
280
|
+
: 1;
|
|
281
|
+
}
|
|
282
|
+
const hasConcurrency = endpoint.concurrency !== 1;
|
|
283
|
+
if (hasCustomCPU || hasConcurrency) {
|
|
284
|
+
await this.setRunTraits(serviceName, endpoint);
|
|
275
285
|
}
|
|
276
286
|
}
|
|
277
287
|
async updateV1Function(endpoint, scraper) {
|
|
278
|
-
var _a;
|
|
279
|
-
|
|
288
|
+
var _a, _b;
|
|
289
|
+
const sourceUrl = (_a = this.sources[endpoint.codebase]) === null || _a === void 0 ? void 0 : _a.sourceUrl;
|
|
290
|
+
if (!sourceUrl) {
|
|
280
291
|
logger_1.logger.debug("Precondition failed. Cannot update a GCF function without sourceUrl");
|
|
281
292
|
throw new Error("Precondition failed");
|
|
282
293
|
}
|
|
283
|
-
const apiFunction = gcf.functionFromEndpoint(endpoint,
|
|
294
|
+
const apiFunction = gcf.functionFromEndpoint(endpoint, sourceUrl);
|
|
284
295
|
apiFunction.sourceToken = await scraper.tokenPromise();
|
|
285
296
|
const resultFunction = await this.functionExecutor
|
|
286
297
|
.run(async () => {
|
|
287
298
|
const op = await gcf.updateFunction(apiFunction);
|
|
288
|
-
return await poller.pollOperation(Object.assign(Object.assign({}, gcfV1PollerOptions), { pollerName: `update-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name, onPoll: scraper.poller }));
|
|
299
|
+
return await poller.pollOperation(Object.assign(Object.assign({}, gcfV1PollerOptions), { pollerName: `update-${endpoint.codebase}-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name, onPoll: scraper.poller }));
|
|
289
300
|
})
|
|
290
301
|
.catch(rethrowAs(endpoint, "update"));
|
|
291
|
-
endpoint.uri = (
|
|
302
|
+
endpoint.uri = (_b = resultFunction === null || resultFunction === void 0 ? void 0 : resultFunction.httpsTrigger) === null || _b === void 0 ? void 0 : _b.url;
|
|
292
303
|
let invoker;
|
|
293
304
|
if (backend.isHttpsTriggered(endpoint)) {
|
|
294
305
|
invoker = endpoint.httpsTrigger.invoker;
|
|
@@ -307,19 +318,20 @@ class Fabricator {
|
|
|
307
318
|
}
|
|
308
319
|
}
|
|
309
320
|
async updateV2Function(endpoint) {
|
|
310
|
-
var _a;
|
|
311
|
-
|
|
321
|
+
var _a, _b;
|
|
322
|
+
const storage = (_a = this.sources[endpoint.codebase]) === null || _a === void 0 ? void 0 : _a.storage;
|
|
323
|
+
if (!storage) {
|
|
312
324
|
logger_1.logger.debug("Precondition failed. Cannot update a GCFv2 function without storage");
|
|
313
325
|
throw new Error("Precondition failed");
|
|
314
326
|
}
|
|
315
|
-
const apiFunction = gcfV2.functionFromEndpoint(endpoint,
|
|
316
|
-
if ((
|
|
327
|
+
const apiFunction = gcfV2.functionFromEndpoint(endpoint, storage);
|
|
328
|
+
if ((_b = apiFunction.eventTrigger) === null || _b === void 0 ? void 0 : _b.pubsubTopic) {
|
|
317
329
|
delete apiFunction.eventTrigger.pubsubTopic;
|
|
318
330
|
}
|
|
319
331
|
const resultFunction = await this.functionExecutor
|
|
320
332
|
.run(async () => {
|
|
321
333
|
const op = await gcfV2.updateFunction(apiFunction);
|
|
322
|
-
return await poller.pollOperation(Object.assign(Object.assign({}, gcfV2PollerOptions), { pollerName: `update-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name }));
|
|
334
|
+
return await poller.pollOperation(Object.assign(Object.assign({}, gcfV2PollerOptions), { pollerName: `update-${endpoint.codebase}-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name }));
|
|
323
335
|
})
|
|
324
336
|
.catch(rethrowAs(endpoint, "update"));
|
|
325
337
|
endpoint.uri = resultFunction.serviceConfig.uri;
|
|
@@ -340,8 +352,17 @@ class Fabricator {
|
|
|
340
352
|
.run(() => run.setInvokerUpdate(endpoint.project, serviceName, invoker))
|
|
341
353
|
.catch(rethrowAs(endpoint, "set invoker"));
|
|
342
354
|
}
|
|
343
|
-
|
|
344
|
-
|
|
355
|
+
const hasCustomCPU = endpoint.cpu !==
|
|
356
|
+
backend.memoryToGen1Cpu(endpoint.availableMemoryMb || backend.DEFAULT_MEMORY);
|
|
357
|
+
const explicitConcurrency = endpoint.concurrency !== undefined;
|
|
358
|
+
if (hasCustomCPU || explicitConcurrency) {
|
|
359
|
+
if (endpoint.concurrency === undefined) {
|
|
360
|
+
endpoint.concurrency =
|
|
361
|
+
endpoint.cpu < backend.MIN_CPU_FOR_CONCURRENCY
|
|
362
|
+
? 1
|
|
363
|
+
: backend.DEFAULT_CONCURRENCY;
|
|
364
|
+
}
|
|
365
|
+
await this.setRunTraits(serviceName, endpoint);
|
|
345
366
|
}
|
|
346
367
|
}
|
|
347
368
|
async deleteV1Function(endpoint) {
|
|
@@ -349,7 +370,7 @@ class Fabricator {
|
|
|
349
370
|
await this.functionExecutor
|
|
350
371
|
.run(async () => {
|
|
351
372
|
const op = await gcf.deleteFunction(fnName);
|
|
352
|
-
const pollerOptions = Object.assign(Object.assign({}, gcfV1PollerOptions), { pollerName: `delete-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name });
|
|
373
|
+
const pollerOptions = Object.assign(Object.assign({}, gcfV1PollerOptions), { pollerName: `delete-${endpoint.codebase}-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name });
|
|
353
374
|
await poller.pollOperation(pollerOptions);
|
|
354
375
|
})
|
|
355
376
|
.catch(rethrowAs(endpoint, "delete"));
|
|
@@ -359,23 +380,37 @@ class Fabricator {
|
|
|
359
380
|
await this.functionExecutor
|
|
360
381
|
.run(async () => {
|
|
361
382
|
const op = await gcfV2.deleteFunction(fnName);
|
|
362
|
-
const pollerOptions = Object.assign(Object.assign({}, gcfV2PollerOptions), { pollerName: `delete-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name });
|
|
383
|
+
const pollerOptions = Object.assign(Object.assign({}, gcfV2PollerOptions), { pollerName: `delete-${endpoint.codebase}-${endpoint.region}-${endpoint.id}`, operationResourceName: op.name });
|
|
363
384
|
await poller.pollOperation(pollerOptions);
|
|
364
385
|
})
|
|
365
386
|
.catch(rethrowAs(endpoint, "delete"));
|
|
366
387
|
}
|
|
367
|
-
async
|
|
388
|
+
async setRunTraits(serviceName, endpoint) {
|
|
368
389
|
await this.functionExecutor
|
|
369
390
|
.run(async () => {
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
391
|
+
let service = await run.getService(serviceName);
|
|
392
|
+
let changed = false;
|
|
393
|
+
if (service.spec.template.spec.containerConcurrency !== endpoint.concurrency) {
|
|
394
|
+
service.spec.template.spec.containerConcurrency = endpoint.concurrency;
|
|
395
|
+
changed = true;
|
|
396
|
+
}
|
|
397
|
+
if (+service.spec.template.spec.containers[0].resources.limits.cpu !== endpoint.cpu) {
|
|
398
|
+
service.spec.template.spec.containers[0].resources.limits.cpu = `${endpoint.cpu}`;
|
|
399
|
+
changed = true;
|
|
400
|
+
}
|
|
401
|
+
if (!changed) {
|
|
402
|
+
logger_1.logger.debug("Skipping setRunTraits on", serviceName, " because it already matches");
|
|
373
403
|
return;
|
|
374
404
|
}
|
|
375
405
|
delete service.status;
|
|
376
406
|
delete service.spec.template.metadata.name;
|
|
377
|
-
service
|
|
378
|
-
|
|
407
|
+
service = await run.replaceService(serviceName, service);
|
|
408
|
+
let retry = 0;
|
|
409
|
+
while (!exports.serviceIsResolved(service)) {
|
|
410
|
+
await (0, throttler_1.backoff)(retry, 2, 30);
|
|
411
|
+
retry = retry + 1;
|
|
412
|
+
service = await run.getService(serviceName);
|
|
413
|
+
}
|
|
379
414
|
})
|
|
380
415
|
.catch(rethrowAs(endpoint, "set concurrency"));
|
|
381
416
|
}
|
|
@@ -471,7 +506,7 @@ class Fabricator {
|
|
|
471
506
|
logOpStart(op, endpoint) {
|
|
472
507
|
const runtime = (0, runtimes_1.getHumanFriendlyRuntimeName)(endpoint.runtime);
|
|
473
508
|
const label = helper.getFunctionLabel(endpoint);
|
|
474
|
-
utils.
|
|
509
|
+
utils.logLabeledBullet("functions", `${op} ${runtime} function ${clc.bold(label)}...`);
|
|
475
510
|
}
|
|
476
511
|
logOpSuccess(op, endpoint) {
|
|
477
512
|
const label = helper.getFunctionLabel(endpoint);
|
|
@@ -479,3 +514,26 @@ class Fabricator {
|
|
|
479
514
|
}
|
|
480
515
|
}
|
|
481
516
|
exports.Fabricator = Fabricator;
|
|
517
|
+
/**
 * Determines whether a Cloud Run service has finished rolling out its latest
 * spec generation.
 *
 * @param service a Cloud Run service resource (with `metadata` and `status`).
 * @return true when the "Ready" condition is "True" for the currently observed
 *     generation; false while the rollout is still in progress.
 * @throws FirebaseError when the "Ready" condition reports an unexpected
 *     status (e.g. "False"), which suggests the rollout failed.
 */
function serviceIsResolved(service) {
    var _a, _b, _c, _d, _e;
    // The controller has not yet observed the latest spec edit; keep waiting.
    if (((_a = service.status) === null || _a === void 0 ? void 0 : _a.observedGeneration) !== service.metadata.generation) {
        // BUGFIX: the original concatenation was missing a space after
        // "because", logging "becauseobserved generation …".
        logger_1.logger.debug(`Service ${service.metadata.name} is not resolved because ` +
            `observed generation ${(_b = service.status) === null || _b === void 0 ? void 0 : _b.observedGeneration} does not ` +
            `match spec generation ${service.metadata.generation}`);
        return false;
    }
    const readyCondition = (_d = (_c = service.status) === null || _c === void 0 ? void 0 : _c.conditions) === null || _d === void 0 ? void 0 : _d.find((condition) => {
        return condition.type === "Ready";
    });
    if ((readyCondition === null || readyCondition === void 0 ? void 0 : readyCondition.status) === "Unknown") {
        // Rollout still in flight; caller should poll again.
        logger_1.logger.debug(`Waiting for service ${service.metadata.name} to be ready. ` +
            `Status is ${JSON.stringify((_e = service.status) === null || _e === void 0 ? void 0 : _e.conditions)}`);
        return false;
    }
    else if ((readyCondition === null || readyCondition === void 0 ? void 0 : readyCondition.status) === "True") {
        return true;
    }
    // Anything else ("False", or no Ready condition at all) likely means the
    // rollout failed; surface an error instead of polling forever.
    logger_1.logger.debug(`Service ${service.metadata.name} has unexpected ready status ${JSON.stringify(readyCondition)}. It may have failed rollout.`);
    throw new error_1.FirebaseError(`Unexpected Status ${readyCondition === null || readyCondition === void 0 ? void 0 : readyCondition.status} for service ${service.metadata.name}`);
}
|
|
539
|
+
exports.serviceIsResolved = serviceIsResolved;
|
|
@@ -11,12 +11,9 @@ const fabricator = require("./fabricator");
|
|
|
11
11
|
const reporter = require("./reporter");
|
|
12
12
|
const executor = require("./executor");
|
|
13
13
|
const prompts = require("../prompts");
|
|
14
|
-
const secrets = require("../../../functions/secrets");
|
|
15
14
|
const functionsConfig_1 = require("../../../functionsConfig");
|
|
16
15
|
const functionsDeployHelper_1 = require("../functionsDeployHelper");
|
|
17
16
|
const error_1 = require("../../../error");
|
|
18
|
-
const projectUtils_1 = require("../../../projectUtils");
|
|
19
|
-
const utils_1 = require("../../../utils");
|
|
20
17
|
async function release(context, options, payload) {
|
|
21
18
|
if (!context.config) {
|
|
22
19
|
return;
|
|
@@ -24,8 +21,18 @@ async function release(context, options, payload) {
|
|
|
24
21
|
if (!payload.functions) {
|
|
25
22
|
return;
|
|
26
23
|
}
|
|
27
|
-
|
|
28
|
-
|
|
24
|
+
if (!context.sources) {
|
|
25
|
+
return;
|
|
26
|
+
}
|
|
27
|
+
let plan = {};
|
|
28
|
+
for (const [codebase, { wantBackend, haveBackend }] of Object.entries(payload.functions)) {
|
|
29
|
+
plan = Object.assign(Object.assign({}, plan), planner.createDeploymentPlan({
|
|
30
|
+
codebase,
|
|
31
|
+
wantBackend,
|
|
32
|
+
haveBackend,
|
|
33
|
+
filters: context.filters,
|
|
34
|
+
}));
|
|
35
|
+
}
|
|
29
36
|
const fnsToDelete = Object.values(plan)
|
|
30
37
|
.map((regionalChanges) => regionalChanges.endpointsToDelete)
|
|
31
38
|
.reduce(functional_1.reduceFlat, []);
|
|
@@ -44,15 +51,15 @@ async function release(context, options, payload) {
|
|
|
44
51
|
const fab = new fabricator.Fabricator({
|
|
45
52
|
functionExecutor,
|
|
46
53
|
executor: new executor.QueueExecutor({}),
|
|
47
|
-
|
|
48
|
-
storage: context.source.storage,
|
|
54
|
+
sources: context.sources,
|
|
49
55
|
appEngineLocation: (0, functionsConfig_1.getAppEngineLocation)(context.firebaseConfig),
|
|
50
56
|
});
|
|
51
57
|
const summary = await fab.applyPlan(plan);
|
|
52
58
|
await reporter.logAndTrackDeployStats(summary);
|
|
53
59
|
reporter.printErrors(summary);
|
|
54
|
-
|
|
55
|
-
|
|
60
|
+
const wantBackend = backend.merge(...Object.values(payload.functions).map((p) => p.wantBackend));
|
|
61
|
+
printTriggerUrls(wantBackend);
|
|
62
|
+
const haveEndpoints = backend.allEndpoints(wantBackend);
|
|
56
63
|
const deletedEndpoints = Object.values(plan)
|
|
57
64
|
.map((r) => r.endpointsToDelete)
|
|
58
65
|
.reduce(functional_1.reduceFlat, []);
|
|
@@ -66,24 +73,6 @@ async function release(context, options, payload) {
|
|
|
66
73
|
const opts = allErrors.length === 1 ? { original: allErrors[0] } : { children: allErrors };
|
|
67
74
|
throw new error_1.FirebaseError("There was an error deploying functions", Object.assign(Object.assign({}, opts), { exit: 2 }));
|
|
68
75
|
}
|
|
69
|
-
else {
|
|
70
|
-
if (secrets.of(haveEndpoints).length > 0) {
|
|
71
|
-
const projectId = (0, projectUtils_1.needProjectId)(options);
|
|
72
|
-
const projectNumber = await (0, projectUtils_1.needProjectNumber)(options);
|
|
73
|
-
const reloadedBackend = await backend.existingBackend({ projectId });
|
|
74
|
-
const prunedResult = await secrets.pruneAndDestroySecrets({ projectId, projectNumber }, backend.allEndpoints(reloadedBackend));
|
|
75
|
-
if (prunedResult.destroyed.length > 0) {
|
|
76
|
-
(0, utils_1.logLabeledBullet)("functions", `Destroyed unused secret versions: ${prunedResult.destroyed
|
|
77
|
-
.map((s) => `${s.secret}@${s.version}`)
|
|
78
|
-
.join(", ")}`);
|
|
79
|
-
}
|
|
80
|
-
if (prunedResult.erred.length > 0) {
|
|
81
|
-
(0, utils_1.logLabeledWarning)("functions", `Failed to destroy unused secret versions:\n\t${prunedResult.erred
|
|
82
|
-
.map((err) => err.message)
|
|
83
|
-
.join("\n\t")}`);
|
|
84
|
-
}
|
|
85
|
-
}
|
|
86
|
-
}
|
|
87
76
|
}
|
|
88
77
|
exports.release = release;
|
|
89
78
|
function printTriggerUrls(results) {
|