firebase-tools 10.7.2 → 10.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/lib/commands/ext-configure.js +26 -15
  2. package/lib/commands/ext-export.js +14 -5
  3. package/lib/commands/ext-install.js +31 -2
  4. package/lib/commands/ext-update.js +17 -10
  5. package/lib/commands/functions-list.js +12 -20
  6. package/lib/commands/functions-secrets-set.js +1 -13
  7. package/lib/deploy/extensions/planner.js +12 -0
  8. package/lib/deploy/extensions/tasks.js +13 -0
  9. package/lib/deploy/functions/backend.js +47 -14
  10. package/lib/deploy/functions/build.js +9 -1
  11. package/lib/deploy/functions/checkIam.js +65 -46
  12. package/lib/deploy/functions/functionsDeployHelper.js +1 -1
  13. package/lib/deploy/functions/prepare.js +42 -15
  14. package/lib/deploy/functions/pricing.js +2 -2
  15. package/lib/deploy/functions/release/fabricator.js +66 -11
  16. package/lib/deploy/functions/release/index.js +0 -21
  17. package/lib/deploy/functions/runtimes/discovery/index.js +2 -1
  18. package/lib/deploy/functions/runtimes/discovery/v1alpha1.js +13 -1
  19. package/lib/deploy/functions/runtimes/node/index.js +26 -26
  20. package/lib/deploy/functions/services/storage.js +6 -12
  21. package/lib/deploy/functions/validate.js +79 -15
  22. package/lib/deploy/index.js +2 -1
  23. package/lib/emulator/controller.js +10 -5
  24. package/lib/emulator/downloadableEmulators.js +18 -34
  25. package/lib/emulator/extensionsEmulator.js +4 -1
  26. package/lib/emulator/functionsEmulator.js +4 -1
  27. package/lib/extensions/askUserForEventsConfig.js +97 -0
  28. package/lib/extensions/export.js +7 -0
  29. package/lib/extensions/extensionsApi.js +47 -7
  30. package/lib/extensions/manifest.js +1 -1
  31. package/lib/extensions/updateHelper.js +7 -1
  32. package/lib/extensions/warnings.js +3 -3
  33. package/lib/frameworks/index.js +121 -0
  34. package/lib/functions/functionslog.js +4 -9
  35. package/lib/gcp/cloudfunctions.js +1 -1
  36. package/lib/gcp/cloudfunctionsv2.js +9 -11
  37. package/lib/gcp/serviceusage.js +24 -0
  38. package/lib/hosting/normalizedHostingConfigs.js +3 -0
  39. package/lib/previews.js +1 -1
  40. package/lib/serve/index.js +2 -1
  41. package/lib/throttler/throttler.js +2 -1
  42. package/npm-shrinkwrap.json +103 -9
  43. package/package.json +2 -2
  44. package/schema/firebase-config.json +9 -0
  45. package/templates/extensions/javascript/package.lint.json +5 -5
  46. package/templates/extensions/javascript/package.nolint.json +3 -3
  47. package/templates/extensions/typescript/package.lint.json +8 -7
  48. package/templates/extensions/typescript/package.nolint.json +2 -1
  49. package/templates/init/functions/typescript/package.lint.json +1 -0
  50. package/templates/init/functions/typescript/package.nolint.json +5 -5
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.ensureServiceAgentRoles = exports.mergeBindings = exports.obtainDefaultComputeServiceAgentBindings = exports.obtainPubSubServiceAgentBindings = exports.obtainBinding = exports.checkHttpIam = exports.checkServiceAccountIam = exports.EVENTARC_EVENT_RECEIVER_ROLE = exports.RUN_INVOKER_ROLE = exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = void 0;
3
+ exports.ensureServiceAgentRoles = exports.mergeBindings = exports.obtainDefaultComputeServiceAgentBindings = exports.obtainPubSubServiceAgentBindings = exports.checkHttpIam = exports.checkServiceAccountIam = exports.EVENTARC_EVENT_RECEIVER_ROLE = exports.RUN_INVOKER_ROLE = exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = void 0;
4
4
  const cli_color_1 = require("cli-color");
5
5
  const logger_1 = require("../../logger");
6
6
  const functionsDeployHelper_1 = require("./functionsDeployHelper");
@@ -67,6 +67,12 @@ async function checkHttpIam(context, options, payload) {
67
67
  logger_1.logger.debug("[functions] found setIamPolicy permission, proceeding with deploy");
68
68
  }
69
69
  exports.checkHttpIam = checkHttpIam;
70
+ function getPubsubServiceAgent(projectNumber) {
71
+ return `serviceAccount:service-${projectNumber}@gcp-sa-pubsub.iam.gserviceaccount.com`;
72
+ }
73
+ function getDefaultComputeServiceAgent(projectNumber) {
74
+ return `serviceAccount:${projectNumber}-compute@developer.gserviceaccount.com`;
75
+ }
70
76
  function reduceEventsToServices(services, endpoint) {
71
77
  const service = (0, services_1.serviceForEndpoint)(endpoint);
72
78
  if (service.requiredProjectBindings && !services.find((s) => s.name === service.name)) {
@@ -74,84 +80,97 @@ function reduceEventsToServices(services, endpoint) {
74
80
  }
75
81
  return services;
76
82
  }
77
- function obtainBinding(existingPolicy, serviceAccount, role) {
78
- let binding = existingPolicy.bindings.find((b) => b.role === role);
79
- if (!binding) {
80
- binding = {
81
- role,
82
- members: [],
83
- };
84
- }
85
- if (!binding.members.find((m) => m === serviceAccount)) {
86
- binding.members.push(serviceAccount);
87
- }
88
- return binding;
89
- }
90
- exports.obtainBinding = obtainBinding;
91
- function obtainPubSubServiceAgentBindings(projectNumber, existingPolicy) {
92
- const pubsubServiceAgent = `serviceAccount:service-${projectNumber}@gcp-sa-pubsub.iam.gserviceaccount.com`;
93
- return [obtainBinding(existingPolicy, pubsubServiceAgent, exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE)];
83
+ function obtainPubSubServiceAgentBindings(projectNumber) {
84
+ const serviceAccountTokenCreatorBinding = {
85
+ role: exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE,
86
+ members: [getPubsubServiceAgent(projectNumber)],
87
+ };
88
+ return [serviceAccountTokenCreatorBinding];
94
89
  }
95
90
  exports.obtainPubSubServiceAgentBindings = obtainPubSubServiceAgentBindings;
96
- function obtainDefaultComputeServiceAgentBindings(projectNumber, existingPolicy) {
97
- const defaultComputeServiceAgent = `serviceAccount:${projectNumber}-compute@developer.gserviceaccount.com`;
98
- const invokerBinding = obtainBinding(existingPolicy, defaultComputeServiceAgent, exports.RUN_INVOKER_ROLE);
99
- const eventReceiverBinding = obtainBinding(existingPolicy, defaultComputeServiceAgent, exports.EVENTARC_EVENT_RECEIVER_ROLE);
100
- return [invokerBinding, eventReceiverBinding];
91
+ function obtainDefaultComputeServiceAgentBindings(projectNumber) {
92
+ const defaultComputeServiceAgent = getDefaultComputeServiceAgent(projectNumber);
93
+ const runInvokerBinding = {
94
+ role: exports.RUN_INVOKER_ROLE,
95
+ members: [defaultComputeServiceAgent],
96
+ };
97
+ const eventarcEventReceiverBinding = {
98
+ role: exports.EVENTARC_EVENT_RECEIVER_ROLE,
99
+ members: [defaultComputeServiceAgent],
100
+ };
101
+ return [runInvokerBinding, eventarcEventReceiverBinding];
101
102
  }
102
103
  exports.obtainDefaultComputeServiceAgentBindings = obtainDefaultComputeServiceAgentBindings;
103
- function mergeBindings(policy, allRequiredBindings) {
104
- for (const requiredBindings of allRequiredBindings) {
105
- if (requiredBindings.length === 0) {
104
+ function mergeBindings(policy, requiredBindings) {
105
+ let updated = false;
106
+ for (const requiredBinding of requiredBindings) {
107
+ const match = policy.bindings.find((b) => b.role === requiredBinding.role);
108
+ if (!match) {
109
+ updated = true;
110
+ policy.bindings.push(requiredBinding);
106
111
  continue;
107
112
  }
108
- for (const requiredBinding of requiredBindings) {
109
- const ndx = policy.bindings.findIndex((policyBinding) => policyBinding.role === requiredBinding.role);
110
- if (ndx === -1) {
111
- policy.bindings.push(requiredBinding);
112
- continue;
113
+ for (const requiredMember of requiredBinding.members) {
114
+ if (!match.members.find((m) => m === requiredMember)) {
115
+ updated = true;
116
+ match.members.push(requiredMember);
113
117
  }
114
- requiredBinding.members.forEach((updatedMember) => {
115
- if (!policy.bindings[ndx].members.find((member) => member === updatedMember)) {
116
- policy.bindings[ndx].members.push(updatedMember);
117
- }
118
- });
119
118
  }
120
119
  }
120
+ return updated;
121
121
  }
122
122
  exports.mergeBindings = mergeBindings;
123
- async function ensureServiceAgentRoles(projectNumber, want, have) {
123
+ function printManualIamConfig(requiredBindings, projectId) {
124
+ utils.logLabeledBullet("functions", "Failed to verify the project has the correct IAM bindings for a successful deployment.", "warn");
125
+ utils.logLabeledBullet("functions", "You can either re-run `firebase deploy` as a project owner or manually run the following set of `gcloud` commands:", "warn");
126
+ for (const binding of requiredBindings) {
127
+ for (const member of binding.members) {
128
+ utils.logLabeledBullet("functions", `\`gcloud projects add-iam-policy-binding ${projectId} ` +
129
+ `--member=${member} ` +
130
+ `--role=${binding.role}\``, "warn");
131
+ }
132
+ }
133
+ }
134
+ async function ensureServiceAgentRoles(projectId, projectNumber, want, have) {
124
135
  const wantServices = backend.allEndpoints(want).reduce(reduceEventsToServices, []);
125
136
  const haveServices = backend.allEndpoints(have).reduce(reduceEventsToServices, []);
126
137
  const newServices = wantServices.filter((wantS) => !haveServices.find((haveS) => wantS.name === haveS.name));
127
138
  if (newServices.length === 0) {
128
139
  return;
129
140
  }
141
+ const requiredBindingsPromises = [];
142
+ for (const service of newServices) {
143
+ requiredBindingsPromises.push(service.requiredProjectBindings(projectNumber));
144
+ }
145
+ const nestedRequiredBindings = await Promise.all(requiredBindingsPromises);
146
+ const requiredBindings = [...(0, functional_1.flattenArray)(nestedRequiredBindings)];
147
+ if (haveServices.length === 0) {
148
+ requiredBindings.push(...obtainPubSubServiceAgentBindings(projectNumber));
149
+ requiredBindings.push(...obtainDefaultComputeServiceAgentBindings(projectNumber));
150
+ }
151
+ if (requiredBindings.length === 0) {
152
+ return;
153
+ }
130
154
  let policy;
131
155
  try {
132
156
  policy = await (0, resourceManager_1.getIamPolicy)(projectNumber);
133
157
  }
134
158
  catch (err) {
159
+ printManualIamConfig(requiredBindings, projectId);
135
160
  utils.logLabeledBullet("functions", "Could not verify the necessary IAM configuration for the following newly-integrated services: " +
136
161
  `${newServices.map((service) => service.api).join(", ")}` +
137
162
  ". Deployment may fail.", "warn");
138
163
  return;
139
164
  }
140
- const findRequiredBindings = [];
141
- newServices.forEach((service) => findRequiredBindings.push(service.requiredProjectBindings(projectNumber, policy)));
142
- const allRequiredBindings = await Promise.all(findRequiredBindings);
143
- if (haveServices.length === 0) {
144
- allRequiredBindings.push(obtainPubSubServiceAgentBindings(projectNumber, policy));
145
- allRequiredBindings.push(obtainDefaultComputeServiceAgentBindings(projectNumber, policy));
146
- }
147
- if (!allRequiredBindings.find((bindings) => bindings.length > 0)) {
165
+ const hasUpdatedBindings = mergeBindings(policy, requiredBindings);
166
+ if (!hasUpdatedBindings) {
148
167
  return;
149
168
  }
150
- mergeBindings(policy, allRequiredBindings);
151
169
  try {
152
170
  await (0, resourceManager_1.setIamPolicy)(projectNumber, policy, "bindings");
153
171
  }
154
172
  catch (err) {
173
+ printManualIamConfig(requiredBindings, projectId);
155
174
  throw new error_1.FirebaseError("We failed to modify the IAM policy for the project. The functions " +
156
175
  "deployment requires specific roles to be granted to service agents," +
157
176
  " otherwise the deployment will fail.", { original: err });
@@ -70,7 +70,7 @@ exports.getEndpointFilters = getEndpointFilters;
70
70
  function getFunctionLabel(fn) {
71
71
  let id = `${fn.id}(${fn.region})`;
72
72
  if (fn.codebase && fn.codebase !== projectConfig_1.DEFAULT_CODEBASE) {
73
- id = `[${fn.codebase}]${id}`;
73
+ id = `${fn.codebase}:${id}`;
74
74
  }
75
75
  return id;
76
76
  }
@@ -1,8 +1,9 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.inferBlockingDetails = exports.inferDetailsFromExisting = exports.prepare = void 0;
3
+ exports.resolveCpu = exports.inferBlockingDetails = exports.inferDetailsFromExisting = exports.prepare = void 0;
4
4
  const clc = require("cli-color");
5
5
  const backend = require("./backend");
6
+ const build = require("./build");
6
7
  const ensureApiEnabled = require("../../ensureApiEnabled");
7
8
  const functionsConfig = require("../../functionsConfig");
8
9
  const functionsEnv = require("../../functions/env");
@@ -22,6 +23,7 @@ const error_1 = require("../../error");
22
23
  const projectConfig_1 = require("../../functions/projectConfig");
23
24
  const previews_1 = require("../../previews");
24
25
  const v1_1 = require("../../functions/events/v1");
26
+ const serviceusage_1 = require("../../gcp/serviceusage");
25
27
  function hasUserConfig(config) {
26
28
  return Object.keys(config).length > 1;
27
29
  }
@@ -76,9 +78,17 @@ async function prepare(context, options, payload) {
76
78
  projectAlias: options.projectAlias,
77
79
  };
78
80
  const userEnvs = functionsEnv.loadUserEnvs(userEnvOpt);
79
- logger_1.logger.debug(`Analyzing ${runtimeDelegate.name} backend spec`);
80
- const wantBackend = await runtimeDelegate.discoverSpec(runtimeConfig, firebaseEnvs);
81
- wantBackend.environmentVariables = Object.assign(Object.assign({}, userEnvs), firebaseEnvs);
81
+ const envs = Object.assign(Object.assign({}, userEnvs), firebaseEnvs);
82
+ let wantBackend;
83
+ if (previews_1.previews.functionsparams) {
84
+ const wantBuild = await runtimeDelegate.discoverBuild(runtimeConfig, firebaseEnvs);
85
+ wantBackend = build.resolveBackend(wantBuild, userEnvs);
86
+ }
87
+ else {
88
+ logger_1.logger.debug(`Analyzing ${runtimeDelegate.name} backend spec`);
89
+ wantBackend = await runtimeDelegate.discoverSpec(runtimeConfig, firebaseEnvs);
90
+ }
91
+ wantBackend.environmentVariables = envs;
82
92
  for (const endpoint of backend.allEndpoints(wantBackend)) {
83
93
  endpoint.environmentVariables = wantBackend.environmentVariables;
84
94
  endpoint.codebase = codebase;
@@ -98,15 +108,6 @@ async function prepare(context, options, payload) {
98
108
  (0, utils_1.logLabeledBullet)("functions", `preparing ${clc.bold(sourceDirName)} directory for uploading...`);
99
109
  }
100
110
  if (backend.someEndpoint(wantBackend, (e) => e.platform === "gcfv2")) {
101
- if (!previews_1.previews.functionsv2) {
102
- throw new error_1.FirebaseError("This version of firebase-tools does not support Google Cloud " +
103
- "Functions gen 2\n" +
104
- "If Cloud Functions for Firebase gen 2 is still in alpha, sign up " +
105
- "for the alpha program at " +
106
- "https://services.google.com/fb/forms/firebasealphaprogram/\n" +
107
- "If Cloud Functions for Firebase gen 2 is in beta, get the latest " +
108
- "version of Firebse Tools with `npm i -g firebase-tools@latest`");
109
- }
110
111
  source.functionsSourceV2 = await (0, prepareFunctionsUpload_1.prepareFunctionsUpload)(sourceDir, config);
111
112
  }
112
113
  if (backend.someEndpoint(wantBackend, (e) => e.platform === "gcfv1")) {
@@ -117,12 +118,13 @@ async function prepare(context, options, payload) {
117
118
  payload.functions = {};
118
119
  const haveBackends = (0, functionsDeployHelper_1.groupEndpointsByCodebase)(wantBackends, backend.allEndpoints(await backend.existingBackend(context)));
119
120
  for (const [codebase, wantBackend] of Object.entries(wantBackends)) {
120
- const haveBackend = haveBackends[codebase] || Object.assign({}, backend.empty());
121
+ const haveBackend = haveBackends[codebase] || backend.empty();
121
122
  payload.functions[codebase] = { wantBackend, haveBackend };
122
123
  }
123
124
  for (const [codebase, { wantBackend, haveBackend }] of Object.entries(payload.functions)) {
124
125
  inferDetailsFromExisting(wantBackend, haveBackend, codebaseUsesEnvs.includes(codebase));
125
126
  await (0, triggerRegionHelper_1.ensureTriggerRegions)(wantBackend);
127
+ resolveCpu(wantBackend);
126
128
  validate.endpointsAreValid(wantBackend);
127
129
  inferBlockingDetails(wantBackend);
128
130
  }
@@ -153,6 +155,11 @@ async function prepare(context, options, payload) {
153
155
  return ensureApiEnabled.ensure(context.projectId, api, "functions");
154
156
  });
155
157
  await Promise.all(enablements);
158
+ const services = ["pubsub.googleapis.com", "eventarc.googleapis.com"];
159
+ const generateServiceAccounts = services.map((service) => {
160
+ return (0, serviceusage_1.generateServiceIdentity)(projectNumber, service, "functions");
161
+ });
162
+ await Promise.all(generateServiceAccounts);
156
163
  }
157
164
  const matchingBackend = backend.matchingBackend(wantBackend, (endpoint) => {
158
165
  return (0, functionsDeployHelper_1.endpointMatchesAnyFilter)(endpoint, context.filters);
@@ -160,7 +167,7 @@ async function prepare(context, options, payload) {
160
167
  await (0, prompts_1.promptForFailurePolicies)(options, matchingBackend, haveBackend);
161
168
  await (0, prompts_1.promptForMinInstances)(options, matchingBackend, haveBackend);
162
169
  await backend.checkAvailability(context, matchingBackend);
163
- await (0, checkIam_1.ensureServiceAgentRoles)(projectNumber, matchingBackend, haveBackend);
170
+ await (0, checkIam_1.ensureServiceAgentRoles)(projectId, projectNumber, matchingBackend, haveBackend);
164
171
  await validate.secretsAreValid(projectId, matchingBackend);
165
172
  await ensure.secretAccess(projectId, matchingBackend, haveBackend);
166
173
  }
@@ -178,6 +185,12 @@ function inferDetailsFromExisting(want, have, usedDotenv) {
178
185
  if (!wantE.availableMemoryMb && haveE.availableMemoryMb) {
179
186
  wantE.availableMemoryMb = haveE.availableMemoryMb;
180
187
  }
188
+ if (!wantE.concurrency && haveE.concurrency) {
189
+ wantE.concurrency = haveE.concurrency;
190
+ }
191
+ if (!wantE.cpu && haveE.cpu) {
192
+ wantE.cpu = haveE.cpu;
193
+ }
181
194
  wantE.securityLevel = haveE.securityLevel ? haveE.securityLevel : "SECURE_ALWAYS";
182
195
  maybeCopyTriggerRegion(wantE, haveE);
183
196
  }
@@ -223,3 +236,17 @@ function inferBlockingDetails(want) {
223
236
  }
224
237
  }
225
238
  exports.inferBlockingDetails = inferBlockingDetails;
239
+ function resolveCpu(want) {
240
+ for (const e of backend.allEndpoints(want)) {
241
+ if (e.platform === "gcfv1") {
242
+ continue;
243
+ }
244
+ if (e.cpu === "gcf_gen1") {
245
+ e.cpu = backend.memoryToGen1Cpu(e.availableMemoryMb || backend.DEFAULT_MEMORY);
246
+ }
247
+ else if (!e.cpu) {
248
+ e.cpu = backend.memoryToGen2Cpu(e.availableMemoryMb || backend.DEFAULT_MEMORY);
249
+ }
250
+ }
251
+ }
252
+ exports.resolveCpu = resolveCpu;
@@ -145,12 +145,12 @@ function monthlyMinInstanceCost(endpoints) {
145
145
  usage["gcfv1"][tier].cpu + cpu * SECONDS_PER_MONTH * endpoint.minInstances;
146
146
  }
147
147
  else {
148
- const cpu = 1;
149
148
  const tier = V2_REGION_TO_TIER[endpoint.region];
150
149
  usage["gcfv2"][tier].ram =
151
150
  usage["gcfv2"][tier].ram + ramGb * SECONDS_PER_MONTH * endpoint.minInstances;
152
151
  usage["gcfv2"][tier].cpu =
153
- usage["gcfv2"][tier].cpu + cpu * SECONDS_PER_MONTH * endpoint.minInstances;
152
+ usage["gcfv2"][tier].cpu +
153
+ endpoint.cpu * SECONDS_PER_MONTH * endpoint.minInstances;
154
154
  }
155
155
  }
156
156
  let v1MemoryBill = usage["gcfv1"][1].ram * exports.V1_RATES.memoryGb[1] + usage["gcfv1"][2].ram * exports.V1_RATES.memoryGb[2];
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.Fabricator = void 0;
3
+ exports.serviceIsResolved = exports.Fabricator = void 0;
4
4
  const clc = require("cli-color");
5
5
  const error_1 = require("../../../error");
6
6
  const sourceTokenScraper_1 = require("./sourceTokenScraper");
@@ -23,6 +23,7 @@ const scheduler = require("../../../gcp/cloudscheduler");
23
23
  const utils = require("../../../utils");
24
24
  const services = require("../services");
25
25
  const v1_1 = require("../../../functions/events/v1");
26
+ const throttler_1 = require("../../../throttler/throttler");
26
27
  const gcfV1PollerOptions = {
27
28
  apiOrigin: api_1.functionsOrigin,
28
29
  apiVersion: gcf.API_VERSION,
@@ -271,8 +272,16 @@ class Fabricator {
271
272
  .catch(rethrowAs(endpoint, "set invoker"));
272
273
  }
273
274
  const mem = endpoint.availableMemoryMb || backend.DEFAULT_MEMORY;
274
- if (mem >= backend.MIN_MEMORY_FOR_CONCURRENCY && endpoint.concurrency !== 1) {
275
- await this.setConcurrency(endpoint, serviceName, endpoint.concurrency || DEFAULT_GCFV2_CONCURRENCY);
275
+ const hasCustomCPU = endpoint.cpu !== backend.memoryToGen1Cpu(mem);
276
+ if (!endpoint.concurrency) {
277
+ endpoint.concurrency =
278
+ endpoint.cpu >= backend.MIN_CPU_FOR_CONCURRENCY
279
+ ? backend.DEFAULT_CONCURRENCY
280
+ : 1;
281
+ }
282
+ const hasConcurrency = endpoint.concurrency !== 1;
283
+ if (hasCustomCPU || hasConcurrency) {
284
+ await this.setRunTraits(serviceName, endpoint);
276
285
  }
277
286
  }
278
287
  async updateV1Function(endpoint, scraper) {
@@ -343,8 +352,17 @@ class Fabricator {
343
352
  .run(() => run.setInvokerUpdate(endpoint.project, serviceName, invoker))
344
353
  .catch(rethrowAs(endpoint, "set invoker"));
345
354
  }
346
- if (endpoint.concurrency) {
347
- await this.setConcurrency(endpoint, serviceName, endpoint.concurrency);
355
+ const hasCustomCPU = endpoint.cpu !==
356
+ backend.memoryToGen1Cpu(endpoint.availableMemoryMb || backend.DEFAULT_MEMORY);
357
+ const explicitConcurrency = endpoint.concurrency !== undefined;
358
+ if (hasCustomCPU || explicitConcurrency) {
359
+ if (endpoint.concurrency === undefined) {
360
+ endpoint.concurrency =
361
+ endpoint.cpu < backend.MIN_CPU_FOR_CONCURRENCY
362
+ ? 1
363
+ : backend.DEFAULT_CONCURRENCY;
364
+ }
365
+ await this.setRunTraits(serviceName, endpoint);
348
366
  }
349
367
  }
350
368
  async deleteV1Function(endpoint) {
@@ -367,18 +385,32 @@ class Fabricator {
367
385
  })
368
386
  .catch(rethrowAs(endpoint, "delete"));
369
387
  }
370
- async setConcurrency(endpoint, serviceName, concurrency) {
388
+ async setRunTraits(serviceName, endpoint) {
371
389
  await this.functionExecutor
372
390
  .run(async () => {
373
- const service = await run.getService(serviceName);
374
- if (service.spec.template.spec.containerConcurrency === concurrency) {
375
- logger_1.logger.debug("Skipping setConcurrency on", serviceName, " because it already matches");
391
+ let service = await run.getService(serviceName);
392
+ let changed = false;
393
+ if (service.spec.template.spec.containerConcurrency !== endpoint.concurrency) {
394
+ service.spec.template.spec.containerConcurrency = endpoint.concurrency;
395
+ changed = true;
396
+ }
397
+ if (+service.spec.template.spec.containers[0].resources.limits.cpu !== endpoint.cpu) {
398
+ service.spec.template.spec.containers[0].resources.limits.cpu = `${endpoint.cpu}`;
399
+ changed = true;
400
+ }
401
+ if (!changed) {
402
+ logger_1.logger.debug("Skipping setRunTraits on", serviceName, " because it already matches");
376
403
  return;
377
404
  }
378
405
  delete service.status;
379
406
  delete service.spec.template.metadata.name;
380
- service.spec.template.spec.containerConcurrency = concurrency;
381
- await run.replaceService(serviceName, service);
407
+ service = await run.replaceService(serviceName, service);
408
+ let retry = 0;
409
+ while (!exports.serviceIsResolved(service)) {
410
+ await (0, throttler_1.backoff)(retry, 2, 30);
411
+ retry = retry + 1;
412
+ service = await run.getService(serviceName);
413
+ }
382
414
  })
383
415
  .catch(rethrowAs(endpoint, "set concurrency"));
384
416
  }
@@ -482,3 +514,26 @@ class Fabricator {
482
514
  }
483
515
  }
484
516
  exports.Fabricator = Fabricator;
517
+ function serviceIsResolved(service) {
518
+ var _a, _b, _c, _d, _e;
519
+ if (((_a = service.status) === null || _a === void 0 ? void 0 : _a.observedGeneration) !== service.metadata.generation) {
520
+ logger_1.logger.debug(`Service ${service.metadata.name} is not resolved because` +
521
+ `observed generation ${(_b = service.status) === null || _b === void 0 ? void 0 : _b.observedGeneration} does not ` +
522
+ `match spec generation ${service.metadata.generation}`);
523
+ return false;
524
+ }
525
+ const readyCondition = (_d = (_c = service.status) === null || _c === void 0 ? void 0 : _c.conditions) === null || _d === void 0 ? void 0 : _d.find((condition) => {
526
+ return condition.type === "Ready";
527
+ });
528
+ if ((readyCondition === null || readyCondition === void 0 ? void 0 : readyCondition.status) === "Unknown") {
529
+ logger_1.logger.debug(`Waiting for service ${service.metadata.name} to be ready. ` +
530
+ `Status is ${JSON.stringify((_e = service.status) === null || _e === void 0 ? void 0 : _e.conditions)}`);
531
+ return false;
532
+ }
533
+ else if ((readyCondition === null || readyCondition === void 0 ? void 0 : readyCondition.status) === "True") {
534
+ return true;
535
+ }
536
+ logger_1.logger.debug(`Service ${service.metadata.name} has unexpected ready status ${JSON.stringify(readyCondition)}. It may have failed rollout.`);
537
+ throw new error_1.FirebaseError(`Unexpected Status ${readyCondition === null || readyCondition === void 0 ? void 0 : readyCondition.status} for service ${service.metadata.name}`);
538
+ }
539
+ exports.serviceIsResolved = serviceIsResolved;
@@ -11,12 +11,9 @@ const fabricator = require("./fabricator");
11
11
  const reporter = require("./reporter");
12
12
  const executor = require("./executor");
13
13
  const prompts = require("../prompts");
14
- const secrets = require("../../../functions/secrets");
15
14
  const functionsConfig_1 = require("../../../functionsConfig");
16
15
  const functionsDeployHelper_1 = require("../functionsDeployHelper");
17
16
  const error_1 = require("../../../error");
18
- const projectUtils_1 = require("../../../projectUtils");
19
- const utils_1 = require("../../../utils");
20
17
  async function release(context, options, payload) {
21
18
  if (!context.config) {
22
19
  return;
@@ -76,24 +73,6 @@ async function release(context, options, payload) {
76
73
  const opts = allErrors.length === 1 ? { original: allErrors[0] } : { children: allErrors };
77
74
  throw new error_1.FirebaseError("There was an error deploying functions", Object.assign(Object.assign({}, opts), { exit: 2 }));
78
75
  }
79
- else {
80
- if (secrets.of(haveEndpoints).length > 0) {
81
- const projectId = (0, projectUtils_1.needProjectId)(options);
82
- const projectNumber = await (0, projectUtils_1.needProjectNumber)(options);
83
- const reloadedBackend = await backend.existingBackend({ projectId });
84
- const prunedResult = await secrets.pruneAndDestroySecrets({ projectId, projectNumber }, backend.allEndpoints(reloadedBackend));
85
- if (prunedResult.destroyed.length > 0) {
86
- (0, utils_1.logLabeledBullet)("functions", `Destroyed unused secret versions: ${prunedResult.destroyed
87
- .map((s) => `${s.secret}@${s.version}`)
88
- .join(", ")}`);
89
- }
90
- if (prunedResult.erred.length > 0) {
91
- (0, utils_1.logLabeledWarning)("functions", `Failed to destroy unused secret versions:\n\t${prunedResult.erred
92
- .map((err) => err.message)
93
- .join("\n\t")}`);
94
- }
95
- }
96
- }
97
76
  }
98
77
  exports.release = release;
99
78
  function printTriggerUrls(results) {
@@ -71,7 +71,8 @@ async function detectFromPort(port, project, runtime, timeout = 30000) {
71
71
  parsed = yaml.load(text);
72
72
  }
73
73
  catch (err) {
74
- throw new error_1.FirebaseError("Failed to parse backend specification", { children: [err] });
74
+ logger_1.logger.debug("Failed to parse functions.yaml", err);
75
+ throw new error_1.FirebaseError(`Failed to load function definition from source: ${text}`);
75
76
  }
76
77
  return yamlToBackend(parsed, project, api.functionsDefaultRegion, runtime);
77
78
  }
@@ -187,7 +187,19 @@ function parseEndpoints(manifest, id, project, defaultRegion, runtime) {
187
187
  region,
188
188
  project,
189
189
  runtime, entryPoint: ep.entryPoint }, triggered);
190
- (0, proto_1.copyIfPresent)(parsed, ep, "availableMemoryMb", "maxInstances", "minInstances", "concurrency", "serviceAccountEmail", "timeoutSeconds", "vpc", "labels", "ingressSettings", "environmentVariables");
190
+ (0, proto_1.copyIfPresent)(parsed, ep, "availableMemoryMb", "maxInstances", "minInstances", "concurrency", "serviceAccountEmail", "timeoutSeconds", "vpc", "labels", "ingressSettings", "environmentVariables", "cpu");
191
+ (0, proto_1.renameIfPresent)(parsed, ep, "secretEnvironmentVariables", "secretEnvironmentVariables", (senvs) => {
192
+ if (senvs && senvs.length > 0) {
193
+ ep.secretEnvironmentVariables = [];
194
+ for (const { key, secret } of senvs) {
195
+ ep.secretEnvironmentVariables.push({
196
+ key,
197
+ secret: secret || key,
198
+ projectId: project,
199
+ });
200
+ }
201
+ }
202
+ });
191
203
  allParsed.push(parsed);
192
204
  }
193
205
  return allParsed;
@@ -11,7 +11,6 @@ const node_fetch_1 = require("node-fetch");
11
11
  const error_1 = require("../../../../error");
12
12
  const parseRuntimeAndValidateSDK_1 = require("./parseRuntimeAndValidateSDK");
13
13
  const logger_1 = require("../../../../logger");
14
- const previews_1 = require("../../../../previews");
15
14
  const utils_1 = require("../../../../utils");
16
15
  const discovery = require("../discovery");
17
16
  const validate = require("./validate");
@@ -58,10 +57,14 @@ class Delegate {
58
57
  watch() {
59
58
  return Promise.resolve(() => Promise.resolve());
60
59
  }
61
- serve(port, envs) {
60
+ serve(port, config, envs) {
62
61
  var _a;
62
+ const env = Object.assign(Object.assign({}, envs), { PORT: port.toString(), FUNCTIONS_CONTROL_API: "true", HOME: process.env.HOME, PATH: process.env.PATH });
63
+ if (Object.keys(config || {}).length) {
64
+ env.CLOUD_RUNTIME_CONFIG = JSON.stringify(config);
65
+ }
63
66
  const childProcess = spawn("./node_modules/.bin/firebase-functions", [this.sourceDir], {
64
- env: Object.assign(Object.assign({}, envs), { PORT: port.toString(), FUNCTIONS_CONTROL_API: "true", HOME: process.env.HOME, PATH: process.env.PATH }),
67
+ env,
65
68
  cwd: this.sourceDir,
66
69
  stdio: ["ignore", "pipe", "inherit"],
67
70
  });
@@ -83,32 +86,29 @@ class Delegate {
83
86
  });
84
87
  }
85
88
  async discoverSpec(config, env) {
86
- if (previews_1.previews.functionsv2) {
87
- if (!semver.valid(this.sdkVersion)) {
88
- logger_1.logger.debug(`Could not parse firebase-functions version '${this.sdkVersion}' into semver. Falling back to parseTriggers.`);
89
- return parseTriggers.discoverBackend(this.projectId, this.sourceDir, this.runtime, config, env);
90
- }
91
- if (semver.lt(this.sdkVersion, MIN_FUNCTIONS_SDK_VERSION)) {
92
- (0, utils_1.logLabeledWarning)("functions", `You are using an old version of firebase-functions SDK (${this.sdkVersion}). ` +
93
- `Please update firebase-functions SDK to >=${MIN_FUNCTIONS_SDK_VERSION}`);
94
- return parseTriggers.discoverBackend(this.projectId, this.sourceDir, this.runtime, config, env);
89
+ if (!semver.valid(this.sdkVersion)) {
90
+ logger_1.logger.debug(`Could not parse firebase-functions version '${this.sdkVersion}' into semver. Falling back to parseTriggers.`);
91
+ return parseTriggers.discoverBackend(this.projectId, this.sourceDir, this.runtime, config, env);
92
+ }
93
+ if (semver.lt(this.sdkVersion, MIN_FUNCTIONS_SDK_VERSION)) {
94
+ (0, utils_1.logLabeledWarning)("functions", `You are using an old version of firebase-functions SDK (${this.sdkVersion}). ` +
95
+ `Please update firebase-functions SDK to >=${MIN_FUNCTIONS_SDK_VERSION}`);
96
+ return parseTriggers.discoverBackend(this.projectId, this.sourceDir, this.runtime, config, env);
97
+ }
98
+ let discovered = await discovery.detectFromYaml(this.sourceDir, this.projectId, this.runtime);
99
+ if (!discovered) {
100
+ const getPort = (0, util_1.promisify)(portfinder.getPort);
101
+ const port = await getPort();
102
+ const kill = await this.serve(port, config, env);
103
+ try {
104
+ discovered = await discovery.detectFromPort(port, this.projectId, this.runtime);
95
105
  }
96
- let discovered = await discovery.detectFromYaml(this.sourceDir, this.projectId, this.runtime);
97
- if (!discovered) {
98
- const getPort = (0, util_1.promisify)(portfinder.getPort);
99
- const port = await getPort();
100
- const kill = await this.serve(port, env);
101
- try {
102
- discovered = await discovery.detectFromPort(port, this.projectId, this.runtime);
103
- }
104
- finally {
105
- await kill();
106
- }
106
+ finally {
107
+ await kill();
107
108
  }
108
- discovered.environmentVariables = env;
109
- return discovered;
110
109
  }
111
- return parseTriggers.discoverBackend(this.projectId, this.sourceDir, this.runtime, config, env);
110
+ discovered.environmentVariables = env;
111
+ return discovered;
112
112
  }
113
113
  async discoverBuild(config, env) {
114
114
  return parseTriggers.discoverBuild(this.projectId, this.sourceDir, this.runtime, config, env);
@@ -6,20 +6,14 @@ const logger_1 = require("../../../logger");
6
6
  const error_1 = require("../../../error");
7
7
  const location_1 = require("../../../gcp/location");
8
8
  const PUBSUB_PUBLISHER_ROLE = "roles/pubsub.publisher";
9
- async function obtainStorageBindings(projectNumber, existingPolicy) {
9
+ async function obtainStorageBindings(projectNumber) {
10
10
  const storageResponse = await storage.getServiceAccount(projectNumber);
11
11
  const storageServiceAgent = `serviceAccount:${storageResponse.email_address}`;
12
- let pubsubBinding = existingPolicy.bindings.find((b) => b.role === PUBSUB_PUBLISHER_ROLE);
13
- if (!pubsubBinding) {
14
- pubsubBinding = {
15
- role: PUBSUB_PUBLISHER_ROLE,
16
- members: [],
17
- };
18
- }
19
- if (!pubsubBinding.members.find((m) => m === storageServiceAgent)) {
20
- pubsubBinding.members.push(storageServiceAgent);
21
- }
22
- return [pubsubBinding];
12
+ const pubsubPublisherBinding = {
13
+ role: PUBSUB_PUBLISHER_ROLE,
14
+ members: [storageServiceAgent],
15
+ };
16
+ return [pubsubPublisherBinding];
23
17
  }
24
18
  exports.obtainStorageBindings = obtainStorageBindings;
25
19
  async function ensureStorageTriggerRegion(endpoint) {