firebase-tools 10.7.2 → 10.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/lib/commands/ext-configure.js +26 -15
  2. package/lib/commands/ext-export.js +14 -5
  3. package/lib/commands/ext-install.js +31 -2
  4. package/lib/commands/ext-update.js +17 -10
  5. package/lib/commands/functions-secrets-set.js +1 -13
  6. package/lib/deploy/extensions/planner.js +12 -0
  7. package/lib/deploy/extensions/tasks.js +13 -0
  8. package/lib/deploy/functions/backend.js +47 -10
  9. package/lib/deploy/functions/build.js +9 -1
  10. package/lib/deploy/functions/checkIam.js +65 -46
  11. package/lib/deploy/functions/functionsDeployHelper.js +1 -1
  12. package/lib/deploy/functions/prepare.js +42 -6
  13. package/lib/deploy/functions/pricing.js +2 -2
  14. package/lib/deploy/functions/release/fabricator.js +66 -11
  15. package/lib/deploy/functions/release/index.js +0 -21
  16. package/lib/deploy/functions/runtimes/discovery/v1alpha1.js +13 -1
  17. package/lib/deploy/functions/services/storage.js +6 -12
  18. package/lib/deploy/functions/validate.js +33 -6
  19. package/lib/emulator/extensionsEmulator.js +3 -0
  20. package/lib/extensions/askUserForEventsConfig.js +97 -0
  21. package/lib/extensions/export.js +7 -0
  22. package/lib/extensions/extensionsApi.js +47 -7
  23. package/lib/extensions/manifest.js +1 -1
  24. package/lib/extensions/updateHelper.js +7 -1
  25. package/lib/extensions/warnings.js +3 -3
  26. package/lib/gcp/cloudfunctions.js +1 -1
  27. package/lib/gcp/cloudfunctionsv2.js +7 -3
  28. package/lib/gcp/serviceusage.js +24 -0
  29. package/lib/previews.js +1 -1
  30. package/lib/throttler/throttler.js +2 -1
  31. package/npm-shrinkwrap.json +2 -2
  32. package/package.json +1 -1
  33. package/templates/extensions/typescript/package.lint.json +2 -1
  34. package/templates/extensions/typescript/package.nolint.json +2 -1
  35. package/templates/init/functions/typescript/package.lint.json +1 -0
  36. package/templates/init/functions/typescript/package.nolint.json +1 -0
@@ -1,8 +1,9 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.inferBlockingDetails = exports.inferDetailsFromExisting = exports.prepare = void 0;
3
+ exports.resolveCpu = exports.inferBlockingDetails = exports.inferDetailsFromExisting = exports.prepare = void 0;
4
4
  const clc = require("cli-color");
5
5
  const backend = require("./backend");
6
+ const build = require("./build");
6
7
  const ensureApiEnabled = require("../../ensureApiEnabled");
7
8
  const functionsConfig = require("../../functionsConfig");
8
9
  const functionsEnv = require("../../functions/env");
@@ -22,6 +23,7 @@ const error_1 = require("../../error");
22
23
  const projectConfig_1 = require("../../functions/projectConfig");
23
24
  const previews_1 = require("../../previews");
24
25
  const v1_1 = require("../../functions/events/v1");
26
+ const serviceusage_1 = require("../../gcp/serviceusage");
25
27
  function hasUserConfig(config) {
26
28
  return Object.keys(config).length > 1;
27
29
  }
@@ -76,9 +78,17 @@ async function prepare(context, options, payload) {
76
78
  projectAlias: options.projectAlias,
77
79
  };
78
80
  const userEnvs = functionsEnv.loadUserEnvs(userEnvOpt);
79
- logger_1.logger.debug(`Analyzing ${runtimeDelegate.name} backend spec`);
80
- const wantBackend = await runtimeDelegate.discoverSpec(runtimeConfig, firebaseEnvs);
81
- wantBackend.environmentVariables = Object.assign(Object.assign({}, userEnvs), firebaseEnvs);
81
+ const envs = Object.assign(Object.assign({}, userEnvs), firebaseEnvs);
82
+ let wantBackend;
83
+ if (previews_1.previews.functionsparams) {
84
+ const wantBuild = await runtimeDelegate.discoverBuild(runtimeConfig, firebaseEnvs);
85
+ wantBackend = build.resolveBackend(wantBuild, userEnvs);
86
+ }
87
+ else {
88
+ logger_1.logger.debug(`Analyzing ${runtimeDelegate.name} backend spec`);
89
+ wantBackend = await runtimeDelegate.discoverSpec(runtimeConfig, firebaseEnvs);
90
+ }
91
+ wantBackend.environmentVariables = envs;
82
92
  for (const endpoint of backend.allEndpoints(wantBackend)) {
83
93
  endpoint.environmentVariables = wantBackend.environmentVariables;
84
94
  endpoint.codebase = codebase;
@@ -117,12 +127,13 @@ async function prepare(context, options, payload) {
117
127
  payload.functions = {};
118
128
  const haveBackends = (0, functionsDeployHelper_1.groupEndpointsByCodebase)(wantBackends, backend.allEndpoints(await backend.existingBackend(context)));
119
129
  for (const [codebase, wantBackend] of Object.entries(wantBackends)) {
120
- const haveBackend = haveBackends[codebase] || Object.assign({}, backend.empty());
130
+ const haveBackend = haveBackends[codebase] || backend.empty();
121
131
  payload.functions[codebase] = { wantBackend, haveBackend };
122
132
  }
123
133
  for (const [codebase, { wantBackend, haveBackend }] of Object.entries(payload.functions)) {
124
134
  inferDetailsFromExisting(wantBackend, haveBackend, codebaseUsesEnvs.includes(codebase));
125
135
  await (0, triggerRegionHelper_1.ensureTriggerRegions)(wantBackend);
136
+ resolveCpu(wantBackend);
126
137
  validate.endpointsAreValid(wantBackend);
127
138
  inferBlockingDetails(wantBackend);
128
139
  }
@@ -153,6 +164,11 @@ async function prepare(context, options, payload) {
153
164
  return ensureApiEnabled.ensure(context.projectId, api, "functions");
154
165
  });
155
166
  await Promise.all(enablements);
167
+ const services = ["pubsub.googleapis.com", "eventarc.googleapis.com"];
168
+ const generateServiceAccounts = services.map((service) => {
169
+ return (0, serviceusage_1.generateServiceIdentity)(projectNumber, service, "functions");
170
+ });
171
+ await Promise.all(generateServiceAccounts);
156
172
  }
157
173
  const matchingBackend = backend.matchingBackend(wantBackend, (endpoint) => {
158
174
  return (0, functionsDeployHelper_1.endpointMatchesAnyFilter)(endpoint, context.filters);
@@ -160,7 +176,7 @@ async function prepare(context, options, payload) {
160
176
  await (0, prompts_1.promptForFailurePolicies)(options, matchingBackend, haveBackend);
161
177
  await (0, prompts_1.promptForMinInstances)(options, matchingBackend, haveBackend);
162
178
  await backend.checkAvailability(context, matchingBackend);
163
- await (0, checkIam_1.ensureServiceAgentRoles)(projectNumber, matchingBackend, haveBackend);
179
+ await (0, checkIam_1.ensureServiceAgentRoles)(projectId, projectNumber, matchingBackend, haveBackend);
164
180
  await validate.secretsAreValid(projectId, matchingBackend);
165
181
  await ensure.secretAccess(projectId, matchingBackend, haveBackend);
166
182
  }
@@ -178,6 +194,12 @@ function inferDetailsFromExisting(want, have, usedDotenv) {
178
194
  if (!wantE.availableMemoryMb && haveE.availableMemoryMb) {
179
195
  wantE.availableMemoryMb = haveE.availableMemoryMb;
180
196
  }
197
+ if (!wantE.concurrency && haveE.concurrency) {
198
+ wantE.concurrency = haveE.concurrency;
199
+ }
200
+ if (!wantE.cpu && haveE.cpu) {
201
+ wantE.cpu = haveE.cpu;
202
+ }
181
203
  wantE.securityLevel = haveE.securityLevel ? haveE.securityLevel : "SECURE_ALWAYS";
182
204
  maybeCopyTriggerRegion(wantE, haveE);
183
205
  }
@@ -223,3 +245,17 @@ function inferBlockingDetails(want) {
223
245
  }
224
246
  }
225
247
  exports.inferBlockingDetails = inferBlockingDetails;
248
+ function resolveCpu(want) {
249
+ for (const e of backend.allEndpoints(want)) {
250
+ if (e.platform === "gcfv1") {
251
+ continue;
252
+ }
253
+ if (e.cpu === "gcf_gen1") {
254
+ e.cpu = backend.memoryToGen1Cpu(e.availableMemoryMb || backend.DEFAULT_MEMORY);
255
+ }
256
+ else if (!e.cpu) {
257
+ e.cpu = backend.memoryToGen2Cpu(e.availableMemoryMb || backend.DEFAULT_MEMORY);
258
+ }
259
+ }
260
+ }
261
+ exports.resolveCpu = resolveCpu;
@@ -145,12 +145,12 @@ function monthlyMinInstanceCost(endpoints) {
145
145
  usage["gcfv1"][tier].cpu + cpu * SECONDS_PER_MONTH * endpoint.minInstances;
146
146
  }
147
147
  else {
148
- const cpu = 1;
149
148
  const tier = V2_REGION_TO_TIER[endpoint.region];
150
149
  usage["gcfv2"][tier].ram =
151
150
  usage["gcfv2"][tier].ram + ramGb * SECONDS_PER_MONTH * endpoint.minInstances;
152
151
  usage["gcfv2"][tier].cpu =
153
- usage["gcfv2"][tier].cpu + cpu * SECONDS_PER_MONTH * endpoint.minInstances;
152
+ usage["gcfv2"][tier].cpu +
153
+ endpoint.cpu * SECONDS_PER_MONTH * endpoint.minInstances;
154
154
  }
155
155
  }
156
156
  let v1MemoryBill = usage["gcfv1"][1].ram * exports.V1_RATES.memoryGb[1] + usage["gcfv1"][2].ram * exports.V1_RATES.memoryGb[2];
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.Fabricator = void 0;
3
+ exports.serviceIsResolved = exports.Fabricator = void 0;
4
4
  const clc = require("cli-color");
5
5
  const error_1 = require("../../../error");
6
6
  const sourceTokenScraper_1 = require("./sourceTokenScraper");
@@ -23,6 +23,7 @@ const scheduler = require("../../../gcp/cloudscheduler");
23
23
  const utils = require("../../../utils");
24
24
  const services = require("../services");
25
25
  const v1_1 = require("../../../functions/events/v1");
26
+ const throttler_1 = require("../../../throttler/throttler");
26
27
  const gcfV1PollerOptions = {
27
28
  apiOrigin: api_1.functionsOrigin,
28
29
  apiVersion: gcf.API_VERSION,
@@ -271,8 +272,16 @@ class Fabricator {
271
272
  .catch(rethrowAs(endpoint, "set invoker"));
272
273
  }
273
274
  const mem = endpoint.availableMemoryMb || backend.DEFAULT_MEMORY;
274
- if (mem >= backend.MIN_MEMORY_FOR_CONCURRENCY && endpoint.concurrency !== 1) {
275
- await this.setConcurrency(endpoint, serviceName, endpoint.concurrency || DEFAULT_GCFV2_CONCURRENCY);
275
+ const hasCustomCPU = endpoint.cpu !== backend.memoryToGen1Cpu(mem);
276
+ if (!endpoint.concurrency) {
277
+ endpoint.concurrency =
278
+ endpoint.cpu >= backend.MIN_CPU_FOR_CONCURRENCY
279
+ ? backend.DEFAULT_CONCURRENCY
280
+ : 1;
281
+ }
282
+ const hasConcurrency = endpoint.concurrency !== 1;
283
+ if (hasCustomCPU || hasConcurrency) {
284
+ await this.setRunTraits(serviceName, endpoint);
276
285
  }
277
286
  }
278
287
  async updateV1Function(endpoint, scraper) {
@@ -343,8 +352,17 @@ class Fabricator {
343
352
  .run(() => run.setInvokerUpdate(endpoint.project, serviceName, invoker))
344
353
  .catch(rethrowAs(endpoint, "set invoker"));
345
354
  }
346
- if (endpoint.concurrency) {
347
- await this.setConcurrency(endpoint, serviceName, endpoint.concurrency);
355
+ const hasCustomCPU = endpoint.cpu !==
356
+ backend.memoryToGen1Cpu(endpoint.availableMemoryMb || backend.DEFAULT_MEMORY);
357
+ const explicitConcurrency = endpoint.concurrency !== undefined;
358
+ if (hasCustomCPU || explicitConcurrency) {
359
+ if (endpoint.concurrency === undefined) {
360
+ endpoint.concurrency =
361
+ endpoint.cpu < backend.MIN_CPU_FOR_CONCURRENCY
362
+ ? 1
363
+ : backend.DEFAULT_CONCURRENCY;
364
+ }
365
+ await this.setRunTraits(serviceName, endpoint);
348
366
  }
349
367
  }
350
368
  async deleteV1Function(endpoint) {
@@ -367,18 +385,32 @@ class Fabricator {
367
385
  })
368
386
  .catch(rethrowAs(endpoint, "delete"));
369
387
  }
370
- async setConcurrency(endpoint, serviceName, concurrency) {
388
+ async setRunTraits(serviceName, endpoint) {
371
389
  await this.functionExecutor
372
390
  .run(async () => {
373
- const service = await run.getService(serviceName);
374
- if (service.spec.template.spec.containerConcurrency === concurrency) {
375
- logger_1.logger.debug("Skipping setConcurrency on", serviceName, " because it already matches");
391
+ let service = await run.getService(serviceName);
392
+ let changed = false;
393
+ if (service.spec.template.spec.containerConcurrency !== endpoint.concurrency) {
394
+ service.spec.template.spec.containerConcurrency = endpoint.concurrency;
395
+ changed = true;
396
+ }
397
+ if (+service.spec.template.spec.containers[0].resources.limits.cpu !== endpoint.cpu) {
398
+ service.spec.template.spec.containers[0].resources.limits.cpu = `${endpoint.cpu}`;
399
+ changed = true;
400
+ }
401
+ if (!changed) {
402
+ logger_1.logger.debug("Skipping setRunTraits on", serviceName, " because it already matches");
376
403
  return;
377
404
  }
378
405
  delete service.status;
379
406
  delete service.spec.template.metadata.name;
380
- service.spec.template.spec.containerConcurrency = concurrency;
381
- await run.replaceService(serviceName, service);
407
+ service = await run.replaceService(serviceName, service);
408
+ let retry = 0;
409
+ while (!exports.serviceIsResolved(service)) {
410
+ await (0, throttler_1.backoff)(retry, 2, 30);
411
+ retry = retry + 1;
412
+ service = await run.getService(serviceName);
413
+ }
382
414
  })
383
415
  .catch(rethrowAs(endpoint, "set concurrency"));
384
416
  }
@@ -482,3 +514,26 @@ class Fabricator {
482
514
  }
483
515
  }
484
516
  exports.Fabricator = Fabricator;
517
+ function serviceIsResolved(service) {
518
+ var _a, _b, _c, _d, _e;
519
+ if (((_a = service.status) === null || _a === void 0 ? void 0 : _a.observedGeneration) !== service.metadata.generation) {
520
+ logger_1.logger.debug(`Service ${service.metadata.name} is not resolved because` +
521
+ `observed generation ${(_b = service.status) === null || _b === void 0 ? void 0 : _b.observedGeneration} does not ` +
522
+ `match spec generation ${service.metadata.generation}`);
523
+ return false;
524
+ }
525
+ const readyCondition = (_d = (_c = service.status) === null || _c === void 0 ? void 0 : _c.conditions) === null || _d === void 0 ? void 0 : _d.find((condition) => {
526
+ return condition.type === "Ready";
527
+ });
528
+ if ((readyCondition === null || readyCondition === void 0 ? void 0 : readyCondition.status) === "Unknown") {
529
+ logger_1.logger.debug(`Waiting for service ${service.metadata.name} to be ready. ` +
530
+ `Status is ${JSON.stringify((_e = service.status) === null || _e === void 0 ? void 0 : _e.conditions)}`);
531
+ return false;
532
+ }
533
+ else if ((readyCondition === null || readyCondition === void 0 ? void 0 : readyCondition.status) === "True") {
534
+ return true;
535
+ }
536
+ logger_1.logger.debug(`Service ${service.metadata.name} has unexpected ready status ${JSON.stringify(readyCondition)}. It may have failed rollout.`);
537
+ throw new error_1.FirebaseError(`Unexpected Status ${readyCondition === null || readyCondition === void 0 ? void 0 : readyCondition.status} for service ${service.metadata.name}`);
538
+ }
539
+ exports.serviceIsResolved = serviceIsResolved;
@@ -11,12 +11,9 @@ const fabricator = require("./fabricator");
11
11
  const reporter = require("./reporter");
12
12
  const executor = require("./executor");
13
13
  const prompts = require("../prompts");
14
- const secrets = require("../../../functions/secrets");
15
14
  const functionsConfig_1 = require("../../../functionsConfig");
16
15
  const functionsDeployHelper_1 = require("../functionsDeployHelper");
17
16
  const error_1 = require("../../../error");
18
- const projectUtils_1 = require("../../../projectUtils");
19
- const utils_1 = require("../../../utils");
20
17
  async function release(context, options, payload) {
21
18
  if (!context.config) {
22
19
  return;
@@ -76,24 +73,6 @@ async function release(context, options, payload) {
76
73
  const opts = allErrors.length === 1 ? { original: allErrors[0] } : { children: allErrors };
77
74
  throw new error_1.FirebaseError("There was an error deploying functions", Object.assign(Object.assign({}, opts), { exit: 2 }));
78
75
  }
79
- else {
80
- if (secrets.of(haveEndpoints).length > 0) {
81
- const projectId = (0, projectUtils_1.needProjectId)(options);
82
- const projectNumber = await (0, projectUtils_1.needProjectNumber)(options);
83
- const reloadedBackend = await backend.existingBackend({ projectId });
84
- const prunedResult = await secrets.pruneAndDestroySecrets({ projectId, projectNumber }, backend.allEndpoints(reloadedBackend));
85
- if (prunedResult.destroyed.length > 0) {
86
- (0, utils_1.logLabeledBullet)("functions", `Destroyed unused secret versions: ${prunedResult.destroyed
87
- .map((s) => `${s.secret}@${s.version}`)
88
- .join(", ")}`);
89
- }
90
- if (prunedResult.erred.length > 0) {
91
- (0, utils_1.logLabeledWarning)("functions", `Failed to destroy unused secret versions:\n\t${prunedResult.erred
92
- .map((err) => err.message)
93
- .join("\n\t")}`);
94
- }
95
- }
96
- }
97
76
  }
98
77
  exports.release = release;
99
78
  function printTriggerUrls(results) {
@@ -187,7 +187,19 @@ function parseEndpoints(manifest, id, project, defaultRegion, runtime) {
187
187
  region,
188
188
  project,
189
189
  runtime, entryPoint: ep.entryPoint }, triggered);
190
- (0, proto_1.copyIfPresent)(parsed, ep, "availableMemoryMb", "maxInstances", "minInstances", "concurrency", "serviceAccountEmail", "timeoutSeconds", "vpc", "labels", "ingressSettings", "environmentVariables");
190
+ (0, proto_1.copyIfPresent)(parsed, ep, "availableMemoryMb", "maxInstances", "minInstances", "concurrency", "serviceAccountEmail", "timeoutSeconds", "vpc", "labels", "ingressSettings", "environmentVariables", "cpu");
191
+ (0, proto_1.renameIfPresent)(parsed, ep, "secretEnvironmentVariables", "secretEnvironmentVariables", (senvs) => {
192
+ if (senvs && senvs.length > 0) {
193
+ ep.secretEnvironmentVariables = [];
194
+ for (const { key, secret } of senvs) {
195
+ ep.secretEnvironmentVariables.push({
196
+ key,
197
+ secret: secret || key,
198
+ projectId: project,
199
+ });
200
+ }
201
+ }
202
+ });
191
203
  allParsed.push(parsed);
192
204
  }
193
205
  return allParsed;
@@ -6,20 +6,14 @@ const logger_1 = require("../../../logger");
6
6
  const error_1 = require("../../../error");
7
7
  const location_1 = require("../../../gcp/location");
8
8
  const PUBSUB_PUBLISHER_ROLE = "roles/pubsub.publisher";
9
- async function obtainStorageBindings(projectNumber, existingPolicy) {
9
+ async function obtainStorageBindings(projectNumber) {
10
10
  const storageResponse = await storage.getServiceAccount(projectNumber);
11
11
  const storageServiceAgent = `serviceAccount:${storageResponse.email_address}`;
12
- let pubsubBinding = existingPolicy.bindings.find((b) => b.role === PUBSUB_PUBLISHER_ROLE);
13
- if (!pubsubBinding) {
14
- pubsubBinding = {
15
- role: PUBSUB_PUBLISHER_ROLE,
16
- members: [],
17
- };
18
- }
19
- if (!pubsubBinding.members.find((m) => m === storageServiceAgent)) {
20
- pubsubBinding.members.push(storageServiceAgent);
21
- }
22
- return [pubsubBinding];
12
+ const pubsubPublisherBinding = {
13
+ role: PUBSUB_PUBLISHER_ROLE,
14
+ members: [storageServiceAgent],
15
+ };
16
+ return [pubsubPublisherBinding];
23
17
  }
24
18
  exports.obtainStorageBindings = obtainStorageBindings;
25
19
  async function ensureStorageTriggerRegion(endpoint) {
@@ -29,12 +29,39 @@ function endpointsAreValid(wantBackend) {
29
29
  if ((endpoint.concurrency || 1) === 1) {
30
30
  return false;
31
31
  }
32
- const mem = endpoint.availableMemoryMb || backend.DEFAULT_MEMORY;
33
- return mem < backend.MIN_MEMORY_FOR_CONCURRENCY;
32
+ return endpoint.cpu < backend.MIN_CPU_FOR_CONCURRENCY;
34
33
  })
35
34
  .map((endpoint) => endpoint.id);
36
35
  if (tooSmallForConcurrency.length) {
37
- const msg = `Cannot set concurency on the functions ${tooSmallForConcurrency.join(",")} because they have fewer than 2GB memory`;
36
+ const msg = "The following functions are configured to allow concurrent " +
37
+ "execution and less than one full CPU. This is not supported: " +
38
+ tooSmallForConcurrency.join(",");
39
+ throw new error_1.FirebaseError(msg);
40
+ }
41
+ const gcfV1WithCPU = endpoints
42
+ .filter((endpoint) => endpoint.platform === "gcfv1" && typeof endpoint["cpu"] !== "undefined")
43
+ .map((endpoint) => endpoint.id);
44
+ if (gcfV1WithCPU.length) {
45
+ const msg = `Cannot set CPU on the functions ${gcfV1WithCPU.join(",")} because they are GCF gen 1`;
46
+ throw new error_1.FirebaseError(msg);
47
+ }
48
+ const invalidCPU = endpoints
49
+ .filter((endpoint) => {
50
+ if (typeof endpoint.cpu === "undefined") {
51
+ return false;
52
+ }
53
+ if (endpoint.cpu === "gcf_gen1") {
54
+ return false;
55
+ }
56
+ const cpu = endpoint.cpu;
57
+ if (cpu < 1) {
58
+ return false;
59
+ }
60
+ return ![1, 2, 4, 6, 8].includes(cpu);
61
+ })
62
+ .map((endpoint) => endpoint.id);
63
+ if (invalidCPU.length) {
64
+ const msg = `The following functions have invalid CPU settings ${invalidCPU.join(",")}. Valid CPU options are (0, 1], 2, 4, 6, 8, or "gcf_gen1"`;
38
65
  throw new error_1.FirebaseError(msg);
39
66
  }
40
67
  }
@@ -100,13 +127,13 @@ async function secretsAreValid(projectId, wantBackend) {
100
127
  await validateSecretVersions(projectId, endpoints);
101
128
  }
102
129
  exports.secretsAreValid = secretsAreValid;
130
+ const secretsSupportedPlatforms = ["gcfv1", "gcfv2"];
103
131
  function validatePlatformTargets(endpoints) {
104
- const supportedPlatforms = ["gcfv1"];
105
- const unsupported = endpoints.filter((e) => !supportedPlatforms.includes(e.platform));
132
+ const unsupported = endpoints.filter((e) => !secretsSupportedPlatforms.includes(e.platform));
106
133
  if (unsupported.length > 0) {
107
134
  const errs = unsupported.map((e) => `${e.id}[platform=${e.platform}]`);
108
135
  throw new error_1.FirebaseError(`Tried to set secret environment variables on ${errs.join(", ")}. ` +
109
- `Only ${supportedPlatforms.join(", ")} support secret environments.`);
136
+ `Only ${secretsSupportedPlatforms.join(", ")} support secret environments.`);
110
137
  }
111
138
  }
112
139
  async function validateSecretVersions(projectId, endpoints) {
@@ -152,6 +152,7 @@ class ExtensionsEmulator {
152
152
  return emulatableBackend;
153
153
  }
154
154
  autoPopulatedParams(instance) {
155
+ var _a;
155
156
  const projectId = this.args.projectId;
156
157
  return {
157
158
  PROJECT_ID: projectId !== null && projectId !== void 0 ? projectId : "",
@@ -159,6 +160,8 @@ class ExtensionsEmulator {
159
160
  DATABASE_INSTANCE: projectId !== null && projectId !== void 0 ? projectId : "",
160
161
  DATABASE_URL: `https://${projectId}.firebaseio.com`,
161
162
  STORAGE_BUCKET: `${projectId}.appspot.com`,
163
+ ALLOWED_EVENT_TYPES: instance.allowedEventTypes ? instance.allowedEventTypes.join(",") : "",
164
+ EVENTARC_CHANNEL: (_a = instance.eventarcChannel) !== null && _a !== void 0 ? _a : "",
162
165
  };
163
166
  }
164
167
  async checkAndWarnAPIs(instances) {
@@ -0,0 +1,97 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.askForEventArcLocation = exports.askShouldCollectEventsConfig = exports.askForAllowedEventTypes = exports.askForEventsConfig = exports.checkAllowedEventTypesResponse = void 0;
4
+ const prompt_1 = require("../prompt");
5
+ const extensionsApi = require("../extensions/extensionsApi");
6
+ const utils = require("../utils");
7
+ const clc = require("cli-color");
8
+ const logger_1 = require("../logger");
9
+ const { marked } = require("marked");
10
+ function checkAllowedEventTypesResponse(response, validEvents) {
11
+ const validEventTypes = validEvents.map((e) => e.type);
12
+ if (response.length === 0) {
13
+ return false;
14
+ }
15
+ for (const e of response) {
16
+ if (!validEventTypes.includes(e)) {
17
+ utils.logWarning(`Unexpected event type '${e}' was configured to be emitted. This event type is not part of the extension spec.`);
18
+ return false;
19
+ }
20
+ }
21
+ return true;
22
+ }
23
+ exports.checkAllowedEventTypesResponse = checkAllowedEventTypesResponse;
24
+ async function askForEventsConfig(events, projectId, instanceId) {
25
+ var _a, _b;
26
+ logger_1.logger.info(`\n${clc.bold("Enable Events")}: ${marked("If you enable events, you can write custom event handlers ([https://firebase.google.com/docs/extensions/install-extensions#eventarc](https://firebase.google.com/docs/extensions/install-extensions#eventarc)) that respond to these events.\n\nYou can always enable or disable events later. Events will be emitted via Eventarc. Fees apply ([https://cloud.google.com/eventarc/pricing](https://cloud.google.com/eventarc/pricing)).")}`);
27
+ if (!(await askShouldCollectEventsConfig())) {
28
+ return undefined;
29
+ }
30
+ let existingInstance;
31
+ try {
32
+ existingInstance = instanceId
33
+ ? await extensionsApi.getInstance(projectId, instanceId)
34
+ : undefined;
35
+ }
36
+ catch (_c) {
37
+ }
38
+ const preselectedTypes = (_a = existingInstance === null || existingInstance === void 0 ? void 0 : existingInstance.config.allowedEventTypes) !== null && _a !== void 0 ? _a : [];
39
+ const oldLocation = (_b = existingInstance === null || existingInstance === void 0 ? void 0 : existingInstance.config.eventarcChannel) === null || _b === void 0 ? void 0 : _b.split("/")[3];
40
+ const location = await askForEventArcLocation(oldLocation);
41
+ const channel = `projects/${projectId}/locations/${location}/channels/firebase`;
42
+ const allowedEventTypes = await askForAllowedEventTypes(events, preselectedTypes);
43
+ return { channel, allowedEventTypes };
44
+ }
45
+ exports.askForEventsConfig = askForEventsConfig;
46
+ async function askForAllowedEventTypes(eventDescriptors, preselectedTypes) {
47
+ let valid = false;
48
+ let response = [];
49
+ const eventTypes = eventDescriptors.map((e, index) => ({
50
+ checked: false,
51
+ name: `${index + 1}. ${e.type}\n ${e.description}`,
52
+ value: e.type,
53
+ }));
54
+ while (!valid) {
55
+ response = await (0, prompt_1.promptOnce)({
56
+ name: "selectedEventTypesInput",
57
+ type: "checkbox",
58
+ default: preselectedTypes !== null && preselectedTypes !== void 0 ? preselectedTypes : [],
59
+ message: `Please select the events [${eventTypes.length} types total] that this extension is permitted to emit. ` +
60
+ "You can implement your own handlers that trigger when these events are emitted to customize the extension's behavior. ",
61
+ choices: eventTypes,
62
+ pageSize: 20,
63
+ });
64
+ valid = checkAllowedEventTypesResponse(response, eventDescriptors);
65
+ }
66
+ return response.filter((e) => e !== "");
67
+ }
68
+ exports.askForAllowedEventTypes = askForAllowedEventTypes;
69
+ async function askShouldCollectEventsConfig() {
70
+ return (0, prompt_1.promptOnce)({
71
+ type: "confirm",
72
+ name: "shouldCollectEvents",
73
+ message: `Would you like to enable events?`,
74
+ default: false,
75
+ });
76
+ }
77
+ exports.askShouldCollectEventsConfig = askShouldCollectEventsConfig;
78
+ async function askForEventArcLocation(preselectedLocation) {
79
+ let valid = false;
80
+ const allowedRegions = ["us-central1", "us-west1", "europe-west4", "asia-northeast1"];
81
+ let location = "";
82
+ while (!valid) {
83
+ location = await (0, prompt_1.promptOnce)({
84
+ name: "input",
85
+ type: "list",
86
+ default: preselectedLocation !== null && preselectedLocation !== void 0 ? preselectedLocation : "us-central1",
87
+ message: "Which location would you like the Eventarc channel to live in? We recommend using the default option. A channel location that differs from the extension's Cloud Functions location can incur egress cost.",
88
+ choices: allowedRegions.map((e) => ({ checked: false, value: e })),
89
+ });
90
+ valid = allowedRegions.includes(location);
91
+ if (!valid) {
92
+ utils.logWarning(`Unexpected EventArc region '${location}' was specified. Allowed regions: ${allowedRegions.join(", ")}`);
93
+ }
94
+ }
95
+ return location;
96
+ }
97
+ exports.askForEventArcLocation = askForEventArcLocation;
@@ -45,6 +45,7 @@ function displayExportInfo(withRef, withoutRef) {
45
45
  }
46
46
  exports.displayExportInfo = displayExportInfo;
47
47
  function displaySpecs(specs) {
48
+ var _a;
48
49
  for (let i = 0; i < specs.length; i++) {
49
50
  const spec = specs[i];
50
51
  logger_1.logger.info(`${i + 1}. ${(0, deploymentSummary_1.humanReadable)(spec)}`);
@@ -52,6 +53,12 @@ function displaySpecs(specs) {
52
53
  for (const p of Object.entries(spec.params)) {
53
54
  logger_1.logger.info(`\t${p[0]}=${p[1]}`);
54
55
  }
56
+ if ((_a = spec.allowedEventTypes) === null || _a === void 0 ? void 0 : _a.length) {
57
+ logger_1.logger.info(`\tALLOWED_EVENTS=${spec.allowedEventTypes}`);
58
+ }
59
+ if (spec.eventarcChannel) {
60
+ logger_1.logger.info(`\tEVENTARC_CHANNEL=${spec.eventarcChannel}`);
61
+ }
55
62
  logger_1.logger.info("");
56
63
  }
57
64
  }
@@ -58,6 +58,8 @@ async function createInstance(args) {
58
58
  var _a, _b;
59
59
  const config = {
60
60
  params: args.params,
61
+ allowedEventTypes: args.allowedEventTypes,
62
+ eventarcChannel: args.eventarcChannel,
61
63
  };
62
64
  if (args.extensionSource && args.extensionVersionRef) {
63
65
  throw new error_1.FirebaseError("ExtensionSource and ExtensionVersion both provided, but only one should be.");
@@ -73,6 +75,12 @@ async function createInstance(args) {
73
75
  else {
74
76
  throw new error_1.FirebaseError("No ExtensionVersion or ExtensionSource provided but one is required.");
75
77
  }
78
+ if (args.allowedEventTypes) {
79
+ config.allowedEventTypes = args.allowedEventTypes;
80
+ }
81
+ if (args.eventarcChannel) {
82
+ config.eventarcChannel = args.eventarcChannel;
83
+ }
76
84
  return createInstanceHelper(args.projectId, args.instanceId, config, args.validateOnly);
77
85
  }
78
86
  exports.createInstance = createInstance;
@@ -88,8 +96,16 @@ async function deleteInstance(projectId, instanceId) {
88
96
  }
89
97
  exports.deleteInstance = deleteInstance;
90
98
  async function getInstance(projectId, instanceId) {
91
- const res = await apiClient.get(`/projects/${projectId}/instances/${instanceId}`);
92
- return res.body;
99
+ try {
100
+ const res = await apiClient.get(`/projects/${projectId}/instances/${instanceId}`);
101
+ return res.body;
102
+ }
103
+ catch (err) {
104
+ if (err.status === 404) {
105
+ throw new error_1.FirebaseError(`Extension instance '${clc.bold(instanceId)}' not found in project '${clc.bold(projectId)}'.`, { status: 404 });
106
+ }
107
+ throw err;
108
+ }
93
109
  }
94
110
  exports.getInstance = getInstance;
95
111
  async function listInstances(projectId) {
@@ -114,7 +130,7 @@ async function listInstances(projectId) {
114
130
  exports.listInstances = listInstances;
115
131
  async function configureInstance(args) {
116
132
  var _a;
117
- const res = await patchInstance({
133
+ const reqBody = {
118
134
  projectId: args.projectId,
119
135
  instanceId: args.instanceId,
120
136
  updateMask: "config.params",
@@ -124,8 +140,16 @@ async function configureInstance(args) {
124
140
  params: args.params,
125
141
  },
126
142
  },
127
- });
128
- return res;
143
+ };
144
+ if (args.canEmitEvents) {
145
+ if (args.allowedEventTypes === undefined || args.eventarcChannel === undefined) {
146
+ throw new error_1.FirebaseError(`This instance is configured to emit events, but either allowed event types or eventarc channel is undefined.`);
147
+ }
148
+ reqBody.data.config.allowedEventTypes = args.allowedEventTypes;
149
+ reqBody.data.config.eventarcChannel = args.eventarcChannel;
150
+ }
151
+ reqBody.updateMask += ",config.allowed_event_types,config.eventarc_channel";
152
+ return patchInstance(reqBody);
129
153
  }
130
154
  exports.configureInstance = configureInstance;
131
155
  async function updateInstance(args) {
@@ -140,7 +164,15 @@ async function updateInstance(args) {
140
164
  body.config.params = args.params;
141
165
  updateMask += ",config.params";
142
166
  }
143
- return await patchInstance({
167
+ if (args.canEmitEvents) {
168
+ if (args.allowedEventTypes === undefined || args.eventarcChannel === undefined) {
169
+ throw new error_1.FirebaseError(`This instance is configured to emit events, but either allowed event types or eventarc channel is undefined.`);
170
+ }
171
+ body.config.allowedEventTypes = args.allowedEventTypes;
172
+ body.config.eventarcChannel = args.eventarcChannel;
173
+ }
174
+ updateMask += ",config.allowed_event_types,config.eventarc_channel";
175
+ return patchInstance({
144
176
  projectId: args.projectId,
145
177
  instanceId: args.instanceId,
146
178
  updateMask,
@@ -163,7 +195,15 @@ async function updateInstanceFromRegistry(args) {
163
195
  body.config.params = args.params;
164
196
  updateMask += ",config.params";
165
197
  }
166
- return await patchInstance({
198
+ if (args.canEmitEvents) {
199
+ if (args.allowedEventTypes === undefined || args.eventarcChannel === undefined) {
200
+ throw new error_1.FirebaseError(`This instance is configured to emit events, but either allowed event types or eventarc channel is undefined.`);
201
+ }
202
+ body.config.allowedEventTypes = args.allowedEventTypes;
203
+ body.config.eventarcChannel = args.eventarcChannel;
204
+ }
205
+ updateMask += ",config.allowed_event_types,config.eventarc_channel";
206
+ return patchInstance({
167
207
  projectId: args.projectId,
168
208
  instanceId: args.instanceId,
169
209
  updateMask,
@@ -48,7 +48,7 @@ async function writeLocalSecrets(specs, config, force) {
48
48
  continue;
49
49
  }
50
50
  const writeBuffer = {};
51
- const locallyOverridenSecretParams = extensionSpec.params.filter((p) => p.type === extensionsApi_1.ParamType.SECRET && spec.params[p.param].local);
51
+ const locallyOverridenSecretParams = extensionSpec.params.filter((p) => { var _a; return p.type === extensionsApi_1.ParamType.SECRET && ((_a = spec.params[p.param]) === null || _a === void 0 ? void 0 : _a.local); });
52
52
  for (const paramSpec of locallyOverridenSecretParams) {
53
53
  const key = paramSpec.param;
54
54
  const localValue = spec.params[key].local;