firebase-tools 10.5.0 → 10.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/lib/commands/functions-secrets-destroy.js +23 -3
  2. package/lib/commands/functions-secrets-prune.js +15 -12
  3. package/lib/commands/functions-secrets-set.js +51 -4
  4. package/lib/deploy/functions/backend.js +1 -5
  5. package/lib/deploy/functions/prepare.js +13 -3
  6. package/lib/deploy/functions/release/fabricator.js +1 -3
  7. package/lib/deploy/functions/release/index.js +21 -0
  8. package/lib/deploy/functions/release/planner.js +1 -2
  9. package/lib/deploy/functions/runtimes/discovery/v1alpha1.js +11 -10
  10. package/lib/deploy/functions/runtimes/node/index.js +1 -1
  11. package/lib/deploy/functions/runtimes/node/parseTriggers.js +5 -19
  12. package/lib/deploy/functions/services/storage.js +1 -6
  13. package/lib/emulator/extensions/postinstall.js +41 -0
  14. package/lib/emulator/functionsEmulatorShared.js +16 -20
  15. package/lib/emulator/storage/apis/firebase.js +6 -6
  16. package/lib/emulator/storage/apis/gcloud.js +9 -6
  17. package/lib/emulator/storage/files.js +6 -3
  18. package/lib/emulator/storage/index.js +9 -1
  19. package/lib/emulator/storage/metadata.js +18 -8
  20. package/lib/emulator/storage/rules/manager.js +7 -17
  21. package/lib/emulator/storage/server.js +38 -12
  22. package/lib/extensions/askUserForParam.js +14 -11
  23. package/lib/extensions/emulator/optionsHelper.js +5 -7
  24. package/lib/extensions/emulator/triggerHelper.js +11 -14
  25. package/lib/extensions/extensionsApi.js +2 -1
  26. package/lib/extensions/manifest.js +1 -1
  27. package/lib/extensions/paramHelper.js +16 -10
  28. package/lib/functions/env.js +10 -2
  29. package/lib/functions/runtimeConfigExport.js +10 -6
  30. package/lib/functions/secrets.js +99 -6
  31. package/lib/gcp/cloudfunctions.js +6 -13
  32. package/lib/gcp/cloudfunctionsv2.js +14 -23
  33. package/lib/gcp/cloudtasks.js +5 -3
  34. package/lib/gcp/secretManager.js +1 -1
  35. package/lib/utils.js +30 -1
  36. package/npm-shrinkwrap.json +2 -2
  37. package/package.json +1 -1
@@ -1,21 +1,41 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  const command_1 = require("../command");
4
- const logger_1 = require("../logger");
5
4
  const projectUtils_1 = require("../projectUtils");
6
5
  const secretManager_1 = require("../gcp/secretManager");
7
6
  const prompt_1 = require("../prompt");
7
+ const utils_1 = require("../utils");
8
8
  const secrets = require("../functions/secrets");
9
+ const backend = require("../deploy/functions/backend");
9
10
  exports.default = new command_1.Command("functions:secrets:destroy <KEY>[@version]")
10
11
  .description("Destroy a secret. Defaults to destroying the latest version.")
11
12
  .withForce("Destroys a secret without confirmation.")
12
13
  .action(async (key, options) => {
13
14
  const projectId = (0, projectUtils_1.needProjectId)(options);
15
+ const projectNumber = await (0, projectUtils_1.needProjectNumber)(options);
16
+ const haveBackend = await backend.existingBackend({ projectId });
14
17
  let [name, version] = key.split("@");
15
18
  if (!version) {
16
19
  version = "latest";
17
20
  }
18
21
  const sv = await (0, secretManager_1.getSecretVersion)(projectId, name, version);
22
+ if (sv.state === "DESTROYED") {
23
+ (0, utils_1.logBullet)(`Secret ${sv.secret.name}@${version} is already destroyed. Nothing to do.`);
24
+ return;
25
+ }
26
+ const boundEndpoints = backend
27
+ .allEndpoints(haveBackend)
28
+ .filter((e) => secrets.inUse({ projectId, projectNumber }, sv.secret, e));
29
+ if (boundEndpoints.length > 0) {
30
+ const endpointsMsg = boundEndpoints
31
+ .map((e) => `${e.id}[${e.platform}](${e.region})`)
32
+ .join("\t\n");
33
+ (0, utils_1.logWarning)(`Secret ${name}@${version} is currently in use by following functions:\n\t${endpointsMsg}`);
34
+ if (!options.force) {
35
+ (0, utils_1.logWarning)("Refusing to destroy secret in use. Use -f to destroy the secret anyway.");
36
+ return;
37
+ }
38
+ }
19
39
  if (!options.force) {
20
40
  const confirm = await (0, prompt_1.promptOnce)({
21
41
  name: "destroy",
@@ -28,12 +48,12 @@ exports.default = new command_1.Command("functions:secrets:destroy <KEY>[@versio
28
48
  }
29
49
  }
30
50
  await (0, secretManager_1.destroySecretVersion)(projectId, name, version);
31
- logger_1.logger.info(`Destroyed secret version ${name}@${sv.versionId}`);
51
+ (0, utils_1.logBullet)(`Destroyed secret version ${name}@${sv.versionId}`);
32
52
  const secret = await (0, secretManager_1.getSecret)(projectId, name);
33
53
  if (secrets.isFirebaseManaged(secret)) {
34
54
  const versions = await (0, secretManager_1.listSecretVersions)(projectId, name);
35
55
  if (versions.filter((v) => v.state === "ENABLED").length === 0) {
36
- logger_1.logger.info(`No active secret versions left. Destroying secret ${name}`);
56
+ (0, utils_1.logBullet)(`No active secret versions left. Destroying secret ${name}`);
37
57
  await (0, secretManager_1.deleteSecret)(projectId, name);
38
58
  }
39
59
  }
@@ -10,6 +10,7 @@ const utils_1 = require("../utils");
10
10
  const prompt_1 = require("../prompt");
11
11
  const secretManager_1 = require("../gcp/secretManager");
12
12
  exports.default = new command_1.Command("functions:secrets:prune")
13
+ .withForce("Destroys unused secrets without prompt")
13
14
  .description("Destroys unused secrets")
14
15
  .before(requirePermissions_1.requirePermissions, [
15
16
  "cloudfunctions.functions.list",
@@ -32,18 +33,20 @@ exports.default = new command_1.Command("functions:secrets:prune")
32
33
  }
33
34
  (0, utils_1.logBullet)(`Found ${pruned.length} unused active secret versions:\n\t` +
34
35
  pruned.map((sv) => `${sv.secret}@${sv.version}`).join("\n\t"));
35
- const confirm = await (0, prompt_1.promptOnce)({
36
- name: "destroy",
37
- type: "confirm",
38
- default: true,
39
- message: `Do you want to destroy unused secret versions?`,
40
- }, options);
41
- if (!confirm) {
42
- (0, utils_1.logBullet)("Run the following commands to destroy each unused secret version:\n\t" +
43
- pruned
44
- .map((sv) => `firebase functions:secrets:destroy ${sv.secret}@${sv.version}`)
45
- .join("\n\t"));
46
- return;
36
+ if (!options.force) {
37
+ const confirm = await (0, prompt_1.promptOnce)({
38
+ name: "destroy",
39
+ type: "confirm",
40
+ default: true,
41
+ message: `Do you want to destroy unused secret versions?`,
42
+ }, options);
43
+ if (!confirm) {
44
+ (0, utils_1.logBullet)("Run the following commands to destroy each unused secret version:\n\t" +
45
+ pruned
46
+ .map((sv) => `firebase functions:secrets:destroy ${sv.secret}@${sv.version}`)
47
+ .join("\n\t"));
48
+ return;
49
+ }
47
50
  }
48
51
  await Promise.all(pruned.map((sv) => (0, secretManager_1.destroySecretVersion)(projectId, sv.secret, sv.version)));
49
52
  (0, utils_1.logSuccess)("Destroyed all unused secrets!");
@@ -10,9 +10,11 @@ const prompt_1 = require("../prompt");
10
10
  const utils_1 = require("../utils");
11
11
  const projectUtils_1 = require("../projectUtils");
12
12
  const secretManager_1 = require("../gcp/secretManager");
13
+ const secrets = require("../functions/secrets");
14
+ const backend = require("../deploy/functions/backend");
13
15
  exports.default = new command_1.Command("functions:secrets:set <KEY>")
14
- .description("Create or update a secret for use in Cloud Functions for Firebase")
15
- .withForce("Does not ensure input keys are valid or upgrade existing secrets to have Firebase manage them.")
16
+ .description("Create or update a secret for use in Cloud Functions for Firebase.")
17
+ .withForce("Automatically updates functions to use the new secret.")
16
18
  .before(requirePermissions_1.requirePermissions, [
17
19
  "secretmanager.secrets.create",
18
20
  "secretmanager.secrets.get",
@@ -22,6 +24,7 @@ exports.default = new command_1.Command("functions:secrets:set <KEY>")
22
24
  .option("--data-file <dataFile>", 'File path from which to read secret data. Set to "-" to read the secret data from stdin.')
23
25
  .action(async (unvalidatedKey, options) => {
24
26
  const projectId = (0, projectUtils_1.needProjectId)(options);
27
+ const projectNumber = await (0, projectUtils_1.needProjectNumber)(options);
25
28
  const key = await (0, secrets_1.ensureValidKey)(unvalidatedKey, options);
26
29
  const secret = await (0, secrets_1.ensureSecret)(projectId, key, options);
27
30
  let secretValue;
@@ -41,6 +44,50 @@ exports.default = new command_1.Command("functions:secrets:set <KEY>")
41
44
  }
42
45
  const secretVersion = await (0, secretManager_1.addVersion)(projectId, key, secretValue);
43
46
  (0, utils_1.logSuccess)(`Created a new secret version ${(0, secretManager_1.toSecretVersionResourceName)(secretVersion)}`);
44
- (0, utils_1.logBullet)("Please deploy your functions for the change to take effect by running:\n\t" +
45
- clc.bold("firebase deploy --only functions"));
47
+ if (!secrets.isFirebaseManaged(secret)) {
48
+ (0, utils_1.logBullet)("Please deploy your functions for the change to take effect by running:\n\t" +
49
+ clc.bold("firebase deploy --only functions"));
50
+ return;
51
+ }
52
+ const haveBackend = await backend.existingBackend({ projectId });
53
+ const endpointsToUpdate = backend
54
+ .allEndpoints(haveBackend)
55
+ .filter((e) => secrets.inUse({ projectId, projectNumber }, secret, e));
56
+ if (endpointsToUpdate.length === 0) {
57
+ return;
58
+ }
59
+ (0, utils_1.logBullet)(`${endpointsToUpdate.length} functions are using stale version of secret ${secret.name}:\n\t` +
60
+ endpointsToUpdate.map((e) => `${e.id}(${e.region})`).join("\n\t"));
61
+ if (!options.force) {
62
+ const confirm = await (0, prompt_1.promptOnce)({
63
+ name: "redeploy",
64
+ type: "confirm",
65
+ default: true,
66
+ message: `Do you want to re-deploy the functions and destroy the stale version of secret ${secret.name}?`,
67
+ }, options);
68
+ if (!confirm) {
69
+ (0, utils_1.logBullet)("Please deploy your functions for the change to take effect by running:\n\t" +
70
+ clc.bold("firebase deploy --only functions"));
71
+ return;
72
+ }
73
+ }
74
+ const updateOps = endpointsToUpdate.map(async (e) => {
75
+ (0, utils_1.logBullet)(`Updating function ${e.id}(${e.region})...`);
76
+ const updated = await secrets.updateEndpointSecret({ projectId, projectNumber }, secretVersion, e);
77
+ (0, utils_1.logBullet)(`Updated function ${e.id}(${e.region}).`);
78
+ return updated;
79
+ });
80
+ const updatedEndpoints = await Promise.all(updateOps);
81
+ (0, utils_1.logBullet)(`Pruning stale secrets...`);
82
+ const prunedResult = await (0, secrets_1.pruneAndDestroySecrets)({ projectId, projectNumber }, updatedEndpoints);
83
+ if (prunedResult.destroyed.length > 0) {
84
+ (0, utils_1.logBullet)(`Destroyed unused secret versions: ${prunedResult.destroyed
85
+ .map((s) => `${s.secret}@${s.version}`)
86
+ .join(", ")}`);
87
+ }
88
+ if (prunedResult.erred.length > 0) {
89
+ (0, utils_1.logWarning)(`Failed to destroy unused secret versions:\n\t${prunedResult.erred
90
+ .map((err) => err.message)
91
+ .join("\n\t")}`);
92
+ }
46
93
  });
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.compareFunctions = exports.findEventFilter = exports.missingEndpoint = exports.hasEndpoint = exports.regionalEndpoints = exports.matchingBackend = exports.findEndpoint = exports.someEndpoint = exports.allEndpoints = exports.checkAvailability = exports.existingBackend = exports.scheduleIdForFunction = exports.functionName = exports.isEmptyBackend = exports.of = exports.empty = exports.isTaskQueueTriggered = exports.isScheduleTriggered = exports.isEventTriggered = exports.isCallableTriggered = exports.isHttpsTriggered = exports.secretVersionName = exports.SCHEDULED_FUNCTION_LABEL = exports.MIN_MEMORY_FOR_CONCURRENCY = exports.DEFAULT_MEMORY = exports.memoryOptionDisplayName = exports.endpointTriggerType = void 0;
3
+ exports.compareFunctions = exports.missingEndpoint = exports.hasEndpoint = exports.regionalEndpoints = exports.matchingBackend = exports.findEndpoint = exports.someEndpoint = exports.allEndpoints = exports.checkAvailability = exports.existingBackend = exports.scheduleIdForFunction = exports.functionName = exports.isEmptyBackend = exports.of = exports.empty = exports.isTaskQueueTriggered = exports.isScheduleTriggered = exports.isEventTriggered = exports.isCallableTriggered = exports.isHttpsTriggered = exports.secretVersionName = exports.SCHEDULED_FUNCTION_LABEL = exports.MIN_MEMORY_FOR_CONCURRENCY = exports.DEFAULT_MEMORY = exports.memoryOptionDisplayName = exports.endpointTriggerType = void 0;
4
4
  const gcf = require("../../gcp/cloudfunctions");
5
5
  const gcfV2 = require("../../gcp/cloudfunctionsv2");
6
6
  const utils = require("../../utils");
@@ -230,10 +230,6 @@ const missingEndpoint = (backend) => (endpoint) => {
230
230
  return !(0, exports.hasEndpoint)(backend)(endpoint);
231
231
  };
232
232
  exports.missingEndpoint = missingEndpoint;
233
- function findEventFilter(endpoint, attribute) {
234
- return endpoint.eventTrigger.eventFilters.find((ef) => ef.attribute === attribute);
235
- }
236
- exports.findEventFilter = findEventFilter;
237
233
  function compareFunctions(left, right) {
238
234
  if (left.platform !== right.platform) {
239
235
  return right.platform < left.platform ? -1 : 1;
@@ -20,6 +20,7 @@ const triggerRegionHelper_1 = require("./triggerRegionHelper");
20
20
  const checkIam_1 = require("./checkIam");
21
21
  const error_1 = require("../../error");
22
22
  const projectConfig_1 = require("../../functions/projectConfig");
23
+ const previews_1 = require("../../previews");
23
24
  function hasUserConfig(config) {
24
25
  return Object.keys(config).length > 1;
25
26
  }
@@ -96,12 +97,21 @@ async function prepare(context, options, payload) {
96
97
  clc.bold(sourceDirName) +
97
98
  " directory for uploading...");
98
99
  }
99
- if (backend.someEndpoint(wantBackend, (e) => e.platform === "gcfv1")) {
100
- context.functionsSourceV1 = await (0, prepareFunctionsUpload_1.prepareFunctionsUpload)(sourceDir, context.config, runtimeConfig);
101
- }
102
100
  if (backend.someEndpoint(wantBackend, (e) => e.platform === "gcfv2")) {
101
+ if (!previews_1.previews.functionsv2) {
102
+ throw new error_1.FirebaseError("This version of firebase-tools does not support Google Cloud " +
103
+ "Functions gen 2\n" +
104
+ "If Cloud Functions for Firebase gen 2 is still in alpha, sign up " +
105
+ "for the alpha program at " +
106
+ "https://services.google.com/fb/forms/firebasealphaprogram/\n" +
107
+ "If Cloud Functions for Firebase gen 2 is in beta, get the latest " +
108
  "version of Firebase Tools with `npm i -g firebase-tools@latest`");
109
+ }
103
110
  context.functionsSourceV2 = await (0, prepareFunctionsUpload_1.prepareFunctionsUpload)(sourceDir, context.config);
104
111
  }
112
+ if (backend.someEndpoint(wantBackend, (e) => e.platform === "gcfv1")) {
113
+ context.functionsSourceV1 = await (0, prepareFunctionsUpload_1.prepareFunctionsUpload)(sourceDir, context.config, runtimeConfig);
114
+ }
105
115
  for (const endpoint of backend.allEndpoints(wantBackend)) {
106
116
  endpoint.environmentVariables = wantBackend.environmentVariables;
107
117
  }
@@ -124,9 +124,7 @@ class Fabricator {
124
124
  await this.setTrigger(endpoint);
125
125
  }
126
126
  async updateEndpoint(update, scraper) {
127
- if (update.deleteAndRecreate || update.endpoint.platform !== "gcfv2") {
128
- update.endpoint.labels = Object.assign(Object.assign({}, update.endpoint.labels), deploymentTool.labels());
129
- }
127
+ update.endpoint.labels = Object.assign(Object.assign({}, update.endpoint.labels), deploymentTool.labels());
130
128
  if (update.deleteAndRecreate) {
131
129
  await this.deleteEndpoint(update.deleteAndRecreate);
132
130
  await this.createEndpoint(update.endpoint, scraper);
@@ -11,9 +11,12 @@ const fabricator = require("./fabricator");
11
11
  const reporter = require("./reporter");
12
12
  const executor = require("./executor");
13
13
  const prompts = require("../prompts");
14
+ const secrets = require("../../../functions/secrets");
14
15
  const functionsConfig_1 = require("../../../functionsConfig");
15
16
  const functionsDeployHelper_1 = require("../functionsDeployHelper");
16
17
  const error_1 = require("../../../error");
18
+ const projectUtils_1 = require("../../../projectUtils");
19
+ const utils_1 = require("../../../utils");
17
20
  async function release(context, options, payload) {
18
21
  if (!context.config) {
19
22
  return;
@@ -59,6 +62,24 @@ async function release(context, options, payload) {
59
62
  const opts = allErrors.length === 1 ? { original: allErrors[0] } : { children: allErrors };
60
63
  throw new error_1.FirebaseError("There was an error deploying functions", Object.assign(Object.assign({}, opts), { exit: 2 }));
61
64
  }
65
+ else {
66
+ if (secrets.of(haveEndpoints).length > 0) {
67
+ const projectId = (0, projectUtils_1.needProjectId)(options);
68
+ const projectNumber = await (0, projectUtils_1.needProjectNumber)(options);
69
+ const reloadedBackend = await backend.existingBackend({ projectId });
70
+ const prunedResult = await secrets.pruneAndDestroySecrets({ projectId, projectNumber }, backend.allEndpoints(reloadedBackend));
71
+ if (prunedResult.destroyed.length > 0) {
72
+ (0, utils_1.logLabeledBullet)("functions", `Destroyed unused secret versions: ${prunedResult.destroyed
73
+ .map((s) => `${s.secret}@${s.version}`)
74
+ .join(", ")}`);
75
+ }
76
+ if (prunedResult.erred.length > 0) {
77
+ (0, utils_1.logLabeledWarning)("functions", `Failed to destroy unused secret versions:\n\t${prunedResult.erred
78
+ .map((err) => err.message)
79
+ .join("\n\t")}`);
80
+ }
81
+ }
82
+ }
62
83
  }
63
84
  exports.release = release;
64
85
  function printTriggerUrls(results) {
@@ -103,7 +103,6 @@ function changedTriggerRegion(want, have) {
103
103
  }
104
104
  exports.changedTriggerRegion = changedTriggerRegion;
105
105
  function changedV2PubSubTopic(want, have) {
106
- var _a, _b;
107
106
  if (want.platform !== "gcfv2") {
108
107
  return false;
109
108
  }
@@ -122,7 +121,7 @@ function changedV2PubSubTopic(want, have) {
122
121
  if (have.eventTrigger.eventType !== v2events.PUBSUB_PUBLISH_EVENT) {
123
122
  return false;
124
123
  }
125
- return (((_a = backend.findEventFilter(have, "topic")) === null || _a === void 0 ? void 0 : _a.value) !== ((_b = backend.findEventFilter(want, "topic")) === null || _b === void 0 ? void 0 : _b.value));
124
+ return have.eventTrigger.eventFilters.topic !== want.eventTrigger.eventFilters.topic;
126
125
  }
127
126
  exports.changedV2PubSubTopic = changedV2PubSubTopic;
128
127
  function upgradedScheduleFromV1ToV2(want, have) {
@@ -49,7 +49,7 @@ function parseEndpoints(manifest, id, project, defaultRegion, runtime) {
49
49
  minInstances: "number",
50
50
  concurrency: "number",
51
51
  serviceAccountEmail: "string",
52
- timeout: "string",
52
+ timeoutSeconds: "number",
53
53
  vpc: "object",
54
54
  labels: "object",
55
55
  ingressSettings: "string",
@@ -88,16 +88,18 @@ function parseEndpoints(manifest, id, project, defaultRegion, runtime) {
88
88
  if (backend.isEventTriggered(ep)) {
89
89
  (0, parsing_1.requireKeys)(prefix + ".eventTrigger", ep.eventTrigger, "eventType", "eventFilters");
90
90
  (0, parsing_1.assertKeyTypes)(prefix + ".eventTrigger", ep.eventTrigger, {
91
- eventFilters: "array",
91
+ eventFilters: "object",
92
+ eventFilterPathPatterns: "object",
92
93
  eventType: "string",
93
94
  retry: "boolean",
94
95
  region: "string",
95
96
  serviceAccountEmail: "string",
97
+ channel: "string",
96
98
  });
97
99
  triggered = { eventTrigger: ep.eventTrigger };
98
- for (const eventFilter of triggered.eventTrigger.eventFilters) {
99
- if (eventFilter.attribute === "topic" && !eventFilter.value.startsWith("projects/")) {
100
- eventFilter.value = `projects/${project}/topics/${eventFilter.value}`;
100
+ for (const [k, v] of Object.entries(triggered.eventTrigger.eventFilters)) {
101
+ if (k === "topic" && !v.startsWith("projects/")) {
102
+ triggered.eventTrigger.eventFilters[k] = `projects/${project}/topics/${v}`;
101
103
  }
102
104
  }
103
105
  }
@@ -134,7 +136,6 @@ function parseEndpoints(manifest, id, project, defaultRegion, runtime) {
134
136
  });
135
137
  if (ep.taskQueueTrigger.rateLimits) {
136
138
  (0, parsing_1.assertKeyTypes)(prefix + ".taskQueueTrigger.rateLimits", ep.taskQueueTrigger.rateLimits, {
137
- maxBurstSize: "number",
138
139
  maxConcurrentDispatches: "number",
139
140
  maxDispatchesPerSecond: "number",
140
141
  });
@@ -142,9 +143,9 @@ function parseEndpoints(manifest, id, project, defaultRegion, runtime) {
142
143
  if (ep.taskQueueTrigger.retryConfig) {
143
144
  (0, parsing_1.assertKeyTypes)(prefix + ".taskQueueTrigger.retryConfig", ep.taskQueueTrigger.retryConfig, {
144
145
  maxAttempts: "number",
145
- maxRetryDuration: "string",
146
- minBackoff: "string",
147
- maxBackoff: "string",
146
+ maxRetrySeconds: "number",
147
+ minBackoffSeconds: "number",
148
+ maxBackoffSeconds: "number",
148
149
  maxDoublings: "number",
149
150
  });
150
151
  }
@@ -159,7 +160,7 @@ function parseEndpoints(manifest, id, project, defaultRegion, runtime) {
159
160
  region,
160
161
  project,
161
162
  runtime, entryPoint: ep.entryPoint }, triggered);
162
- (0, proto_1.copyIfPresent)(parsed, ep, "availableMemoryMb", "maxInstances", "minInstances", "concurrency", "serviceAccountEmail", "timeout", "vpc", "labels", "ingressSettings", "environmentVariables");
163
+ (0, proto_1.copyIfPresent)(parsed, ep, "availableMemoryMb", "maxInstances", "minInstances", "concurrency", "serviceAccountEmail", "timeoutSeconds", "vpc", "labels", "ingressSettings", "environmentVariables");
163
164
  allParsed.push(parsed);
164
165
  }
165
166
  return allParsed;
@@ -17,7 +17,7 @@ const discovery = require("../discovery");
17
17
  const validate = require("./validate");
18
18
  const versioning = require("./versioning");
19
19
  const parseTriggers = require("./parseTriggers");
20
- const MIN_FUNCTIONS_SDK_VERSION = "3.19.0";
20
+ const MIN_FUNCTIONS_SDK_VERSION = "3.20.0";
21
21
  async function tryCreateDelegate(context) {
22
22
  const packageJsonPath = path.join(context.sourceDir, "package.json");
23
23
  if (!(await (0, util_1.promisify)(fs.exists)(packageJsonPath))) {
@@ -113,31 +113,16 @@ function addResourcesToBackend(projectId, runtime, annotation, want) {
113
113
  triggered = {
114
114
  eventTrigger: {
115
115
  eventType: annotation.eventTrigger.eventType,
116
- eventFilters: [
117
- {
118
- attribute: "resource",
119
- value: annotation.eventTrigger.resource,
120
- },
121
- ],
116
+ eventFilters: { resource: annotation.eventTrigger.resource },
122
117
  retry: !!annotation.failurePolicy,
123
118
  },
124
119
  };
125
120
  if (annotation.platform === "gcfv2") {
126
121
  if (annotation.eventTrigger.eventType === v2events.PUBSUB_PUBLISH_EVENT) {
127
- triggered.eventTrigger.eventFilters = [
128
- {
129
- attribute: "topic",
130
- value: annotation.eventTrigger.resource,
131
- },
132
- ];
122
+ triggered.eventTrigger.eventFilters = { topic: annotation.eventTrigger.resource };
133
123
  }
134
124
  if (v2events.STORAGE_EVENTS.find((event) => { var _a; return event === (((_a = annotation.eventTrigger) === null || _a === void 0 ? void 0 : _a.eventType) || ""); })) {
135
- triggered.eventTrigger.eventFilters = [
136
- {
137
- attribute: "bucket",
138
- value: annotation.eventTrigger.resource,
139
- },
140
- ];
125
+ triggered.eventTrigger.eventFilters = { bucket: annotation.eventTrigger.resource };
141
126
  }
142
127
  }
143
128
  }
@@ -162,7 +147,8 @@ function addResourcesToBackend(projectId, runtime, annotation, want) {
162
147
  }
163
148
  endpoint.secretEnvironmentVariables = secretEnvs;
164
149
  }
165
- proto.copyIfPresent(endpoint, annotation, "concurrency", "serviceAccountEmail", "labels", "ingressSettings", "timeout", "maxInstances", "minInstances", "availableMemoryMb");
150
+ proto.copyIfPresent(endpoint, annotation, "concurrency", "serviceAccountEmail", "labels", "ingressSettings", "maxInstances", "minInstances", "availableMemoryMb");
151
+ proto.renameIfPresent(endpoint, annotation, "timeoutSeconds", "timeout", proto.secondsFromDuration);
166
152
  want.endpoints[region] = want.endpoints[region] || {};
167
153
  want.endpoints[region][endpoint.id] = endpoint;
168
154
  mergeRequiredAPIs(want);
@@ -2,7 +2,6 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.ensureStorageTriggerRegion = exports.obtainStorageBindings = void 0;
4
4
  const storage = require("../../../gcp/storage");
5
- const backend = require("../backend");
6
5
  const logger_1 = require("../../../logger");
7
6
  const error_1 = require("../../../error");
8
7
  const location_1 = require("../../../gcp/location");
@@ -27,12 +26,8 @@ async function ensureStorageTriggerRegion(endpoint) {
27
26
  const { eventTrigger } = endpoint;
28
27
  if (!eventTrigger.region) {
29
28
  logger_1.logger.debug("Looking up bucket region for the storage event trigger");
30
- const bucketFilter = backend.findEventFilter(endpoint, "bucket");
31
- if (!bucketFilter) {
32
- throw new error_1.FirebaseError("Storage event trigger unexpectedly missing event filter with bucket attribute.");
33
- }
34
29
  try {
35
- const bucket = await storage.getBucket(bucketFilter.value);
30
+ const bucket = await storage.getBucket(eventTrigger.eventFilters.bucket);
36
31
  eventTrigger.region = bucket.location.toLowerCase();
37
32
  logger_1.logger.debug("Setting the event trigger region to", eventTrigger.region, ".");
38
33
  }
@@ -0,0 +1,41 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.replaceConsoleLinks = void 0;
4
+ const registry_1 = require("../registry");
5
+ const types_1 = require("../types");
6
+ function replaceConsoleLinks(postinstall) {
7
+ const uiInfo = registry_1.EmulatorRegistry.getInfo(types_1.Emulators.UI);
8
+ const uiUrl = uiInfo ? `http://${registry_1.EmulatorRegistry.getInfoHostString(uiInfo)}` : "unknown";
9
+ let subbedPostinstall = postinstall;
10
+ const linkReplacements = new Map([
11
+ [
12
+ /(http[s]?:\/\/)?console\.firebase\.google\.com\/(u\/[0-9]\/)?project\/[A-Za-z0-9-]+\/storage[A-Za-z0-9\/-]*(?=[\)\]\s])/,
13
+ `${uiUrl}/${types_1.Emulators.STORAGE}`,
14
+ ],
15
+ [
16
+ /(http[s]?:\/\/)?console\.firebase\.google\.com\/(u\/[0-9]\/)?project\/[A-Za-z0-9-]+\/firestore[A-Za-z0-9\/-]*(?=[\)\]\s])/,
17
+ `${uiUrl}/${types_1.Emulators.FIRESTORE}`,
18
+ ],
19
+ [
20
+ /(http[s]?:\/\/)?console\.firebase\.google\.com\/(u\/[0-9]\/)?project\/[A-Za-z0-9-]+\/database[A-Za-z0-9\/-]*(?=[\)\]\s])/,
21
+ `${uiUrl}/${types_1.Emulators.DATABASE}`,
22
+ ],
23
+ [
24
+ /(http[s]?:\/\/)?console\.firebase\.google\.com\/(u\/[0-9]\/)?project\/[A-Za-z0-9-]+\/authentication[A-Za-z0-9\/-]*(?=[\)\]\s])/,
25
+ `${uiUrl}/${types_1.Emulators.AUTH}`,
26
+ ],
27
+ [
28
+ /(http[s]?:\/\/)?console\.firebase\.google\.com\/(u\/[0-9]\/)?project\/[A-Za-z0-9-]+\/functions[A-Za-z0-9\/-]*(?=[\)\]\s])/,
29
+ `${uiUrl}/logs`,
30
+ ],
31
+ [
32
+ /(http[s]?:\/\/)?console\.firebase\.google\.com\/(u\/[0-9]\/)?project\/[A-Za-z0-9-]+\/extensions[A-Za-z0-9\/-]*(?=[\)\]\s])/,
33
+ `${uiUrl}/${types_1.Emulators.EXTENSIONS}`,
34
+ ],
35
+ ]);
36
+ for (const [consoleLinkRegex, replacement] of linkReplacements) {
37
+ subbedPostinstall = subbedPostinstall.replace(consoleLinkRegex, replacement);
38
+ }
39
+ return subbedPostinstall;
40
+ }
41
+ exports.replaceConsoleLinks = replaceConsoleLinks;
@@ -8,9 +8,9 @@ const fs = require("fs");
8
8
  const backend = require("../deploy/functions/backend");
9
9
  const constants_1 = require("./constants");
10
10
  const proto_1 = require("../gcp/proto");
11
- const logger_1 = require("../logger");
12
11
  const manifest_1 = require("../extensions/manifest");
13
12
  const extensionsHelper_1 = require("../extensions/extensionsHelper");
13
+ const postinstall_1 = require("./extensions/postinstall");
14
14
  const memoryLookup = {
15
15
  "128MB": 128,
16
16
  "256MB": 256,
@@ -33,12 +33,7 @@ class EmulatedTrigger {
33
33
  return memoryLookup[this.definition.availableMemoryMb || "128MB"] * 1024 * 1024;
34
34
  }
35
35
  get timeoutMs() {
36
- if (typeof this.definition.timeout === "number") {
37
- return this.definition.timeout * 1000;
38
- }
39
- else {
40
- return parseInt((this.definition.timeout || "60s").split("s")[0], 10) * 1000;
41
- }
36
+ return (this.definition.timeoutSeconds || 60) * 1000;
42
37
  }
43
38
  getRawFunction() {
44
39
  if (!this.module) {
@@ -62,7 +57,7 @@ function emulatedFunctionsFromEndpoints(endpoints) {
62
57
  name: endpoint.id,
63
58
  id: `${endpoint.region}-${endpoint.id}`,
64
59
  };
65
- (0, proto_1.copyIfPresent)(def, endpoint, "timeout", "availableMemoryMb", "labels", "platform", "secretEnvironmentVariables");
60
+ (0, proto_1.copyIfPresent)(def, endpoint, "availableMemoryMb", "labels", "timeoutSeconds", "platform", "secretEnvironmentVariables");
66
61
  if (backend.isHttpsTriggered(endpoint)) {
67
62
  def.httpsTrigger = endpoint.httpsTrigger;
68
63
  }
@@ -73,25 +68,20 @@ function emulatedFunctionsFromEndpoints(endpoints) {
73
68
  else if (backend.isEventTriggered(endpoint)) {
74
69
  const eventTrigger = endpoint.eventTrigger;
75
70
  if (endpoint.platform === "gcfv1") {
76
- const resourceFilter = backend.findEventFilter(endpoint, "resource");
77
- if (!resourceFilter) {
78
- logger_1.logger.debug(`Invalid event trigger ${JSON.stringify(endpoint)}, expected event filter with resource attribute. Skipping.`);
79
- continue;
80
- }
81
71
  def.eventTrigger = {
82
72
  eventType: eventTrigger.eventType,
83
- resource: resourceFilter.value,
73
+ resource: eventTrigger.eventFilters.resource,
84
74
  };
85
75
  }
86
76
  else {
87
- const [eventFilter] = endpoint.eventTrigger.eventFilters;
88
- if (!eventFilter) {
89
- logger_1.logger.debug(`Invalid event trigger ${JSON.stringify(endpoint)}, expected at least one event filter. Skipping.`);
77
+ const { resource, topic, bucket } = endpoint.eventTrigger.eventFilters;
78
+ const eventResource = resource || topic || bucket;
79
+ if (!eventResource) {
90
80
  continue;
91
81
  }
92
82
  def.eventTrigger = {
93
83
  eventType: eventTrigger.eventType,
94
- resource: eventFilter.value,
84
+ resource: eventResource,
95
85
  };
96
86
  }
97
87
  }
@@ -244,7 +234,7 @@ function getSecretLocalPath(backend, projectDir) {
244
234
  }
245
235
  exports.getSecretLocalPath = getSecretLocalPath;
246
236
  function toBackendInfo(e, cf3Triggers) {
247
- var _a;
237
+ var _a, _b;
248
238
  const envWithSecrets = Object.assign({}, e.env);
249
239
  for (const s of e.secretEnv) {
250
240
  envWithSecrets[s.key] = backend.secretVersionName(s);
@@ -252,10 +242,16 @@ function toBackendInfo(e, cf3Triggers) {
252
242
  let extensionVersion = e.extensionVersion;
253
243
  if (extensionVersion) {
254
244
  extensionVersion = (0, extensionsHelper_1.substituteParams)(extensionVersion, e.env);
245
+ if ((_a = extensionVersion.spec) === null || _a === void 0 ? void 0 : _a.postinstallContent) {
246
+ extensionVersion.spec.postinstallContent = (0, postinstall_1.replaceConsoleLinks)(extensionVersion.spec.postinstallContent);
247
+ }
255
248
  }
256
249
  let extensionSpec = e.extensionSpec;
257
250
  if (extensionSpec) {
258
251
  extensionSpec = (0, extensionsHelper_1.substituteParams)(extensionSpec, e.env);
252
+ if (extensionSpec === null || extensionSpec === void 0 ? void 0 : extensionSpec.postinstallContent) {
253
+ extensionSpec.postinstallContent = (0, postinstall_1.replaceConsoleLinks)(extensionSpec.postinstallContent);
254
+ }
259
255
  }
260
256
  return JSON.parse(JSON.stringify({
261
257
  directory: e.functionsDir,
@@ -264,7 +260,7 @@ function toBackendInfo(e, cf3Triggers) {
264
260
  extension: e.extension,
265
261
  extensionVersion: extensionVersion,
266
262
  extensionSpec: extensionSpec,
267
- functionTriggers: (_a = e.predefinedTriggers) !== null && _a !== void 0 ? _a : cf3Triggers,
263
+ functionTriggers: (_b = e.predefinedTriggers) !== null && _b !== void 0 ? _b : cf3Triggers,
268
264
  }));
269
265
  }
270
266
  exports.toBackendInfo = toBackendInfo;
@@ -173,7 +173,7 @@ function createFirebaseEndpoints(emulator) {
173
173
  return res.status(400).json({
174
174
  error: {
175
175
  code: 400,
176
- message: err.toString(),
176
+ message: err.message,
177
177
  },
178
178
  });
179
179
  }
@@ -201,7 +201,7 @@ function createFirebaseEndpoints(emulator) {
201
201
  }
202
202
  throw err;
203
203
  }
204
- metadata.addDownloadToken();
204
+ metadata.addDownloadToken(false);
205
205
  return res.status(200).json(new metadata_1.OutgoingFirebaseMetadata(metadata));
206
206
  }
207
207
  const uploadCommand = req.header("x-goog-upload-command");
@@ -291,9 +291,9 @@ function createFirebaseEndpoints(emulator) {
291
291
  }
292
292
  throw err;
293
293
  }
294
- let metadata;
294
+ let storedMetadata;
295
295
  try {
296
- metadata = await storageLayer.uploadObject(upload);
296
+ storedMetadata = await storageLayer.uploadObject(upload);
297
297
  }
298
298
  catch (err) {
299
299
  if (err instanceof errors_1.ForbiddenError) {
@@ -306,8 +306,8 @@ function createFirebaseEndpoints(emulator) {
306
306
  }
307
307
  throw err;
308
308
  }
309
- metadata.addDownloadToken();
310
- return res.status(200).json(new metadata_1.OutgoingFirebaseMetadata(metadata));
309
+ storedMetadata.addDownloadToken(false);
310
+ return res.status(200).json(new metadata_1.OutgoingFirebaseMetadata(storedMetadata));
311
311
  }
312
312
  return res.sendStatus(400);
313
313
  };
@@ -210,12 +210,15 @@ function createCloudEndpoints(emulator) {
210
210
  ({ metadataRaw, dataRaw } = (0, multipart_1.parseObjectUploadMultipartRequest)(contentTypeHeader, await (0, request_1.reqBodyToBuffer)(req)));
211
211
  }
212
212
  catch (err) {
213
- return res.status(400).json({
214
- error: {
215
- code: 400,
216
- message: err,
217
- },
218
- });
213
+ if (err instanceof Error) {
214
+ return res.status(400).json({
215
+ error: {
216
+ code: 400,
217
+ message: err.message,
218
+ },
219
+ });
220
+ }
221
+ throw err;
219
222
  }
220
223
  const upload = uploadService.multipartUpload({
221
224
  bucketId: req.params.bucketId,