firebase-tools 11.6.0 → 11.8.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/lib/auth.js +1 -1
  2. package/lib/commands/crashlytics-mappingfile-generateid.js +26 -0
  3. package/lib/commands/crashlytics-mappingfile-upload.js +46 -0
  4. package/lib/commands/crashlytics-symbols-upload.js +18 -87
  5. package/lib/commands/functions-delete.js +2 -0
  6. package/lib/commands/functions-secrets-get.js +2 -0
  7. package/lib/commands/index.js +3 -0
  8. package/lib/crashlytics/buildToolsJarHelper.js +51 -0
  9. package/lib/deploy/functions/backend.js +4 -4
  10. package/lib/deploy/functions/build.js +98 -17
  11. package/lib/deploy/functions/cache/applyHash.js +29 -0
  12. package/lib/deploy/functions/cache/hash.js +30 -0
  13. package/lib/deploy/functions/cel.js +249 -0
  14. package/lib/deploy/functions/checkIam.js +6 -5
  15. package/lib/deploy/functions/functionsDeployHelper.js +12 -1
  16. package/lib/deploy/functions/params.js +262 -105
  17. package/lib/deploy/functions/prepare.js +34 -4
  18. package/lib/deploy/functions/prepareFunctionsUpload.js +12 -4
  19. package/lib/deploy/functions/release/fabricator.js +39 -6
  20. package/lib/deploy/functions/release/index.js +2 -0
  21. package/lib/deploy/functions/release/planner.js +17 -0
  22. package/lib/deploy/functions/runtimes/discovery/index.js +1 -16
  23. package/lib/deploy/functions/runtimes/discovery/parsing.js +16 -0
  24. package/lib/deploy/functions/runtimes/discovery/v1alpha1.js +59 -131
  25. package/lib/deploy/functions/runtimes/node/parseTriggers.js +10 -1
  26. package/lib/emulator/constants.js +1 -1
  27. package/lib/emulator/controller.js +6 -11
  28. package/lib/emulator/extensionsEmulator.js +1 -0
  29. package/lib/emulator/functionsEmulator.js +18 -59
  30. package/lib/emulator/functionsEmulatorRuntime.js +12 -23
  31. package/lib/emulator/functionsRuntimeWorker.js +38 -7
  32. package/lib/emulator/storage/apis/firebase.js +145 -129
  33. package/lib/emulator/storage/apis/gcloud.js +102 -42
  34. package/lib/emulator/storage/files.js +25 -15
  35. package/lib/emulator/storage/metadata.js +86 -56
  36. package/lib/emulator/storage/multipart.js +2 -2
  37. package/lib/emulator/storage/rules/runtime.js +10 -2
  38. package/lib/emulator/storage/upload.js +45 -9
  39. package/lib/extensions/extensionsHelper.js +1 -1
  40. package/lib/functions/constants.js +14 -0
  41. package/lib/functions/env.js +9 -9
  42. package/lib/functions/secrets.js +8 -1
  43. package/lib/gcp/cloudfunctions.js +15 -18
  44. package/lib/gcp/cloudfunctionsv2.js +15 -18
  45. package/lib/gcp/cloudscheduler.js +32 -14
  46. package/lib/gcp/secretManager.js +15 -1
  47. package/lib/gcp/storage.js +15 -1
  48. package/lib/previews.js +1 -1
  49. package/lib/track.js +3 -0
  50. package/npm-shrinkwrap.json +563 -30
  51. package/package.json +7 -5
  52. package/templates/init/storage/storage.rules +1 -1
@@ -24,6 +24,7 @@ const utils = require("../../../utils");
24
24
  const services = require("../services");
25
25
  const v1_1 = require("../../../functions/events/v1");
26
26
  const throttler_1 = require("../../../throttler/throttler");
27
+ const checkIam_1 = require("../checkIam");
27
28
  const gcfV1PollerOptions = {
28
29
  apiOrigin: api_1.functionsOrigin,
29
30
  apiVersion: gcf.API_VERSION,
@@ -46,6 +47,7 @@ class Fabricator {
46
47
  this.functionExecutor = args.functionExecutor;
47
48
  this.sources = args.sources;
48
49
  this.appEngineLocation = args.appEngineLocation;
50
+ this.projectNumber = args.projectNumber;
49
51
  }
50
52
  async applyPlan(plan) {
51
53
  const timer = new timer_1.Timer();
@@ -89,6 +91,9 @@ class Fabricator {
89
91
  this.logOpStart("creating", endpoint);
90
92
  upserts.push(handle("create", endpoint, () => this.createEndpoint(endpoint, scraper)));
91
93
  }
94
+ for (const endpoint of changes.endpointsToSkip) {
95
+ utils.logSuccess(this.getLogSuccessMessage("skip", endpoint));
96
+ }
92
97
  for (const update of changes.endpointsToUpdate) {
93
98
  this.logOpStart("updating", update.endpoint);
94
99
  upserts.push(handle("update", update.endpoint, () => this.updateEndpoint(update, scraper)));
@@ -270,6 +275,12 @@ class Fabricator {
270
275
  .run(() => run.setInvokerCreate(endpoint.project, serviceName, ["public"]))
271
276
  .catch(rethrowAs(endpoint, "set invoker"));
272
277
  }
278
+ else if (backend.isScheduleTriggered(endpoint)) {
279
+ const invoker = [(0, checkIam_1.getDefaultComputeServiceAgent)(this.projectNumber)];
280
+ await this.executor
281
+ .run(() => run.setInvokerCreate(endpoint.project, serviceName, invoker))
282
+ .catch(rethrowAs(endpoint, "set invoker"));
283
+ }
273
284
  const mem = endpoint.availableMemoryMb || backend.DEFAULT_MEMORY;
274
285
  const hasCustomCPU = endpoint.cpu !== backend.memoryToGen1Cpu(mem);
275
286
  if (!endpoint.concurrency) {
@@ -346,6 +357,9 @@ class Fabricator {
346
357
  v1_1.AUTH_BLOCKING_EVENTS.includes(endpoint.blockingTrigger.eventType)) {
347
358
  invoker = ["public"];
348
359
  }
360
+ else if (backend.isScheduleTriggered(endpoint)) {
361
+ invoker = [(0, checkIam_1.getDefaultComputeServiceAgent)(this.projectNumber)];
362
+ }
349
363
  if (invoker) {
350
364
  await this.executor
351
365
  .run(() => run.setInvokerUpdate(endpoint.project, serviceName, invoker))
@@ -452,13 +466,16 @@ class Fabricator {
452
466
  }
453
467
  }
454
468
  async upsertScheduleV1(endpoint) {
455
- const job = scheduler.jobFromEndpoint(endpoint, this.appEngineLocation);
469
+ const job = scheduler.jobFromEndpoint(endpoint, this.appEngineLocation, this.projectNumber);
456
470
  await this.executor
457
471
  .run(() => scheduler.createOrReplaceJob(job))
458
472
  .catch(rethrowAs(endpoint, "upsert schedule"));
459
473
  }
460
- upsertScheduleV2(endpoint) {
461
- return Promise.reject(new reporter.DeploymentError(endpoint, "upsert schedule", new Error("Not implemented")));
474
+ async upsertScheduleV2(endpoint) {
475
+ const job = scheduler.jobFromEndpoint(endpoint, endpoint.region, this.projectNumber);
476
+ await this.executor
477
+ .run(() => scheduler.createOrReplaceJob(job))
478
+ .catch(rethrowAs(endpoint, "upsert schedule"));
462
479
  }
463
480
  async upsertTaskQueue(endpoint) {
464
481
  const queue = cloudtasks.queueFromEndpoint(endpoint);
@@ -486,8 +503,11 @@ class Fabricator {
486
503
  .run(() => pubsub.deleteTopic(topicName))
487
504
  .catch(rethrowAs(endpoint, "delete topic"));
488
505
  }
489
- deleteScheduleV2(endpoint) {
490
- return Promise.reject(new reporter.DeploymentError(endpoint, "delete schedule", new Error("Not implemented")));
506
+ async deleteScheduleV2(endpoint) {
507
+ const jobName = scheduler.jobNameForEndpoint(endpoint, endpoint.region);
508
+ await this.executor
509
+ .run(() => scheduler.deleteJob(jobName))
510
+ .catch(rethrowAs(endpoint, "delete schedule"));
491
511
  }
492
512
  async disableTaskQueue(endpoint) {
493
513
  const update = {
@@ -509,8 +529,21 @@ class Fabricator {
509
529
  utils.logLabeledBullet("functions", `${op} ${runtime} function ${clc.bold(label)}...`);
510
530
  }
511
531
  logOpSuccess(op, endpoint) {
532
+ utils.logSuccess(this.getLogSuccessMessage(op, endpoint));
533
+ }
534
+ getLogSuccessMessage(op, endpoint) {
512
535
  const label = helper.getFunctionLabel(endpoint);
513
- utils.logSuccess(`${clc.bold(clc.green(`functions[${label}]`))} Successful ${op} operation.`);
536
+ switch (op) {
537
+ case "skip":
538
+ return `${clc.bold(clc.magenta(`functions[${label}]`))} Skipped (No changes detected)`;
539
+ default:
540
+ return `${clc.bold(clc.green(`functions[${label}]`))} Successful ${op} operation.`;
541
+ }
542
+ }
543
+ getSkippedDeployingNopOpMessage(endpoints) {
544
+ const functionNames = endpoints.map((endpoint) => endpoint.id).join(",");
545
+ return `${clc.bold(clc.magenta(`functions:`))} You can re-deploy skipped functions with:
546
+ ${clc.bold(`firebase deploy --only functions:${functionNames}`)} or ${clc.bold(`FUNCTIONS_DEPLOY_UNCHANGED=true firebase deploy`)}`;
514
547
  }
515
548
  }
516
549
  exports.Fabricator = Fabricator;
@@ -14,6 +14,7 @@ const prompts = require("../prompts");
14
14
  const functionsConfig_1 = require("../../../functionsConfig");
15
15
  const functionsDeployHelper_1 = require("../functionsDeployHelper");
16
16
  const error_1 = require("../../../error");
17
+ const getProjectNumber_1 = require("../../../getProjectNumber");
17
18
  async function release(context, options, payload) {
18
19
  if (!context.config) {
19
20
  return;
@@ -53,6 +54,7 @@ async function release(context, options, payload) {
53
54
  executor: new executor.QueueExecutor({}),
54
55
  sources: context.sources,
55
56
  appEngineLocation: (0, functionsConfig_1.getAppEngineLocation)(context.firebaseConfig),
57
+ projectNumber: options.projectNumber || (await (0, getProjectNumber_1.getProjectNumber)(context.projectId)),
56
58
  });
57
59
  const summary = await fab.applyPlan(plan);
58
60
  await reporter.logAndTrackDeployStats(summary);
@@ -7,6 +7,7 @@ const error_1 = require("../../../error");
7
7
  const utils = require("../../../utils");
8
8
  const backend = require("../backend");
9
9
  const v2events = require("../../../functions/events/v2");
10
+ const previews_1 = require("../../../previews");
10
11
  function calculateChangesets(want, have, keyFn, deleteAll) {
11
12
  const toCreate = utils.groupBy(Object.keys(want)
12
13
  .filter((id) => !have[id])
@@ -15,20 +16,36 @@ function calculateChangesets(want, have, keyFn, deleteAll) {
15
16
  .filter((id) => !want[id])
16
17
  .filter((id) => deleteAll || (0, deploymentTool_1.isFirebaseManaged)(have[id].labels || {}))
17
18
  .map((id) => have[id]), keyFn);
19
+ const { skipdeployingnoopfunctions } = previews_1.previews;
20
+ const toSkipPredicate = (id) => !!(skipdeployingnoopfunctions &&
21
+ have[id].hash &&
22
+ want[id].hash &&
23
+ want[id].hash === have[id].hash);
24
+ const toSkipEndpointsMap = Object.keys(want)
25
+ .filter((id) => have[id])
26
+ .filter((id) => toSkipPredicate(id))
27
+ .reduce((memo, id) => {
28
+ memo[id] = want[id];
29
+ return memo;
30
+ }, {});
31
+ const toSkip = utils.groupBy(Object.values(toSkipEndpointsMap), keyFn);
18
32
  const toUpdate = utils.groupBy(Object.keys(want)
19
33
  .filter((id) => have[id])
34
+ .filter((id) => !toSkipEndpointsMap[id])
20
35
  .map((id) => calculateUpdate(want[id], have[id])), (eu) => keyFn(eu.endpoint));
21
36
  const result = {};
22
37
  const keys = new Set([
23
38
  ...Object.keys(toCreate),
24
39
  ...Object.keys(toDelete),
25
40
  ...Object.keys(toUpdate),
41
+ ...Object.keys(toSkip),
26
42
  ]);
27
43
  for (const key of keys) {
28
44
  result[key] = {
29
45
  endpointsToCreate: toCreate[key] || [],
30
46
  endpointsToUpdate: toUpdate[key] || [],
31
47
  endpointsToDelete: toDelete[key] || [],
48
+ endpointsToSkip: toSkip[key] || [],
32
49
  };
33
50
  }
34
51
  return result;
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.detectFromPort = exports.detectFromYaml = exports.yamlToBuild = exports.yamlToBackend = exports.readFileAsync = void 0;
3
+ exports.detectFromPort = exports.detectFromYaml = exports.yamlToBuild = exports.readFileAsync = void 0;
4
4
  const node_fetch_1 = require("node-fetch");
5
5
  const fs = require("fs");
6
6
  const path = require("path");
@@ -11,21 +11,6 @@ const api = require("../../../../api");
11
11
  const v1alpha1 = require("./v1alpha1");
12
12
  const error_1 = require("../../../../error");
13
13
  exports.readFileAsync = (0, util_1.promisify)(fs.readFile);
14
- function yamlToBackend(yaml, project, region, runtime) {
15
- try {
16
- if (!yaml.specVersion) {
17
- throw new error_1.FirebaseError("Expect backend yaml to specify a version number");
18
- }
19
- if (yaml.specVersion === "v1alpha1") {
20
- return v1alpha1.backendFromV1Alpha1(yaml, project, region, runtime);
21
- }
22
- throw new error_1.FirebaseError("It seems you are using a newer SDK than this version of the CLI can handle. Please update your CLI with `npm install -g firebase-tools`");
23
- }
24
- catch (err) {
25
- throw new error_1.FirebaseError("Failed to parse backend specification", { children: [err] });
26
- }
27
- }
28
- exports.yamlToBackend = yamlToBackend;
29
14
  function yamlToBuild(yaml, project, region, runtime) {
30
15
  try {
31
16
  if (!yaml.specVersion) {
@@ -31,6 +31,22 @@ function assertKeyTypes(prefix, yaml, schema) {
31
31
  }
32
32
  continue;
33
33
  }
34
+ if (value === null) {
35
+ if (schemaType.endsWith("?")) {
36
+ continue;
37
+ }
38
+ throw new error_1.FirebaseError(`Expected ${fullKey} to be type ${schemaType}; was null`);
39
+ }
40
+ if (schemaType.endsWith("?")) {
41
+ schemaType = schemaType.slice(0, schemaType.length - 1);
42
+ }
43
+ if (schemaType.includes("Field")) {
44
+ const match = /^Field<(\w+)>$/.exec(schemaType);
45
+ if (match && typeof value !== "string" && typeof value !== match[1]) {
46
+ throw new error_1.FirebaseError(`Expected ${fullKey} to be Field<${match[1]}>; was ${typeof value}`);
47
+ }
48
+ continue;
49
+ }
34
50
  if (value === null) {
35
51
  if (schemaType.endsWith("?")) {
36
52
  continue;
@@ -1,7 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.backendFromV1Alpha1 = exports.buildFromV1Alpha1 = void 0;
4
- const backend = require("../../backend");
3
+ exports.buildFromV1Alpha1 = void 0;
5
4
  const build = require("../../build");
6
5
  const proto_1 = require("../../../../gcp/proto");
7
6
  const parsing_1 = require("./parsing");
@@ -27,33 +26,13 @@ function buildFromV1Alpha1(yaml, project, region, runtime) {
27
26
  bd.requiredAPIs = parseRequiredAPIs(manifest);
28
27
  for (const id of Object.keys(manifest.endpoints)) {
29
28
  const me = manifest.endpoints[id];
30
- assertManifestEndpoint(me, id);
29
+ assertBuildEndpoint(me, id);
31
30
  const be = parseEndpointForBuild(id, me, project, region, runtime);
32
31
  bd.endpoints[id] = be;
33
32
  }
34
33
  return bd;
35
34
  }
36
35
  exports.buildFromV1Alpha1 = buildFromV1Alpha1;
37
- function backendFromV1Alpha1(yaml, project, region, runtime) {
38
- const manifest = JSON.parse(JSON.stringify(yaml));
39
- const bkend = backend.empty();
40
- bkend.requiredAPIs = parseRequiredAPIs(manifest);
41
- (0, parsing_1.requireKeys)("", manifest, "endpoints");
42
- (0, parsing_1.assertKeyTypes)("", manifest, {
43
- specVersion: "string",
44
- params: "array",
45
- requiredAPIs: "array",
46
- endpoints: "object",
47
- });
48
- for (const id of Object.keys(manifest.endpoints)) {
49
- for (const parsed of parseEndpoints(manifest, id, project, region, runtime)) {
50
- bkend.endpoints[parsed.region] = bkend.endpoints[parsed.region] || {};
51
- bkend.endpoints[parsed.region][parsed.id] = parsed;
52
- }
53
- }
54
- return bkend;
55
- }
56
- exports.backendFromV1Alpha1 = backendFromV1Alpha1;
57
36
  function parseRequiredAPIs(manifest) {
58
37
  const requiredAPIs = manifest.requiredAPIs || [];
59
38
  for (const { api, reason } of requiredAPIs) {
@@ -66,22 +45,22 @@ function parseRequiredAPIs(manifest) {
66
45
  }
67
46
  return requiredAPIs;
68
47
  }
69
- function assertManifestEndpoint(ep, id) {
48
+ function assertBuildEndpoint(ep, id) {
70
49
  const prefix = `endpoints[${id}]`;
71
50
  (0, parsing_1.assertKeyTypes)(prefix, ep, {
72
51
  region: "array",
73
- platform: (platform) => backend.AllFunctionsPlatforms.includes(platform),
52
+ platform: (platform) => build.AllFunctionsPlatforms.includes(platform),
74
53
  entryPoint: "string",
75
- availableMemoryMb: (mem) => mem === null || backend.isValidMemoryOption(mem),
76
- maxInstances: "number?",
77
- minInstances: "number?",
78
- concurrency: "number?",
54
+ availableMemoryMb: (mem) => mem === null || isCEL(mem) || build.isValidMemoryOption(mem),
55
+ maxInstances: "Field<number>?",
56
+ minInstances: "Field<number>?",
57
+ concurrency: "Field<number>?",
79
58
  serviceAccount: "string?",
80
59
  serviceAccountEmail: "string?",
81
- timeoutSeconds: "number?",
60
+ timeoutSeconds: "Field<number>?",
82
61
  vpc: "object?",
83
62
  labels: "object?",
84
- ingressSettings: (setting) => setting === null || backend.AllIngressSettings.includes(setting),
63
+ ingressSettings: (setting) => setting === null || build.AllIngressSettings.includes(setting),
85
64
  environmentVariables: "object?",
86
65
  secretEnvironmentVariables: "array?",
87
66
  httpsTrigger: "object",
@@ -90,12 +69,12 @@ function assertManifestEndpoint(ep, id) {
90
69
  scheduleTrigger: "object",
91
70
  taskQueueTrigger: "object",
92
71
  blockingTrigger: "object",
93
- cpu: (cpu) => cpu === null || typeof cpu === "number" || cpu === "gcf_gen1",
72
+ cpu: (cpu) => cpu === null || isCEL(cpu) || cpu === "gcf_gen1" || typeof cpu === "number",
94
73
  });
95
74
  if (ep.vpc) {
96
75
  (0, parsing_1.assertKeyTypes)(prefix + ".vpc", ep.vpc, {
97
76
  connector: "string",
98
- egressSettings: (setting) => setting === null || backend.AllVpcEgressSettings.includes(setting),
77
+ egressSettings: (setting) => setting === null || build.AllVpcEgressSettings.includes(setting),
99
78
  });
100
79
  (0, parsing_1.requireKeys)(prefix + ".vpc", ep.vpc, "connector");
101
80
  }
@@ -124,44 +103,46 @@ function assertManifestEndpoint(ep, id) {
124
103
  if (triggerCount > 1) {
125
104
  throw new error_1.FirebaseError("Multiple triggers defined for endpoint" + id);
126
105
  }
127
- if (backend.isEventTriggered(ep)) {
106
+ if (build.isEventTriggered(ep)) {
128
107
  (0, parsing_1.requireKeys)(prefix + ".eventTrigger", ep.eventTrigger, "eventType", "eventFilters");
129
108
  (0, parsing_1.assertKeyTypes)(prefix + ".eventTrigger", ep.eventTrigger, {
130
109
  eventFilters: "object",
131
110
  eventFilterPathPatterns: "object",
132
111
  eventType: "string",
133
- retry: "boolean",
134
- region: "string",
112
+ retry: "Field<boolean>",
113
+ region: "Field<string>",
135
114
  serviceAccount: "string?",
136
115
  serviceAccountEmail: "string?",
137
116
  channel: "string",
138
117
  });
139
118
  }
140
- else if (backend.isHttpsTriggered(ep)) {
119
+ else if (build.isHttpsTriggered(ep)) {
141
120
  (0, parsing_1.assertKeyTypes)(prefix + ".httpsTrigger", ep.httpsTrigger, {
142
121
  invoker: "array?",
143
122
  });
144
123
  }
145
- else if (backend.isCallableTriggered(ep)) {
124
+ else if (build.isCallableTriggered(ep)) {
146
125
  }
147
- else if (backend.isScheduleTriggered(ep)) {
126
+ else if (build.isScheduleTriggered(ep)) {
148
127
  (0, parsing_1.assertKeyTypes)(prefix + ".scheduleTrigger", ep.scheduleTrigger, {
149
- schedule: "string",
150
- timeZone: "string?",
128
+ schedule: "Field<string>",
129
+ timeZone: "Field<string>?",
151
130
  retryConfig: "object?",
152
131
  });
153
- (0, parsing_1.assertKeyTypes)(prefix + ".scheduleTrigger.retryConfig", ep.scheduleTrigger.retryConfig || {}, {
154
- retryCount: "number?",
155
- maxDoublings: "number?",
156
- minBackoffSeconds: "number?",
157
- maxBackoffSeconds: "number?",
158
- maxRetrySeconds: "number?",
159
- minBackoffDuration: "string?",
160
- maxBackoffDuration: "string?",
161
- maxRetryDuration: "string?",
162
- });
132
+ if (ep.scheduleTrigger.retryConfig) {
133
+ (0, parsing_1.assertKeyTypes)(prefix + ".scheduleTrigger.retryConfig", ep.scheduleTrigger.retryConfig, {
134
+ retryCount: "Field<number>?",
135
+ maxDoublings: "Field<number>?",
136
+ minBackoffSeconds: "Field<number>?",
137
+ maxBackoffSeconds: "Field<number>?",
138
+ maxRetrySeconds: "Field<number>?",
139
+ maxRetryDuration: "string?",
140
+ minBackoffDuration: "string?",
141
+ maxBackoffDuration: "string?",
142
+ });
143
+ }
163
144
  }
164
- else if (backend.isTaskQueueTriggered(ep)) {
145
+ else if (build.isTaskQueueTriggered(ep)) {
165
146
  (0, parsing_1.assertKeyTypes)(prefix + ".taskQueueTrigger", ep.taskQueueTrigger, {
166
147
  rateLimits: "object?",
167
148
  retryConfig: "object?",
@@ -169,21 +150,21 @@ function assertManifestEndpoint(ep, id) {
169
150
  });
170
151
  if (ep.taskQueueTrigger.rateLimits) {
171
152
  (0, parsing_1.assertKeyTypes)(prefix + ".taskQueueTrigger.rateLimits", ep.taskQueueTrigger.rateLimits, {
172
- maxConcurrentDispatches: "number?",
173
- maxDispatchesPerSecond: "number?",
153
+ maxConcurrentDispatches: "Field<number>?",
154
+ maxDispatchesPerSecond: "Field<number>?",
174
155
  });
175
156
  }
176
157
  if (ep.taskQueueTrigger.retryConfig) {
177
158
  (0, parsing_1.assertKeyTypes)(prefix + ".taskQueueTrigger.retryConfig", ep.taskQueueTrigger.retryConfig, {
178
- maxAttempts: "number?",
179
- maxRetrySeconds: "number?",
180
- minBackoffSeconds: "number?",
181
- maxBackoffSeconds: "number?",
182
- maxDoublings: "number?",
159
+ maxAttempts: "Field<number>?",
160
+ maxRetrySeconds: "Field<number>?",
161
+ minBackoffSeconds: "Field<number>?",
162
+ maxBackoffSeconds: "Field<number>?",
163
+ maxDoublings: "Field<number>?",
183
164
  });
184
165
  }
185
166
  }
186
- else if (backend.isBlockingTriggered(ep)) {
167
+ else if (build.isBlockingTriggered(ep)) {
187
168
  (0, parsing_1.requireKeys)(prefix + ".blockingTrigger", ep.blockingTrigger, "eventType");
188
169
  (0, parsing_1.assertKeyTypes)(prefix + ".blockingTrigger", ep.blockingTrigger, {
189
170
  eventType: "string",
@@ -198,12 +179,14 @@ function assertManifestEndpoint(ep, id) {
198
179
  function parseEndpointForBuild(id, ep, project, defaultRegion, runtime) {
199
180
  var _a;
200
181
  let triggered;
201
- if (backend.isEventTriggered(ep)) {
182
+ if (build.isEventTriggered(ep)) {
202
183
  const eventTrigger = {
203
184
  eventType: ep.eventTrigger.eventType,
204
185
  retry: ep.eventTrigger.retry,
205
186
  };
206
- (0, proto_1.renameIfPresent)(eventTrigger, ep.eventTrigger, "serviceAccount", "serviceAccountEmail");
187
+ if ("serviceAccountEmail" in ep.eventTrigger) {
188
+ eventTrigger.serviceAccount = ep.eventTrigger.serviceAccountEmail;
189
+ }
207
190
  (0, proto_1.copyIfPresent)(eventTrigger, ep.eventTrigger, "serviceAccount", "eventFilterPathPatterns", "region");
208
191
  (0, proto_1.convertIfPresent)(eventTrigger, ep.eventTrigger, "channel", (c) => resolveChannelName(project, c, defaultRegion));
209
192
  (0, proto_1.convertIfPresent)(eventTrigger, ep.eventTrigger, "eventFilters", (filters) => {
@@ -215,20 +198,23 @@ function parseEndpointForBuild(id, ep, project, defaultRegion, runtime) {
215
198
  });
216
199
  triggered = { eventTrigger };
217
200
  }
218
- else if (backend.isHttpsTriggered(ep)) {
201
+ else if (build.isHttpsTriggered(ep)) {
219
202
  triggered = { httpsTrigger: {} };
220
203
  (0, proto_1.copyIfPresent)(triggered.httpsTrigger, ep.httpsTrigger, "invoker");
221
204
  }
222
- else if (backend.isCallableTriggered(ep)) {
205
+ else if (build.isCallableTriggered(ep)) {
223
206
  triggered = { callableTrigger: {} };
224
207
  }
225
- else if (backend.isScheduleTriggered(ep)) {
208
+ else if (build.isScheduleTriggered(ep)) {
226
209
  const st = {
227
210
  schedule: ep.scheduleTrigger.schedule || "",
228
211
  timeZone: (_a = ep.scheduleTrigger.timeZone) !== null && _a !== void 0 ? _a : null,
229
212
  };
230
213
  if (ep.scheduleTrigger.retryConfig) {
231
214
  st.retryConfig = {};
215
+ (0, proto_1.convertIfPresent)(st.retryConfig, ep.scheduleTrigger.retryConfig, "maxBackoffSeconds", "maxBackoffDuration", (duration) => (duration === null ? null : (0, proto_1.secondsFromDuration)(duration)));
216
+ (0, proto_1.convertIfPresent)(st.retryConfig, ep.scheduleTrigger.retryConfig, "minBackoffSeconds", "minBackoffDuration", (duration) => (duration === null ? null : (0, proto_1.secondsFromDuration)(duration)));
217
+ (0, proto_1.convertIfPresent)(st.retryConfig, ep.scheduleTrigger.retryConfig, "maxRetrySeconds", "maxRetryDuration", (duration) => (duration === null ? null : (0, proto_1.secondsFromDuration)(duration)));
232
218
  (0, proto_1.copyIfPresent)(st.retryConfig, ep.scheduleTrigger.retryConfig, "retryCount", "minBackoffSeconds", "maxBackoffSeconds", "maxRetrySeconds", "maxDoublings");
233
219
  (0, proto_1.convertIfPresent)(st.retryConfig, ep.scheduleTrigger.retryConfig, "minBackoffSeconds", "minBackoffDuration", (0, functional_1.nullsafeVisitor)(proto_1.secondsFromDuration));
234
220
  (0, proto_1.convertIfPresent)(st.retryConfig, ep.scheduleTrigger.retryConfig, "maxBackoffSeconds", "maxBackoffDuration", (0, functional_1.nullsafeVisitor)(proto_1.secondsFromDuration));
@@ -239,7 +225,7 @@ function parseEndpointForBuild(id, ep, project, defaultRegion, runtime) {
239
225
  }
240
226
  triggered = { scheduleTrigger: st };
241
227
  }
242
- else if (backend.isTaskQueueTriggered(ep)) {
228
+ else if (build.isTaskQueueTriggered(ep)) {
243
229
  const tq = {};
244
230
  if (ep.taskQueueTrigger.invoker) {
245
231
  tq.invoker = ep.taskQueueTrigger.invoker;
@@ -261,7 +247,7 @@ function parseEndpointForBuild(id, ep, project, defaultRegion, runtime) {
261
247
  }
262
248
  triggered = { taskQueueTrigger: tq };
263
249
  }
264
- else if (backend.isBlockingTriggered(ep)) {
250
+ else if (ep.blockingTrigger) {
265
251
  triggered = { blockingTrigger: ep.blockingTrigger };
266
252
  }
267
253
  else {
@@ -270,7 +256,9 @@ function parseEndpointForBuild(id, ep, project, defaultRegion, runtime) {
270
256
  }
271
257
  const parsed = Object.assign({ platform: ep.platform || "gcfv2", region: ep.region || [defaultRegion], project,
272
258
  runtime, entryPoint: ep.entryPoint }, triggered);
273
- (0, proto_1.renameIfPresent)(parsed, ep, "serviceAccount", "serviceAccountEmail");
259
+ if ("serviceAccountEmail" in ep) {
260
+ parsed.serviceAccount = ep.serviceAccountEmail;
261
+ }
274
262
  (0, proto_1.copyIfPresent)(parsed, ep, "availableMemoryMb", "cpu", "maxInstances", "minInstances", "concurrency", "timeoutSeconds", "vpc", "labels", "ingressSettings", "environmentVariables", "serviceAccount");
275
263
  (0, proto_1.convertIfPresent)(parsed, ep, "secretEnvironmentVariables", (senvs) => {
276
264
  if (!senvs) {
@@ -282,69 +270,6 @@ function parseEndpointForBuild(id, ep, project, defaultRegion, runtime) {
282
270
  });
283
271
  return parsed;
284
272
  }
285
- function parseEndpoints(manifest, id, project, defaultRegion, runtime) {
286
- const allParsed = [];
287
- const prefix = `endpoints[${id}]`;
288
- const ep = manifest.endpoints[id];
289
- assertManifestEndpoint(ep, id);
290
- for (const region of ep.region || [defaultRegion]) {
291
- let triggered;
292
- if (backend.isEventTriggered(ep)) {
293
- const eventTrigger = {
294
- eventType: ep.eventTrigger.eventType,
295
- retry: false,
296
- };
297
- (0, proto_1.renameIfPresent)(eventTrigger, ep.eventTrigger, "serviceAccount", "serviceAccountEmail");
298
- (0, proto_1.copyIfPresent)(eventTrigger, ep.eventTrigger, "eventFilterPathPatterns", "retry", "serviceAccount", "region");
299
- (0, proto_1.convertIfPresent)(eventTrigger, ep.eventTrigger, "channel", (c) => resolveChannelName(project, c, defaultRegion));
300
- (0, proto_1.convertIfPresent)(eventTrigger, ep.eventTrigger, "eventFilters", (filters) => {
301
- const copy = Object.assign({}, filters);
302
- if (copy["topic"] && !copy["topic"].startsWith("projects/")) {
303
- copy["topic"] = `projects/${project}/topics/${copy["topic"]}`;
304
- }
305
- return copy;
306
- });
307
- triggered = { eventTrigger };
308
- }
309
- else if (backend.isHttpsTriggered(ep)) {
310
- triggered = { httpsTrigger: {} };
311
- (0, proto_1.copyIfPresent)(triggered.httpsTrigger, ep.httpsTrigger, "invoker");
312
- }
313
- else if (backend.isCallableTriggered(ep)) {
314
- triggered = { callableTrigger: {} };
315
- }
316
- else if (backend.isScheduleTriggered(ep)) {
317
- triggered = { scheduleTrigger: ep.scheduleTrigger };
318
- }
319
- else if (backend.isTaskQueueTriggered(ep)) {
320
- triggered = { taskQueueTrigger: ep.taskQueueTrigger };
321
- }
322
- else if (backend.isBlockingTriggered(ep)) {
323
- triggered = { blockingTrigger: ep.blockingTrigger };
324
- }
325
- else {
326
- throw new error_1.FirebaseError(`Do not recognize trigger type for endpoint ${id}. Try upgrading ` +
327
- "firebase-tools with npm install -g firebase-tools@latest");
328
- }
329
- (0, parsing_1.requireKeys)(prefix, ep, "entryPoint");
330
- const parsed = Object.assign({ platform: ep.platform || "gcfv2", id,
331
- region,
332
- project,
333
- runtime, entryPoint: ep.entryPoint }, triggered);
334
- (0, proto_1.renameIfPresent)(parsed, ep, "serviceAccount", "serviceAccountEmail");
335
- (0, proto_1.copyIfPresent)(parsed, ep, "availableMemoryMb", "maxInstances", "minInstances", "concurrency", "serviceAccount", "timeoutSeconds", "vpc", "labels", "ingressSettings", "environmentVariables", "cpu");
336
- (0, proto_1.convertIfPresent)(parsed, ep, "secretEnvironmentVariables", (senvs) => {
337
- if (!senvs) {
338
- return null;
339
- }
340
- return senvs.map(({ key, secret }) => {
341
- return { key, secret: secret || key, projectId: project };
342
- });
343
- });
344
- allParsed.push(parsed);
345
- }
346
- return allParsed;
347
- }
348
273
  function resolveChannelName(projectId, channel, defaultRegion) {
349
274
  if (!channel.includes("/")) {
350
275
  const location = defaultRegion;
@@ -365,3 +290,6 @@ function resolveChannelName(projectId, channel, defaultRegion) {
365
290
  return "projects/" + projectId + "/locations/" + location + "/channels/" + channelId;
366
291
  }
367
292
  }
293
+ function isCEL(expr) {
294
+ return typeof expr === "string" && expr.includes("{{") && expr.includes("}}");
295
+ }
@@ -116,7 +116,7 @@ function addResourcesToBuild(projectId, runtime, annotation, want) {
116
116
  proto.copyIfPresent(triggered.taskQueueTrigger.retryConfig, annotation.taskQueueTrigger.retryConfig, "maxAttempts", "maxDoublings");
117
117
  proto.convertIfPresent(triggered.taskQueueTrigger.retryConfig, annotation.taskQueueTrigger.retryConfig, "minBackoffSeconds", "minBackoff", toSeconds);
118
118
  proto.convertIfPresent(triggered.taskQueueTrigger.retryConfig, annotation.taskQueueTrigger.retryConfig, "maxBackoffSeconds", "maxBackoff", toSeconds);
119
- proto.convertIfPresent(triggered.taskQueueTrigger.retryConfig, annotation.taskQueueTrigger.retryConfig, "maxRetryDurationSeconds", "maxRetryDuration", toSeconds);
119
+ proto.convertIfPresent(triggered.taskQueueTrigger.retryConfig, annotation.taskQueueTrigger.retryConfig, "maxRetrySeconds", "maxRetryDuration", toSeconds);
120
120
  }
121
121
  }
122
122
  else if (annotation.httpsTrigger) {
@@ -199,6 +199,15 @@ function addResourcesToBuild(projectId, runtime, annotation, want) {
199
199
  return str;
200
200
  });
201
201
  proto.convertIfPresent(endpoint, annotation, "timeoutSeconds", "timeout", proto.secondsFromDuration);
202
+ if (annotation.secrets) {
203
+ endpoint.secretEnvironmentVariables = annotation.secrets.map((secret) => {
204
+ return {
205
+ secret,
206
+ projectId,
207
+ key: secret,
208
+ };
209
+ });
210
+ }
202
211
  want.endpoints[endpointId] = endpoint;
203
212
  }
204
213
  exports.addResourcesToBuild = addResourcesToBuild;
@@ -28,7 +28,7 @@ exports.FIND_AVAILBLE_PORT_BY_DEFAULT = {
28
28
  auth: false,
29
29
  storage: false,
30
30
  extensions: false,
31
- eventarc: false,
31
+ eventarc: true,
32
32
  };
33
33
  exports.EMULATOR_DESCRIPTION = {
34
34
  ui: "Emulator UI",
@@ -338,18 +338,13 @@ async function startAll(options, showUI = true) {
338
338
  projectAlias: options.projectAlias,
339
339
  });
340
340
  await startEmulator(functionsEmulator);
341
+ const eventarcAddr = await getAndCheckAddress(types_1.Emulators.EVENTARC, options);
342
+ const eventarcEmulator = new eventarcEmulator_1.EventarcEmulator({
343
+ host: eventarcAddr.host,
344
+ port: eventarcAddr.port,
345
+ });
346
+ await startEmulator(eventarcEmulator);
341
347
  }
342
- if (!shouldStart(options, types_1.Emulators.EVENTARC)) {
343
- if (options.config.src.emulators) {
344
- options.config.src.emulators.eventarc = { host: constants_1.DEFAULT_HOST, port: constants_1.DEFAULT_PORTS.eventarc };
345
- }
346
- }
347
- const eventarcAddr = await getAndCheckAddress(types_1.Emulators.EVENTARC, options);
348
- const eventarcEmulator = new eventarcEmulator_1.EventarcEmulator({
349
- host: eventarcAddr.host,
350
- port: eventarcAddr.port,
351
- });
352
- await startEmulator(eventarcEmulator);
353
348
  if (shouldStart(options, types_1.Emulators.FIRESTORE)) {
354
349
  const firestoreLogger = emulatorLogger_1.EmulatorLogger.forEmulator(types_1.Emulators.FIRESTORE);
355
350
  const firestoreAddr = await getAndCheckAddress(types_1.Emulators.FIRESTORE, options);
@@ -142,6 +142,7 @@ class ExtensionsEmulator {
142
142
  const emulatableBackend = {
143
143
  functionsDir,
144
144
  env: nonSecretEnv,
145
+ codebase: "",
145
146
  secretEnv: secretEnvVariables,
146
147
  predefinedTriggers: extensionTriggers,
147
148
  nodeMajorVersion: nodeMajorVersion,