firebase-tools 10.6.0 → 10.7.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/command.js +4 -4
- package/lib/commands/deploy.js +1 -1
- package/lib/commands/emulators-start.js +7 -2
- package/lib/commands/ext-configure.js +15 -5
- package/lib/commands/ext-export.js +6 -5
- package/lib/commands/ext-install.js +28 -44
- package/lib/commands/ext-update.js +9 -1
- package/lib/commands/functions-delete.js +7 -3
- package/lib/commands/hosting-channel-deploy.js +2 -2
- package/lib/deploy/database/deploy.js +4 -0
- package/lib/deploy/database/index.js +1 -0
- package/lib/deploy/extensions/deploy.js +4 -4
- package/lib/deploy/extensions/deploymentSummary.js +8 -5
- package/lib/deploy/extensions/planner.js +36 -9
- package/lib/deploy/extensions/prepare.js +1 -1
- package/lib/deploy/extensions/secrets.js +2 -2
- package/lib/deploy/extensions/tasks.js +60 -21
- package/lib/deploy/functions/backend.js +37 -2
- package/lib/deploy/functions/build.js +173 -0
- package/lib/deploy/functions/checkIam.js +11 -14
- package/lib/deploy/functions/containerCleaner.js +8 -7
- package/lib/deploy/functions/deploy.js +49 -28
- package/lib/deploy/functions/ensure.js +4 -4
- package/lib/deploy/functions/functionsDeployHelper.js +99 -24
- package/lib/deploy/functions/prepare.js +129 -71
- package/lib/deploy/functions/prepareFunctionsUpload.js +16 -21
- package/lib/deploy/functions/pricing.js +6 -3
- package/lib/deploy/functions/prompts.js +1 -7
- package/lib/deploy/functions/release/executor.js +1 -1
- package/lib/deploy/functions/release/fabricator.js +69 -25
- package/lib/deploy/functions/release/index.js +20 -6
- package/lib/deploy/functions/release/planner.js +18 -10
- package/lib/deploy/functions/release/reporter.js +14 -11
- package/lib/deploy/functions/runtimes/discovery/parsing.js +12 -6
- package/lib/deploy/functions/runtimes/discovery/v1alpha1.js +50 -3
- package/lib/deploy/functions/runtimes/golang/index.js +3 -0
- package/lib/deploy/functions/runtimes/node/index.js +7 -0
- package/lib/deploy/functions/runtimes/node/parseRuntimeAndValidateSDK.js +3 -3
- package/lib/deploy/functions/runtimes/node/parseTriggers.js +132 -6
- package/lib/deploy/functions/runtimes/node/versioning.js +2 -2
- package/lib/deploy/functions/services/auth.js +95 -0
- package/lib/deploy/functions/services/index.js +41 -21
- package/lib/deploy/functions/validate.js +33 -7
- package/lib/deploy/hosting/args.js +2 -0
- package/lib/deploy/hosting/convertConfig.js +39 -8
- package/lib/deploy/hosting/deploy.js +3 -3
- package/lib/deploy/hosting/prepare.js +2 -2
- package/lib/deploy/hosting/release.js +6 -2
- package/lib/deploy/index.js +82 -93
- package/lib/deploy/remoteconfig/deploy.js +4 -0
- package/lib/deploy/remoteconfig/index.js +3 -1
- package/lib/emulator/auth/cloudFunctions.js +6 -2
- package/lib/emulator/auth/operations.js +5 -1
- package/lib/emulator/auth/server.js +8 -1
- package/lib/emulator/auth/state.js +27 -24
- package/lib/emulator/auth/utils.js +3 -25
- package/lib/emulator/controller.js +17 -14
- package/lib/emulator/databaseEmulator.js +36 -3
- package/lib/emulator/downloadableEmulators.js +39 -23
- package/lib/emulator/extensions/validation.js +2 -2
- package/lib/emulator/extensionsEmulator.js +85 -21
- package/lib/emulator/functionsEmulator.js +89 -15
- package/lib/emulator/functionsEmulatorRuntime.js +1 -1
- package/lib/emulator/functionsEmulatorShared.js +25 -2
- package/lib/emulator/functionsEmulatorShell.js +2 -3
- package/lib/emulator/functionsEmulatorUtils.js +5 -1
- package/lib/emulator/pubsubEmulator.js +13 -9
- package/lib/emulator/registry.js +34 -12
- package/lib/emulator/storage/apis/firebase.js +33 -6
- package/lib/emulator/storage/apis/gcloud.js +6 -3
- package/lib/emulator/storage/files.js +9 -1
- package/lib/ensureApiEnabled.js +8 -4
- package/lib/extensions/changelog.js +1 -1
- package/lib/extensions/emulator/optionsHelper.js +4 -3
- package/lib/extensions/emulator/specHelper.js +7 -1
- package/lib/extensions/extensionsHelper.js +30 -24
- package/lib/extensions/manifest.js +27 -7
- package/lib/extensions/paramHelper.js +7 -5
- package/lib/extensions/provisioningHelper.js +2 -2
- package/lib/extensions/warnings.js +11 -4
- package/lib/functions/events/index.js +7 -0
- package/lib/functions/events/v1.js +6 -0
- package/lib/functions/projectConfig.js +32 -6
- package/lib/functionsShellCommandAction.js +1 -1
- package/lib/gcp/cloudfunctions.js +38 -5
- package/lib/gcp/cloudfunctionsv2.js +46 -7
- package/lib/gcp/identityPlatform.js +44 -0
- package/lib/gcp/secretManager.js +1 -1
- package/lib/metaprogramming.js +2 -0
- package/lib/previews.js +1 -1
- package/lib/serve/functions.js +16 -19
- package/lib/serve/hosting.js +25 -12
- package/lib/serve/index.js +6 -0
- package/lib/track.js +15 -21
- package/npm-shrinkwrap.json +256 -527
- package/package.json +6 -3
- package/schema/firebase-config.json +6 -0
|
@@ -1,11 +1,12 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.compareFunctions = exports.missingEndpoint = exports.hasEndpoint = exports.regionalEndpoints = exports.matchingBackend = exports.findEndpoint = exports.someEndpoint = exports.allEndpoints = exports.checkAvailability = exports.existingBackend = exports.scheduleIdForFunction = exports.functionName = exports.isEmptyBackend = exports.of = exports.empty = exports.isTaskQueueTriggered = exports.isScheduleTriggered = exports.isEventTriggered = exports.isCallableTriggered = exports.isHttpsTriggered = exports.secretVersionName = exports.SCHEDULED_FUNCTION_LABEL = exports.MIN_MEMORY_FOR_CONCURRENCY = exports.DEFAULT_MEMORY = exports.memoryOptionDisplayName = exports.endpointTriggerType = void 0;
|
|
3
|
+
exports.compareFunctions = exports.missingEndpoint = exports.hasEndpoint = exports.regionalEndpoints = exports.matchingBackend = exports.findEndpoint = exports.someEndpoint = exports.allEndpoints = exports.checkAvailability = exports.existingBackend = exports.scheduleIdForFunction = exports.functionName = exports.isEmptyBackend = exports.merge = exports.of = exports.empty = exports.isBlockingTriggered = exports.isTaskQueueTriggered = exports.isScheduleTriggered = exports.isEventTriggered = exports.isCallableTriggered = exports.isHttpsTriggered = exports.AllFunctionsPlatforms = exports.secretVersionName = exports.SCHEDULED_FUNCTION_LABEL = exports.MIN_MEMORY_FOR_CONCURRENCY = exports.DEFAULT_MEMORY = exports.memoryOptionDisplayName = exports.AllMemoryOptions = exports.AllIngressSettings = exports.AllVpcEgressSettings = exports.endpointTriggerType = void 0;
|
|
4
4
|
const gcf = require("../../gcp/cloudfunctions");
|
|
5
5
|
const gcfV2 = require("../../gcp/cloudfunctionsv2");
|
|
6
6
|
const utils = require("../../utils");
|
|
7
7
|
const error_1 = require("../../error");
|
|
8
8
|
const previews_1 = require("../../previews");
|
|
9
|
+
const functional_1 = require("../../functional");
|
|
9
10
|
function endpointTriggerType(endpoint) {
|
|
10
11
|
if (isScheduleTriggered(endpoint)) {
|
|
11
12
|
return "scheduled";
|
|
@@ -22,11 +23,21 @@ function endpointTriggerType(endpoint) {
|
|
|
22
23
|
else if (isTaskQueueTriggered(endpoint)) {
|
|
23
24
|
return "taskQueue";
|
|
24
25
|
}
|
|
26
|
+
else if (isBlockingTriggered(endpoint)) {
|
|
27
|
+
return endpoint.blockingTrigger.eventType;
|
|
28
|
+
}
|
|
25
29
|
else {
|
|
26
30
|
throw new Error("Unexpected trigger type for endpoint " + JSON.stringify(endpoint));
|
|
27
31
|
}
|
|
28
32
|
}
|
|
29
33
|
exports.endpointTriggerType = endpointTriggerType;
|
|
34
|
+
exports.AllVpcEgressSettings = ["PRIVATE_RANGES_ONLY", "ALL_TRAFFIC"];
|
|
35
|
+
exports.AllIngressSettings = [
|
|
36
|
+
"ALLOW_ALL",
|
|
37
|
+
"ALLOW_INTERNAL_ONLY",
|
|
38
|
+
"ALLOW_INTERNAL_AND_GCLB",
|
|
39
|
+
];
|
|
40
|
+
exports.AllMemoryOptions = [128, 256, 512, 1024, 2048, 4096, 8192];
|
|
30
41
|
function memoryOptionDisplayName(option) {
|
|
31
42
|
return {
|
|
32
43
|
128: "128MB",
|
|
@@ -47,6 +58,7 @@ function secretVersionName(s) {
|
|
|
47
58
|
return `projects/${s.projectId}/secrets/${s.secret}/versions/${(_a = s.version) !== null && _a !== void 0 ? _a : "latest"}`;
|
|
48
59
|
}
|
|
49
60
|
exports.secretVersionName = secretVersionName;
|
|
61
|
+
exports.AllFunctionsPlatforms = ["gcfv1", "gcfv2"];
|
|
50
62
|
function isHttpsTriggered(triggered) {
|
|
51
63
|
return {}.hasOwnProperty.call(triggered, "httpsTrigger");
|
|
52
64
|
}
|
|
@@ -67,6 +79,10 @@ function isTaskQueueTriggered(triggered) {
|
|
|
67
79
|
return {}.hasOwnProperty.call(triggered, "taskQueueTrigger");
|
|
68
80
|
}
|
|
69
81
|
exports.isTaskQueueTriggered = isTaskQueueTriggered;
|
|
82
|
+
function isBlockingTriggered(triggered) {
|
|
83
|
+
return {}.hasOwnProperty.call(triggered, "blockingTrigger");
|
|
84
|
+
}
|
|
85
|
+
exports.isBlockingTriggered = isBlockingTriggered;
|
|
70
86
|
function empty() {
|
|
71
87
|
return {
|
|
72
88
|
requiredAPIs: [],
|
|
@@ -87,6 +103,25 @@ function of(...endpoints) {
|
|
|
87
103
|
return bkend;
|
|
88
104
|
}
|
|
89
105
|
exports.of = of;
|
|
106
|
+
function merge(...backends) {
|
|
107
|
+
const merged = of(...(0, functional_1.flattenArray)(backends.map((b) => allEndpoints(b))));
|
|
108
|
+
const apiToReasons = {};
|
|
109
|
+
for (const b of backends) {
|
|
110
|
+
for (const { api, reason } of b.requiredAPIs) {
|
|
111
|
+
const reasons = apiToReasons[api] || new Set();
|
|
112
|
+
if (reason) {
|
|
113
|
+
reasons.add(reason);
|
|
114
|
+
}
|
|
115
|
+
apiToReasons[api] = reasons;
|
|
116
|
+
}
|
|
117
|
+
merged.environmentVariables = Object.assign(Object.assign({}, merged.environmentVariables), b.environmentVariables);
|
|
118
|
+
}
|
|
119
|
+
for (const [api, reasons] of Object.entries(apiToReasons)) {
|
|
120
|
+
merged.requiredAPIs.push({ api, reason: Array.from(reasons).join(" ") });
|
|
121
|
+
}
|
|
122
|
+
return merged;
|
|
123
|
+
}
|
|
124
|
+
exports.merge = merge;
|
|
90
125
|
function isEmptyBackend(backend) {
|
|
91
126
|
return (Object.keys(backend.requiredAPIs).length === 0 && Object.keys(backend.endpoints).length === 0);
|
|
92
127
|
}
|
|
@@ -207,7 +242,7 @@ function findEndpoint(backend, predicate) {
|
|
|
207
242
|
}
|
|
208
243
|
exports.findEndpoint = findEndpoint;
|
|
209
244
|
function matchingBackend(backend, predicate) {
|
|
210
|
-
const filtered = Object.assign({},
|
|
245
|
+
const filtered = Object.assign(Object.assign({}, backend), { endpoints: {} });
|
|
211
246
|
for (const endpoint of allEndpoints(backend)) {
|
|
212
247
|
if (!predicate(endpoint)) {
|
|
213
248
|
continue;
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.resolveBackend = exports.of = exports.empty = void 0;
|
|
4
|
+
const backend = require("./backend");
|
|
5
|
+
const proto = require("../../gcp/proto");
|
|
6
|
+
const api = require("../../.../../api");
|
|
7
|
+
const error_1 = require("../../error");
|
|
8
|
+
const functional_1 = require("../../functional");
|
|
9
|
+
function empty() {
|
|
10
|
+
return {
|
|
11
|
+
requiredAPIs: [],
|
|
12
|
+
endpoints: {},
|
|
13
|
+
params: [],
|
|
14
|
+
};
|
|
15
|
+
}
|
|
16
|
+
exports.empty = empty;
|
|
17
|
+
function of(endpoints) {
|
|
18
|
+
const build = empty();
|
|
19
|
+
build.endpoints = endpoints;
|
|
20
|
+
return build;
|
|
21
|
+
}
|
|
22
|
+
exports.of = of;
|
|
23
|
+
function resolveInt(from) {
|
|
24
|
+
if (from == null) {
|
|
25
|
+
return 0;
|
|
26
|
+
}
|
|
27
|
+
else if (typeof from === "string") {
|
|
28
|
+
throw new error_1.FirebaseError("CEL evaluation of expression '" + from + "' not yet supported");
|
|
29
|
+
}
|
|
30
|
+
return from;
|
|
31
|
+
}
|
|
32
|
+
function resolveString(from) {
|
|
33
|
+
if (from == null) {
|
|
34
|
+
return "";
|
|
35
|
+
}
|
|
36
|
+
else if (from.includes("{{") && from.includes("}}")) {
|
|
37
|
+
throw new error_1.FirebaseError("CEL evaluation of expression '" + from + "' not yet supported");
|
|
38
|
+
}
|
|
39
|
+
return from;
|
|
40
|
+
}
|
|
41
|
+
function resolveBoolean(from) {
|
|
42
|
+
if (from == null) {
|
|
43
|
+
return false;
|
|
44
|
+
}
|
|
45
|
+
else if (typeof from === "string") {
|
|
46
|
+
throw new error_1.FirebaseError("CEL evaluation of expression '" + from + "' not yet supported");
|
|
47
|
+
}
|
|
48
|
+
return from;
|
|
49
|
+
}
|
|
50
|
+
function isMemoryOption(value) {
|
|
51
|
+
return value == null || [128, 256, 512, 1024, 2048, 4096, 8192].includes(value);
|
|
52
|
+
}
|
|
53
|
+
function resolveBackend(build) {
|
|
54
|
+
const bkEndpoints = [];
|
|
55
|
+
for (const endpointId of Object.keys(build.endpoints)) {
|
|
56
|
+
const endpoint = build.endpoints[endpointId];
|
|
57
|
+
let regions = endpoint.region;
|
|
58
|
+
if (typeof regions === "undefined") {
|
|
59
|
+
regions = [api.functionsDefaultRegion];
|
|
60
|
+
}
|
|
61
|
+
for (const region of regions) {
|
|
62
|
+
const trigger = discoverTrigger(endpoint);
|
|
63
|
+
if (typeof endpoint.platform === "undefined") {
|
|
64
|
+
throw new error_1.FirebaseError("platform can't be undefined");
|
|
65
|
+
}
|
|
66
|
+
if (!isMemoryOption(endpoint.availableMemoryMb)) {
|
|
67
|
+
throw new error_1.FirebaseError("available memory must be a supported value, if present");
|
|
68
|
+
}
|
|
69
|
+
let timeout;
|
|
70
|
+
if (endpoint.timeoutSeconds) {
|
|
71
|
+
timeout = resolveInt(endpoint.timeoutSeconds);
|
|
72
|
+
}
|
|
73
|
+
else {
|
|
74
|
+
timeout = 60;
|
|
75
|
+
}
|
|
76
|
+
const bkEndpoint = Object.assign({ id: endpointId, project: endpoint.project, region: region, entryPoint: endpoint.entryPoint, platform: endpoint.platform, runtime: endpoint.runtime, timeoutSeconds: timeout }, trigger);
|
|
77
|
+
proto.renameIfPresent(bkEndpoint, endpoint, "maxInstances", "maxInstances", resolveInt);
|
|
78
|
+
proto.renameIfPresent(bkEndpoint, endpoint, "minInstances", "minInstances", resolveInt);
|
|
79
|
+
proto.renameIfPresent(bkEndpoint, endpoint, "concurrency", "concurrency", resolveInt);
|
|
80
|
+
proto.copyIfPresent(bkEndpoint, endpoint, "ingressSettings", "availableMemoryMb", "environmentVariables", "labels");
|
|
81
|
+
if (endpoint.vpc) {
|
|
82
|
+
bkEndpoint.vpc = {
|
|
83
|
+
connector: resolveString(endpoint.vpc.connector).replace("$REGION", region),
|
|
84
|
+
};
|
|
85
|
+
proto.copyIfPresent(bkEndpoint.vpc, endpoint.vpc, "egressSettings");
|
|
86
|
+
}
|
|
87
|
+
if (endpoint.serviceAccount) {
|
|
88
|
+
bkEndpoint.serviceAccountEmail = endpoint.serviceAccount;
|
|
89
|
+
}
|
|
90
|
+
bkEndpoints.push(bkEndpoint);
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
const bkend = backend.of(...bkEndpoints);
|
|
94
|
+
bkend.requiredAPIs = build.requiredAPIs;
|
|
95
|
+
return bkend;
|
|
96
|
+
}
|
|
97
|
+
exports.resolveBackend = resolveBackend;
|
|
98
|
+
function discoverTrigger(endpoint) {
|
|
99
|
+
let trigger;
|
|
100
|
+
if ("httpsTrigger" in endpoint) {
|
|
101
|
+
const bkHttps = {};
|
|
102
|
+
if (endpoint.httpsTrigger.invoker) {
|
|
103
|
+
bkHttps.invoker = [endpoint.httpsTrigger.invoker];
|
|
104
|
+
}
|
|
105
|
+
trigger = { httpsTrigger: bkHttps };
|
|
106
|
+
}
|
|
107
|
+
else if ("callableTrigger" in endpoint) {
|
|
108
|
+
trigger = { callableTrigger: {} };
|
|
109
|
+
}
|
|
110
|
+
else if ("blockingTrigger" in endpoint) {
|
|
111
|
+
throw new error_1.FirebaseError("blocking triggers not supported");
|
|
112
|
+
}
|
|
113
|
+
else if ("eventTrigger" in endpoint) {
|
|
114
|
+
const bkEventFilters = {};
|
|
115
|
+
for (const key in endpoint.eventTrigger.eventFilters) {
|
|
116
|
+
if (typeof key === "string") {
|
|
117
|
+
bkEventFilters[key] = resolveString(endpoint.eventTrigger.eventFilters[key]);
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
const bkEvent = {
|
|
121
|
+
eventType: endpoint.eventTrigger.eventType,
|
|
122
|
+
eventFilters: bkEventFilters,
|
|
123
|
+
retry: resolveBoolean(endpoint.eventTrigger.retry),
|
|
124
|
+
};
|
|
125
|
+
if (endpoint.eventTrigger.serviceAccount) {
|
|
126
|
+
bkEvent.serviceAccountEmail = endpoint.eventTrigger.serviceAccount;
|
|
127
|
+
}
|
|
128
|
+
if (endpoint.eventTrigger.region) {
|
|
129
|
+
bkEvent.region = resolveString(endpoint.eventTrigger.region);
|
|
130
|
+
}
|
|
131
|
+
trigger = { eventTrigger: bkEvent };
|
|
132
|
+
}
|
|
133
|
+
else if ("scheduleTrigger" in endpoint) {
|
|
134
|
+
const bkSchedule = {
|
|
135
|
+
schedule: resolveString(endpoint.scheduleTrigger.schedule),
|
|
136
|
+
timeZone: resolveString(endpoint.scheduleTrigger.timeZone),
|
|
137
|
+
};
|
|
138
|
+
proto.renameIfPresent(bkSchedule, endpoint.scheduleTrigger, "retryConfig", "retryConfig", resolveInt);
|
|
139
|
+
trigger = { scheduleTrigger: bkSchedule };
|
|
140
|
+
}
|
|
141
|
+
else if ("taskQueueTrigger" in endpoint) {
|
|
142
|
+
const bkTaskQueue = {};
|
|
143
|
+
if (endpoint.taskQueueTrigger.rateLimits) {
|
|
144
|
+
const bkRateLimits = {};
|
|
145
|
+
proto.renameIfPresent(bkRateLimits, endpoint.taskQueueTrigger.rateLimits, "maxConcurrentDispatches", "maxConcurrentDispatches", resolveInt);
|
|
146
|
+
proto.renameIfPresent(bkRateLimits, endpoint.taskQueueTrigger.rateLimits, "maxDispatchesPerSecond", "maxDispatchesPerSecond", resolveInt);
|
|
147
|
+
bkTaskQueue.rateLimits = bkRateLimits;
|
|
148
|
+
}
|
|
149
|
+
if (endpoint.taskQueueTrigger.retryConfig) {
|
|
150
|
+
const bkRetryConfig = {};
|
|
151
|
+
proto.renameIfPresent(bkRetryConfig, endpoint.taskQueueTrigger.retryConfig, "maxAttempts", "maxAttempts", resolveInt);
|
|
152
|
+
proto.renameIfPresent(bkRetryConfig, endpoint.taskQueueTrigger.retryConfig, "maxBackoffSeconds", "maxBackoffSeconds", (from) => {
|
|
153
|
+
return proto.durationFromSeconds(resolveInt(from));
|
|
154
|
+
});
|
|
155
|
+
proto.renameIfPresent(bkRetryConfig, endpoint.taskQueueTrigger.retryConfig, "minBackoffSeconds", "minBackoffSeconds", (from) => {
|
|
156
|
+
return proto.durationFromSeconds(resolveInt(from));
|
|
157
|
+
});
|
|
158
|
+
proto.renameIfPresent(bkRetryConfig, endpoint.taskQueueTrigger.retryConfig, "maxRetrySeconds", "maxRetryDurationSeconds", (from) => {
|
|
159
|
+
return proto.durationFromSeconds(resolveInt(from));
|
|
160
|
+
});
|
|
161
|
+
proto.renameIfPresent(bkRetryConfig, endpoint.taskQueueTrigger.retryConfig, "maxDoublings", "maxDoublings", resolveInt);
|
|
162
|
+
bkTaskQueue.retryConfig = bkRetryConfig;
|
|
163
|
+
}
|
|
164
|
+
if (endpoint.taskQueueTrigger.invoker) {
|
|
165
|
+
bkTaskQueue.invoker = endpoint.taskQueueTrigger.invoker.map((sa) => resolveString(sa));
|
|
166
|
+
}
|
|
167
|
+
trigger = { taskQueueTrigger: bkTaskQueue };
|
|
168
|
+
}
|
|
169
|
+
else {
|
|
170
|
+
(0, functional_1.assertExhaustive)(endpoint);
|
|
171
|
+
}
|
|
172
|
+
return trigger;
|
|
173
|
+
}
|
|
@@ -1,13 +1,14 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.ensureServiceAgentRoles = exports.mergeBindings = exports.
|
|
3
|
+
exports.ensureServiceAgentRoles = exports.mergeBindings = exports.obtainDefaultComputeServiceAgentBindings = exports.obtainPubSubServiceAgentBindings = exports.obtainBinding = exports.checkHttpIam = exports.checkServiceAccountIam = exports.EVENTARC_EVENT_RECEIVER_ROLE = exports.RUN_INVOKER_ROLE = exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = void 0;
|
|
4
4
|
const cli_color_1 = require("cli-color");
|
|
5
5
|
const logger_1 = require("../../logger");
|
|
6
6
|
const functionsDeployHelper_1 = require("./functionsDeployHelper");
|
|
7
7
|
const error_1 = require("../../error");
|
|
8
|
+
const functional_1 = require("../../functional");
|
|
8
9
|
const iam = require("../../gcp/iam");
|
|
9
10
|
const backend = require("./backend");
|
|
10
|
-
const
|
|
11
|
+
const track_1 = require("../../track");
|
|
11
12
|
const utils = require("../../utils");
|
|
12
13
|
const resourceManager_1 = require("../../gcp/resourceManager");
|
|
13
14
|
const services_1 = require("./services");
|
|
@@ -15,7 +16,6 @@ const PERMISSION = "cloudfunctions.functions.setIamPolicy";
|
|
|
15
16
|
exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = "roles/iam.serviceAccountTokenCreator";
|
|
16
17
|
exports.RUN_INVOKER_ROLE = "roles/run.invoker";
|
|
17
18
|
exports.EVENTARC_EVENT_RECEIVER_ROLE = "roles/eventarc.eventReceiver";
|
|
18
|
-
exports.EVENTARC_SERVICE_AGENT_ROLE = "roles/eventarc.serviceAgent";
|
|
19
19
|
async function checkServiceAccountIam(projectId) {
|
|
20
20
|
const saEmail = `${projectId}@appspot.gserviceaccount.com`;
|
|
21
21
|
let passed = false;
|
|
@@ -35,11 +35,14 @@ async function checkServiceAccountIam(projectId) {
|
|
|
35
35
|
}
|
|
36
36
|
exports.checkServiceAccountIam = checkServiceAccountIam;
|
|
37
37
|
async function checkHttpIam(context, options, payload) {
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
38
|
+
if (!payload.functions) {
|
|
39
|
+
return;
|
|
40
|
+
}
|
|
41
|
+
const filters = context.filters || (0, functionsDeployHelper_1.getEndpointFilters)(options);
|
|
42
|
+
const wantBackends = Object.values(payload.functions).map(({ wantBackend }) => wantBackend);
|
|
43
|
+
const httpEndpoints = [...(0, functional_1.flattenArray)(wantBackends.map((b) => backend.allEndpoints(b)))]
|
|
41
44
|
.filter(backend.isHttpsTriggered)
|
|
42
|
-
.filter((f) => (0, functionsDeployHelper_1.
|
|
45
|
+
.filter((f) => (0, functionsDeployHelper_1.endpointMatchesAnyFilter)(f, filters));
|
|
43
46
|
const existing = await backend.existingBackend(context);
|
|
44
47
|
const newHttpsEndpoints = httpEndpoints.filter(backend.missingEndpoint(existing));
|
|
45
48
|
if (newHttpsEndpoints.length === 0) {
|
|
@@ -56,7 +59,7 @@ async function checkHttpIam(context, options, payload) {
|
|
|
56
59
|
return;
|
|
57
60
|
}
|
|
58
61
|
if (!passed) {
|
|
59
|
-
void track("Error (User)", "deploy:functions:http_create_missing_iam");
|
|
62
|
+
void (0, track_1.track)("Error (User)", "deploy:functions:http_create_missing_iam");
|
|
60
63
|
throw new error_1.FirebaseError(`Missing required permission on project ${(0, cli_color_1.bold)(context.projectId)} to deploy new HTTPS functions. The permission ${(0, cli_color_1.bold)(PERMISSION)} is required to deploy the following functions:\n\n- ` +
|
|
61
64
|
newHttpsEndpoints.map((func) => func.id).join("\n- ") +
|
|
62
65
|
`\n\nTo address this error, please ask a project Owner to assign your account the "Cloud Functions Admin" role at the following URL:\n\nhttps://console.cloud.google.com/iam-admin/iam?project=${context.projectId}`);
|
|
@@ -97,11 +100,6 @@ function obtainDefaultComputeServiceAgentBindings(projectNumber, existingPolicy)
|
|
|
97
100
|
return [invokerBinding, eventReceiverBinding];
|
|
98
101
|
}
|
|
99
102
|
exports.obtainDefaultComputeServiceAgentBindings = obtainDefaultComputeServiceAgentBindings;
|
|
100
|
-
function obtainEventarcServiceAgentBindings(projectNumber, existingPolicy) {
|
|
101
|
-
const eventarcServiceAgent = `serviceAccount:service-${projectNumber}@gcp-sa-eventarc.iam.gserviceaccount.com`;
|
|
102
|
-
return [obtainBinding(existingPolicy, eventarcServiceAgent, exports.EVENTARC_SERVICE_AGENT_ROLE)];
|
|
103
|
-
}
|
|
104
|
-
exports.obtainEventarcServiceAgentBindings = obtainEventarcServiceAgentBindings;
|
|
105
103
|
function mergeBindings(policy, allRequiredBindings) {
|
|
106
104
|
for (const requiredBindings of allRequiredBindings) {
|
|
107
105
|
if (requiredBindings.length === 0) {
|
|
@@ -145,7 +143,6 @@ async function ensureServiceAgentRoles(projectNumber, want, have) {
|
|
|
145
143
|
if (haveServices.length === 0) {
|
|
146
144
|
allRequiredBindings.push(obtainPubSubServiceAgentBindings(projectNumber, policy));
|
|
147
145
|
allRequiredBindings.push(obtainDefaultComputeServiceAgentBindings(projectNumber, policy));
|
|
148
|
-
allRequiredBindings.push(obtainEventarcServiceAgentBindings(projectNumber, policy));
|
|
149
146
|
}
|
|
150
147
|
if (!allRequiredBindings.find((bindings) => bindings.length > 0)) {
|
|
151
148
|
return;
|
|
@@ -251,13 +251,14 @@ class DockerHelper {
|
|
|
251
251
|
this.client = new docker.Client(origin);
|
|
252
252
|
}
|
|
253
253
|
async ls(path) {
|
|
254
|
-
if (!this.cache
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
254
|
+
if (!(path in this.cache)) {
|
|
255
|
+
this.cache[path] = retry(() => this.client.listTags(path)).then((res) => {
|
|
256
|
+
return {
|
|
257
|
+
tags: res.tags,
|
|
258
|
+
digests: Object.keys(res.manifest),
|
|
259
|
+
children: res.child,
|
|
260
|
+
};
|
|
261
|
+
});
|
|
261
262
|
}
|
|
262
263
|
return this.cache[path];
|
|
263
264
|
}
|
|
@@ -6,55 +6,62 @@ const clc = require("cli-color");
|
|
|
6
6
|
const fs = require("fs");
|
|
7
7
|
const checkIam_1 = require("./checkIam");
|
|
8
8
|
const utils_1 = require("../../utils");
|
|
9
|
+
const projectConfig_1 = require("../../functions/projectConfig");
|
|
9
10
|
const gcs = require("../../gcp/storage");
|
|
10
11
|
const gcf = require("../../gcp/cloudfunctions");
|
|
11
12
|
const gcfv2 = require("../../gcp/cloudfunctionsv2");
|
|
12
13
|
const backend = require("./backend");
|
|
13
14
|
(0, tmp_1.setGracefulCleanup)();
|
|
14
|
-
async function uploadSourceV1(
|
|
15
|
-
const
|
|
16
|
-
|
|
15
|
+
async function uploadSourceV1(projectId, source, wantBackend) {
|
|
16
|
+
const v1Endpoints = backend.allEndpoints(wantBackend).filter((e) => e.platform === "gcfv1");
|
|
17
|
+
if (v1Endpoints.length === 0) {
|
|
18
|
+
return;
|
|
19
|
+
}
|
|
20
|
+
const region = v1Endpoints[0].region;
|
|
21
|
+
const uploadUrl = await gcf.generateUploadUrl(projectId, region);
|
|
17
22
|
const uploadOpts = {
|
|
18
|
-
file:
|
|
19
|
-
stream: fs.createReadStream(
|
|
23
|
+
file: source.functionsSourceV1,
|
|
24
|
+
stream: fs.createReadStream(source.functionsSourceV1),
|
|
20
25
|
};
|
|
21
26
|
await gcs.upload(uploadOpts, uploadUrl, {
|
|
22
27
|
"x-goog-content-length-range": "0,104857600",
|
|
23
28
|
});
|
|
29
|
+
return uploadUrl;
|
|
24
30
|
}
|
|
25
|
-
async function uploadSourceV2(
|
|
26
|
-
const
|
|
31
|
+
async function uploadSourceV2(projectId, source, wantBackend) {
|
|
32
|
+
const v2Endpoints = backend.allEndpoints(wantBackend).filter((e) => e.platform === "gcfv2");
|
|
33
|
+
if (v2Endpoints.length === 0) {
|
|
34
|
+
return;
|
|
35
|
+
}
|
|
36
|
+
const region = v2Endpoints[0].region;
|
|
37
|
+
const res = await gcfv2.generateUploadUrl(projectId, region);
|
|
27
38
|
const uploadOpts = {
|
|
28
|
-
file:
|
|
29
|
-
stream: fs.createReadStream(
|
|
39
|
+
file: source.functionsSourceV2,
|
|
40
|
+
stream: fs.createReadStream(source.functionsSourceV2),
|
|
30
41
|
};
|
|
31
42
|
await gcs.upload(uploadOpts, res.uploadUrl);
|
|
32
|
-
|
|
43
|
+
return res.storageSource;
|
|
33
44
|
}
|
|
34
|
-
async function
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
if (!context.functionsSourceV1 && !context.functionsSourceV2) {
|
|
45
|
+
async function uploadCodebase(context, codebase, wantBackend) {
|
|
46
|
+
var _a;
|
|
47
|
+
const source = (_a = context.sources) === null || _a === void 0 ? void 0 : _a[codebase];
|
|
48
|
+
if (!source || (!source.functionsSourceV1 && !source.functionsSourceV2)) {
|
|
39
49
|
return;
|
|
40
50
|
}
|
|
41
|
-
|
|
51
|
+
const uploads = [];
|
|
42
52
|
try {
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
const
|
|
46
|
-
if (
|
|
47
|
-
|
|
53
|
+
uploads.push(uploadSourceV1(context.projectId, source, wantBackend));
|
|
54
|
+
uploads.push(uploadSourceV2(context.projectId, source, wantBackend));
|
|
55
|
+
const [sourceUrl, storage] = await Promise.all(uploads);
|
|
56
|
+
if (sourceUrl) {
|
|
57
|
+
source.sourceUrl = sourceUrl;
|
|
48
58
|
}
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
uploads.push(uploadSourceV2(context, region));
|
|
52
|
-
}
|
|
59
|
+
if (storage) {
|
|
60
|
+
source.storage = storage;
|
|
53
61
|
}
|
|
54
|
-
|
|
55
|
-
const source = context.config.source;
|
|
62
|
+
const sourceDir = (0, projectConfig_1.configForCodebase)(context.config, codebase).source;
|
|
56
63
|
if (uploads.length) {
|
|
57
|
-
(0, utils_1.logSuccess)(`${clc.green.bold("functions:")} ${clc.bold(
|
|
64
|
+
(0, utils_1.logSuccess)(`${clc.green.bold("functions:")} ${clc.bold(sourceDir)} folder uploaded successfully`);
|
|
58
65
|
}
|
|
59
66
|
}
|
|
60
67
|
catch (err) {
|
|
@@ -62,4 +69,18 @@ async function deploy(context, options, payload) {
|
|
|
62
69
|
throw err;
|
|
63
70
|
}
|
|
64
71
|
}
|
|
72
|
+
async function deploy(context, options, payload) {
|
|
73
|
+
if (!context.config) {
|
|
74
|
+
return;
|
|
75
|
+
}
|
|
76
|
+
if (!payload.functions) {
|
|
77
|
+
return;
|
|
78
|
+
}
|
|
79
|
+
await (0, checkIam_1.checkHttpIam)(context, options, payload);
|
|
80
|
+
const uploads = [];
|
|
81
|
+
for (const [codebase, { wantBackend }] of Object.entries(payload.functions)) {
|
|
82
|
+
uploads.push(uploadCodebase(context, codebase, wantBackend));
|
|
83
|
+
}
|
|
84
|
+
await Promise.all(uploads);
|
|
85
|
+
}
|
|
65
86
|
exports.deploy = deploy;
|
|
@@ -9,7 +9,7 @@ const secretManager_1 = require("../../gcp/secretManager");
|
|
|
9
9
|
const previews_1 = require("../../previews");
|
|
10
10
|
const projects_1 = require("../../management/projects");
|
|
11
11
|
const functional_1 = require("../../functional");
|
|
12
|
-
const
|
|
12
|
+
const track_1 = require("../../track");
|
|
13
13
|
const backend = require("./backend");
|
|
14
14
|
const ensureApiEnabled = require("../../ensureApiEnabled");
|
|
15
15
|
const FAQ_URL = "https://firebase.google.com/support/faq#functions-runtime";
|
|
@@ -26,9 +26,9 @@ async function defaultServiceAccount(e) {
|
|
|
26
26
|
}
|
|
27
27
|
exports.defaultServiceAccount = defaultServiceAccount;
|
|
28
28
|
function nodeBillingError(projectId) {
|
|
29
|
-
void track("functions_runtime_notices", "nodejs10_billing_error");
|
|
29
|
+
void (0, track_1.track)("functions_runtime_notices", "nodejs10_billing_error");
|
|
30
30
|
return new error_1.FirebaseError(`Cloud Functions deployment requires the pay-as-you-go (Blaze) billing plan. To upgrade your project, visit the following URL:
|
|
31
|
-
|
|
31
|
+
|
|
32
32
|
https://console.firebase.google.com/project/${projectId}/usage/details
|
|
33
33
|
|
|
34
34
|
For additional information about this requirement, see Firebase FAQs:
|
|
@@ -36,7 +36,7 @@ For additional information about this requirement, see Firebase FAQs:
|
|
|
36
36
|
${FAQ_URL}`, { exit: 1 });
|
|
37
37
|
}
|
|
38
38
|
function nodePermissionError(projectId) {
|
|
39
|
-
void track("functions_runtime_notices", "nodejs10_permission_error");
|
|
39
|
+
void (0, track_1.track)("functions_runtime_notices", "nodejs10_permission_error");
|
|
40
40
|
return new error_1.FirebaseError(`Cloud Functions deployment requires the Cloud Build API to be enabled. The current credentials do not have permission to enable APIs for project ${clc.bold(projectId)}.
|
|
41
41
|
|
|
42
42
|
Please ask a project owner to visit the following URL to enable Cloud Build:
|
|
@@ -1,41 +1,116 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.getFunctionLabel = exports.
|
|
4
|
-
|
|
5
|
-
|
|
3
|
+
exports.groupEndpointsByCodebase = exports.targetCodebases = exports.getFunctionLabel = exports.getEndpointFilters = exports.parseFunctionSelector = exports.endpointMatchesFilter = exports.endpointMatchesAnyFilter = void 0;
|
|
4
|
+
const backend = require("./backend");
|
|
5
|
+
const projectConfig_1 = require("../../functions/projectConfig");
|
|
6
|
+
function endpointMatchesAnyFilter(endpoint, filters) {
|
|
7
|
+
if (!filters) {
|
|
6
8
|
return true;
|
|
7
9
|
}
|
|
8
|
-
return
|
|
10
|
+
return filters.some((filter) => endpointMatchesFilter(endpoint, filter));
|
|
9
11
|
}
|
|
10
|
-
exports.
|
|
11
|
-
function
|
|
12
|
-
|
|
13
|
-
|
|
12
|
+
exports.endpointMatchesAnyFilter = endpointMatchesAnyFilter;
|
|
13
|
+
/**
 * Decides whether a single endpoint matches a single filter.
 *
 * A filter matches when (a) the codebases agree (or either side lacks one),
 * and (b) the filter's id chunks are a prefix of the endpoint id split on "-".
 * A filter with no idChunks selects the whole codebase.
 * @param endpoint a backend endpoint with an `id` and optional `codebase`.
 * @param filter a filter with optional `codebase` and optional `idChunks`.
 * @return true when the endpoint is selected by the filter.
 */
function endpointMatchesFilter(endpoint, filter) {
    // Codebase mismatch only disqualifies when both sides specify one.
    if (endpoint.codebase && filter.codebase && endpoint.codebase !== filter.codebase) {
        return false;
    }
    // A codebase-only filter selects every endpoint in that codebase.
    if (!filter.idChunks) {
        return true;
    }
    const chunks = endpoint.id.split("-");
    if (chunks.length < filter.idChunks.length) {
        return false;
    }
    // The filter chunks must be an exact prefix of the endpoint's chunks.
    return filter.idChunks.every((chunk, i) => chunks[i] === chunk);
}
|
|
23
|
-
exports.
|
|
24
|
-
function
|
|
33
|
+
exports.endpointMatchesFilter = endpointMatchesFilter;
|
|
34
|
+
/**
 * Parses one `--only functions:` selector into endpoint filters.
 *
 * A selector of the form "codebase:function" yields a single unambiguous
 * filter. A bare selector is ambiguous — it may name a codebase or a
 * function in the default codebase — so both interpretations are returned.
 * @param selector a selector string with the "functions:" prefix removed.
 * @return one or two endpoint filters.
 */
function parseFunctionSelector(selector) {
    const parts = selector.split(":");
    if (parts.length >= 2) {
        // Explicit "codebase:function" form — unambiguous.
        return [
            {
                codebase: parts[0],
                idChunks: parts[1].split(/[-.]/),
            },
        ];
    }
    // Ambiguous: treat as a codebase name AND as a function id in the
    // default codebase; matching resolves whichever applies.
    return [
        { codebase: parts[0] },
        { codebase: projectConfig_1.DEFAULT_CODEBASE, idChunks: parts[0].split(/[-.]/) },
    ];
}
|
|
49
|
+
exports.parseFunctionSelector = parseFunctionSelector;
|
|
50
|
+
/**
 * Extracts endpoint filters from the `--only` option.
 *
 * Only "functions:"-prefixed selectors are considered; each is parsed via
 * parseFunctionSelector. Returns undefined when no function selectors are
 * present, which callers treat as "deploy everything".
 * @param options command options; `only` is a comma-separated selector list.
 * @return the parsed filters, or undefined when none apply.
 */
function getEndpointFilters(options) {
    if (!options.only) {
        return undefined;
    }
    const prefix = "functions:";
    const filters = options.only
        .split(",")
        .filter((selector) => selector.startsWith(prefix))
        .map((selector) => selector.substring(prefix.length))
        .filter((selector) => selector.length > 0)
        .flatMap((selector) => parseFunctionSelector(selector));
    // An empty list means "--only" targeted other products only.
    return filters.length === 0 ? undefined : filters;
}
|
|
37
|
-
exports.
|
|
69
|
+
exports.getEndpointFilters = getEndpointFilters;
|
|
38
70
|
/**
 * Builds a human-readable label for a function endpoint, e.g.
 * "myFn(us-central1)" or "[api]myFn(us-central1)" for a non-default codebase.
 * @param fn an endpoint with `id`, `region`, and optional `codebase`.
 * @return the display label.
 */
function getFunctionLabel(fn) {
    const base = `${fn.id}(${fn.region})`;
    // Only a non-default codebase is worth calling out in the label.
    if (!fn.codebase || fn.codebase === projectConfig_1.DEFAULT_CODEBASE) {
        return base;
    }
    return `[${fn.codebase}]${base}`;
}
|
|
41
77
|
exports.getFunctionLabel = getFunctionLabel;
|
|
78
|
+
/**
 * Determines which codebases from the functions config are targeted.
 *
 * With no filters (or with filters that name no codebase) every configured
 * codebase is returned; otherwise only the configured codebases that also
 * appear in a filter are kept, preserving config order.
 * @param config functions config entries, each with a `codebase`.
 * @param filters optional endpoint filters from `--only`.
 * @return the list of targeted codebase names.
 */
function targetCodebases(config, filters) {
    // De-duplicate while preserving first-seen order.
    const configCodebases = new Set(Object.values(config).map((c) => c.codebase));
    if (!filters) {
        return Array.from(configCodebases);
    }
    const filterCodebases = new Set();
    for (const filter of filters) {
        if (filter.codebase !== undefined) {
            filterCodebases.add(filter.codebase);
        }
    }
    // Filters that never name a codebase impose no codebase restriction.
    if (filterCodebases.size === 0) {
        return Array.from(configCodebases);
    }
    return Array.from(configCodebases).filter((codebase) => filterCodebases.has(codebase));
}
|
|
97
|
+
exports.targetCodebases = targetCodebases;
|
|
98
|
+
/**
 * Assigns existing (deployed) endpoints to the codebases of the wanted backends.
 *
 * Pass 1 claims endpoints whose fully-qualified function name matches a
 * wanted endpoint in that codebase; pass 2 assigns the leftovers by their
 * recorded `codebase` label. Endpoints matching neither are dropped.
 *
 * Fix: the original ran `Array.includes` inside `filter` over the same name
 * list on every element (accidental O(n*m) per codebase); membership checks
 * now use a Set, with identical results and ordering.
 * @param wantBackends map of codebase name -> wanted backend.
 * @param haveEndpoints list of currently deployed endpoints.
 * @return map of codebase name -> backend of existing endpoints.
 */
function groupEndpointsByCodebase(wantBackends, haveEndpoints) {
    const grouped = {};
    let endpointsToAssign = haveEndpoints;
    // Pass 1: match by fully-qualified function name against wanted endpoints.
    for (const codebase of Object.keys(wantBackends)) {
        const names = new Set(backend.allEndpoints(wantBackends[codebase]).map((e) => backend.functionName(e)));
        grouped[codebase] = backend.of(...endpointsToAssign.filter((e) => names.has(backend.functionName(e))));
        endpointsToAssign = endpointsToAssign.filter((e) => !names.has(backend.functionName(e)));
    }
    // Pass 2: assign remaining endpoints by their recorded codebase label.
    for (const codebase of Object.keys(wantBackends)) {
        const matchedEndpoints = endpointsToAssign.filter((e) => e.codebase === codebase);
        grouped[codebase] = backend.merge(grouped[codebase], backend.of(...matchedEndpoints));
        const matchedNames = new Set(matchedEndpoints.map((e) => backend.functionName(e)));
        endpointsToAssign = endpointsToAssign.filter((e) => !matchedNames.has(backend.functionName(e)));
    }
    return grouped;
}
|
|
116
|
+
exports.groupEndpointsByCodebase = groupEndpointsByCodebase;
|