firebase-tools 11.3.0 → 11.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -15
- package/lib/apiv2.js +5 -0
- package/lib/checkValidTargetFilters.js +3 -2
- package/lib/command.js +1 -0
- package/lib/commands/hosting-clone.js +5 -0
- package/lib/commands/login-ci.js +2 -0
- package/lib/database/rulesConfig.js +35 -8
- package/lib/deploy/functions/backend.js +6 -4
- package/lib/deploy/functions/build.js +107 -95
- package/lib/deploy/functions/ensure.js +1 -1
- package/lib/deploy/functions/params.js +5 -2
- package/lib/deploy/functions/prepare.js +3 -3
- package/lib/deploy/functions/pricing.js +3 -2
- package/lib/deploy/functions/prompts.js +1 -1
- package/lib/deploy/functions/release/fabricator.js +8 -7
- package/lib/deploy/functions/release/index.js +4 -0
- package/lib/deploy/functions/runtimes/discovery/parsing.js +19 -8
- package/lib/deploy/functions/runtimes/discovery/v1alpha1.js +115 -107
- package/lib/deploy/functions/runtimes/node/parseTriggers.js +53 -21
- package/lib/deploy/functions/services/storage.js +6 -0
- package/lib/deploy/hosting/convertConfig.js +87 -16
- package/lib/deploy/hosting/deploy.js +1 -1
- package/lib/deploy/index.js +1 -1
- package/lib/deploy/storage/prepare.js +29 -6
- package/lib/emulator/controller.js +0 -1
- package/lib/emulator/functionsEmulator.js +3 -0
- package/lib/emulator/functionsEmulatorRuntime.js +1 -1
- package/lib/emulator/functionsEmulatorShared.js +6 -11
- package/lib/emulator/storage/files.js +19 -22
- package/lib/emulator/storage/metadata.js +6 -6
- package/lib/emulator/storage/persistence.js +26 -12
- package/lib/extensions/displayExtensionInfo.js +1 -101
- package/lib/extensions/emulator/triggerHelper.js +2 -2
- package/lib/extensions/updateHelper.js +1 -7
- package/lib/functional.js +16 -1
- package/lib/functions/env.js +47 -2
- package/lib/gcp/cloudfunctions.js +21 -8
- package/lib/gcp/cloudfunctionsv2.js +43 -19
- package/lib/gcp/cloudscheduler.js +25 -13
- package/lib/gcp/cloudtasks.js +4 -3
- package/lib/gcp/iam.js +20 -17
- package/lib/gcp/proto.js +18 -6
- package/lib/gcp/resourceManager.js +25 -3
- package/lib/index.js +1 -1
- package/lib/previews.js +1 -1
- package/lib/rc.js +3 -9
- package/lib/requireAuth.js +4 -0
- package/lib/rulesDeploy.js +40 -3
- package/npm-shrinkwrap.json +48 -37
- package/package.json +7 -4
package/lib/functional.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.partition = exports.assertExhaustive = exports.zipIn = exports.zip = exports.reduceFlat = exports.flatten = exports.flattenArray = exports.flattenObject = void 0;
|
|
3
|
+
exports.nullsafeVisitor = exports.mapObject = exports.partition = exports.assertExhaustive = exports.zipIn = exports.zip = exports.reduceFlat = exports.flatten = exports.flattenArray = exports.flattenObject = void 0;
|
|
4
4
|
function* flattenObject(obj) {
|
|
5
5
|
function* helper(path, obj) {
|
|
6
6
|
for (const [k, v] of Object.entries(obj)) {
|
|
@@ -63,3 +63,18 @@ function partition(arr, callbackFn) {
|
|
|
63
63
|
}, [[], []]);
|
|
64
64
|
}
|
|
65
65
|
exports.partition = partition;
|
|
66
|
+
function mapObject(input, transform) {
|
|
67
|
+
const result = {};
|
|
68
|
+
for (const [k, v] of Object.entries(input)) {
|
|
69
|
+
result[k] = transform(v);
|
|
70
|
+
}
|
|
71
|
+
return result;
|
|
72
|
+
}
|
|
73
|
+
exports.mapObject = mapObject;
|
|
74
|
+
const nullsafeVisitor = (func, ...rest) => (first) => {
|
|
75
|
+
if (first === null) {
|
|
76
|
+
return null;
|
|
77
|
+
}
|
|
78
|
+
return func(first, ...rest);
|
|
79
|
+
};
|
|
80
|
+
exports.nullsafeVisitor = nullsafeVisitor;
|
package/lib/functions/env.js
CHANGED
|
@@ -51,6 +51,17 @@ const ESCAPE_SEQUENCES_TO_CHARACTERS = {
|
|
|
51
51
|
"\\'": "'",
|
|
52
52
|
'\\"': '"',
|
|
53
53
|
};
|
|
54
|
+
const ALL_ESCAPE_SEQUENCES_RE = /\\[nrtv\\'"]/g;
|
|
55
|
+
const CHARACTERS_TO_ESCAPE_SEQUENCES = {
|
|
56
|
+
"\n": "\\n",
|
|
57
|
+
"\r": "\\r",
|
|
58
|
+
"\t": "\\t",
|
|
59
|
+
"\v": "\\v",
|
|
60
|
+
"\\": "\\\\",
|
|
61
|
+
"'": "\\'",
|
|
62
|
+
'"': '\\"',
|
|
63
|
+
};
|
|
64
|
+
const ALL_ESCAPABLE_CHARACTERS_RE = /[\n\r\t\v\\'"]/g;
|
|
54
65
|
function parse(data) {
|
|
55
66
|
const envs = {};
|
|
56
67
|
const errors = [];
|
|
@@ -63,7 +74,7 @@ function parse(data) {
|
|
|
63
74
|
if ((quotesMatch = /^(["'])(.*)\1$/ms.exec(v)) != null) {
|
|
64
75
|
v = quotesMatch[2];
|
|
65
76
|
if (quotesMatch[1] === '"') {
|
|
66
|
-
v = v.replace(
|
|
77
|
+
v = v.replace(ALL_ESCAPE_SEQUENCES_RE, (match) => ESCAPE_SEQUENCES_TO_CHARACTERS[match]);
|
|
67
78
|
}
|
|
68
79
|
}
|
|
69
80
|
envs[k] = v;
|
|
@@ -146,9 +157,43 @@ function hasUserEnvs({ functionsSource, projectId, projectAlias, isEmulator, })
|
|
|
146
157
|
}
|
|
147
158
|
exports.hasUserEnvs = hasUserEnvs;
|
|
148
159
|
function writeUserEnvs(toWrite, envOpts) {
|
|
149
|
-
|
|
160
|
+
if (Object.keys(toWrite).length === 0) {
|
|
161
|
+
return;
|
|
162
|
+
}
|
|
163
|
+
const { functionsSource, projectId, projectAlias, isEmulator } = envOpts;
|
|
164
|
+
let envFiles = findEnvfiles(functionsSource, projectId, projectAlias, isEmulator);
|
|
165
|
+
if (envFiles.length === 0) {
|
|
166
|
+
envFiles = [createEnvFile(envOpts)];
|
|
167
|
+
}
|
|
168
|
+
const currentEnvs = loadUserEnvs(envOpts);
|
|
169
|
+
for (const k of Object.keys(toWrite)) {
|
|
170
|
+
validateKey(k);
|
|
171
|
+
if (currentEnvs.hasOwnProperty(k)) {
|
|
172
|
+
throw new error_1.FirebaseError(`Attempted to write param-defined key ${k} to .env files, but it was already defined.`);
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
const mostSpecificEnv = path.join(functionsSource, envFiles[envFiles.length - 1]);
|
|
176
|
+
(0, utils_1.logBullet)(clc.cyan.bold("functions: ") + `Writing new parameter values to disk: ${mostSpecificEnv}`);
|
|
177
|
+
for (const k of Object.keys(toWrite)) {
|
|
178
|
+
fs.appendFileSync(mostSpecificEnv, formatUserEnvForWrite(k, toWrite[k]));
|
|
179
|
+
}
|
|
150
180
|
}
|
|
151
181
|
exports.writeUserEnvs = writeUserEnvs;
|
|
182
|
+
function createEnvFile(envOpts) {
|
|
183
|
+
const fileToWrite = envOpts.isEmulator
|
|
184
|
+
? FUNCTIONS_EMULATOR_DOTENV
|
|
185
|
+
: `.env.${envOpts.projectAlias || envOpts.projectId}`;
|
|
186
|
+
logger_1.logger.debug(`Creating ${fileToWrite}...`);
|
|
187
|
+
fs.writeFileSync(path.join(envOpts.functionsSource, fileToWrite), "", { flag: "wx" });
|
|
188
|
+
return fileToWrite;
|
|
189
|
+
}
|
|
190
|
+
function formatUserEnvForWrite(key, value) {
|
|
191
|
+
const escapedValue = value.replace(ALL_ESCAPABLE_CHARACTERS_RE, (match) => CHARACTERS_TO_ESCAPE_SEQUENCES[match]);
|
|
192
|
+
if (escapedValue !== value) {
|
|
193
|
+
return `${key}="${escapedValue}"\n`;
|
|
194
|
+
}
|
|
195
|
+
return `${key}=${escapedValue}\n`;
|
|
196
|
+
}
|
|
152
197
|
function loadUserEnvs({ functionsSource, projectId, projectAlias, isEmulator, }) {
|
|
153
198
|
var _a;
|
|
154
199
|
const envFiles = findEnvfiles(functionsSource, projectId, projectAlias, isEmulator);
|
|
@@ -255,18 +255,20 @@ function endpointFromFunction(gcfFunction) {
|
|
|
255
255
|
if (securityLevel) {
|
|
256
256
|
endpoint.securityLevel = securityLevel;
|
|
257
257
|
}
|
|
258
|
-
proto.copyIfPresent(endpoint, gcfFunction, "
|
|
259
|
-
proto.renameIfPresent(endpoint, gcfFunction, "
|
|
258
|
+
proto.copyIfPresent(endpoint, gcfFunction, "minInstances", "maxInstances", "ingressSettings", "labels", "environmentVariables", "secretEnvironmentVariables", "sourceUploadUrl");
|
|
259
|
+
proto.renameIfPresent(endpoint, gcfFunction, "serviceAccount", "serviceAccountEmail");
|
|
260
|
+
proto.convertIfPresent(endpoint, gcfFunction, "availableMemoryMb", (raw) => raw);
|
|
261
|
+
proto.convertIfPresent(endpoint, gcfFunction, "timeoutSeconds", "timeout", (dur) => dur === null ? null : proto.secondsFromDuration(dur));
|
|
260
262
|
if (gcfFunction.vpcConnector) {
|
|
261
263
|
endpoint.vpc = { connector: gcfFunction.vpcConnector };
|
|
262
|
-
proto.
|
|
264
|
+
proto.convertIfPresent(endpoint.vpc, gcfFunction, "egressSettings", "vpcConnectorEgressSettings", (raw) => raw);
|
|
263
265
|
}
|
|
264
266
|
endpoint.codebase = ((_g = gcfFunction.labels) === null || _g === void 0 ? void 0 : _g[exports.CODEBASE_LABEL]) || projectConfig.DEFAULT_CODEBASE;
|
|
265
267
|
return endpoint;
|
|
266
268
|
}
|
|
267
269
|
exports.endpointFromFunction = endpointFromFunction;
|
|
268
270
|
function functionFromEndpoint(endpoint, sourceUploadUrl) {
|
|
269
|
-
var _a;
|
|
271
|
+
var _a, _b;
|
|
270
272
|
if (endpoint.platform !== "gcfv1") {
|
|
271
273
|
throw new error_1.FirebaseError("Trying to create a v1 CloudFunction with v2 API. This should never happen");
|
|
272
274
|
}
|
|
@@ -281,8 +283,13 @@ function functionFromEndpoint(endpoint, sourceUploadUrl) {
|
|
|
281
283
|
runtime: endpoint.runtime,
|
|
282
284
|
dockerRegistry: "ARTIFACT_REGISTRY",
|
|
283
285
|
};
|
|
284
|
-
|
|
286
|
+
if (typeof endpoint.labels !== "undefined") {
|
|
287
|
+
gcfFunction.labels = Object.assign({}, endpoint.labels);
|
|
288
|
+
}
|
|
285
289
|
if (backend.isEventTriggered(endpoint)) {
|
|
290
|
+
if (!((_a = endpoint.eventTrigger.eventFilters) === null || _a === void 0 ? void 0 : _a.resource)) {
|
|
291
|
+
throw new error_1.FirebaseError("Cannot create v1 function from an eventTrigger without a resource");
|
|
292
|
+
}
|
|
286
293
|
gcfFunction.eventTrigger = {
|
|
287
294
|
eventType: endpoint.eventTrigger.eventType,
|
|
288
295
|
resource: endpoint.eventTrigger.eventFilters.resource,
|
|
@@ -316,18 +323,24 @@ function functionFromEndpoint(endpoint, sourceUploadUrl) {
|
|
|
316
323
|
gcfFunction.httpsTrigger.securityLevel = endpoint.securityLevel;
|
|
317
324
|
}
|
|
318
325
|
}
|
|
319
|
-
proto.copyIfPresent(gcfFunction, endpoint, "
|
|
320
|
-
proto.renameIfPresent(gcfFunction, endpoint, "
|
|
326
|
+
proto.copyIfPresent(gcfFunction, endpoint, "minInstances", "maxInstances", "ingressSettings", "environmentVariables", "secretEnvironmentVariables");
|
|
327
|
+
proto.renameIfPresent(gcfFunction, endpoint, "serviceAccountEmail", "serviceAccount");
|
|
328
|
+
proto.convertIfPresent(gcfFunction, endpoint, "availableMemoryMb", (mem) => mem);
|
|
329
|
+
proto.convertIfPresent(gcfFunction, endpoint, "timeout", "timeoutSeconds", (sec) => sec ? proto.durationFromSeconds(sec) : null);
|
|
321
330
|
if (endpoint.vpc) {
|
|
322
331
|
proto.renameIfPresent(gcfFunction, endpoint.vpc, "vpcConnector", "connector");
|
|
323
332
|
proto.renameIfPresent(gcfFunction, endpoint.vpc, "vpcConnectorEgressSettings", "egressSettings");
|
|
324
333
|
}
|
|
334
|
+
else if (endpoint.vpc === null) {
|
|
335
|
+
gcfFunction.vpcConnector = null;
|
|
336
|
+
gcfFunction.vpcConnectorEgressSettings = null;
|
|
337
|
+
}
|
|
325
338
|
const codebase = endpoint.codebase || projectConfig.DEFAULT_CODEBASE;
|
|
326
339
|
if (codebase !== projectConfig.DEFAULT_CODEBASE) {
|
|
327
340
|
gcfFunction.labels = Object.assign(Object.assign({}, gcfFunction.labels), { [exports.CODEBASE_LABEL]: codebase });
|
|
328
341
|
}
|
|
329
342
|
else {
|
|
330
|
-
(
|
|
343
|
+
(_b = gcfFunction.labels) === null || _b === void 0 ? true : delete _b[exports.CODEBASE_LABEL];
|
|
331
344
|
}
|
|
332
345
|
return gcfFunction;
|
|
333
346
|
}
|
|
@@ -161,7 +161,7 @@ async function deleteFunction(cloudFunction) {
|
|
|
161
161
|
}
|
|
162
162
|
exports.deleteFunction = deleteFunction;
|
|
163
163
|
function functionFromEndpoint(endpoint, source) {
|
|
164
|
-
var _a;
|
|
164
|
+
var _a, _b;
|
|
165
165
|
if (endpoint.platform !== "gcfv2") {
|
|
166
166
|
throw new error_1.FirebaseError("Trying to create a v2 CloudFunction with v1 API. This should never happen");
|
|
167
167
|
}
|
|
@@ -182,7 +182,8 @@ function functionFromEndpoint(endpoint, source) {
|
|
|
182
182
|
serviceConfig: {},
|
|
183
183
|
};
|
|
184
184
|
proto.copyIfPresent(gcfFunction, endpoint, "labels");
|
|
185
|
-
proto.copyIfPresent(gcfFunction.serviceConfig, endpoint, "environmentVariables", "secretEnvironmentVariables", "
|
|
185
|
+
proto.copyIfPresent(gcfFunction.serviceConfig, endpoint, "environmentVariables", "secretEnvironmentVariables", "ingressSettings", "timeoutSeconds");
|
|
186
|
+
proto.renameIfPresent(gcfFunction.serviceConfig, endpoint, "serviceAccountEmail", "serviceAccount");
|
|
186
187
|
const mem = endpoint.availableMemoryMb || backend.DEFAULT_MEMORY;
|
|
187
188
|
gcfFunction.serviceConfig.availableMemory = mem > 1024 ? `${mem / 1024}Gi` : `${mem}Mi`;
|
|
188
189
|
proto.renameIfPresent(gcfFunction.serviceConfig, endpoint, "minInstanceCount", "minInstances");
|
|
@@ -191,11 +192,19 @@ function functionFromEndpoint(endpoint, source) {
|
|
|
191
192
|
proto.renameIfPresent(gcfFunction.serviceConfig, endpoint.vpc, "vpcConnector", "connector");
|
|
192
193
|
proto.renameIfPresent(gcfFunction.serviceConfig, endpoint.vpc, "vpcConnectorEgressSettings", "egressSettings");
|
|
193
194
|
}
|
|
195
|
+
else if (endpoint.vpc === null) {
|
|
196
|
+
gcfFunction.serviceConfig.vpcConnector = null;
|
|
197
|
+
gcfFunction.serviceConfig.vpcConnectorEgressSettings = null;
|
|
198
|
+
}
|
|
194
199
|
if (backend.isEventTriggered(endpoint)) {
|
|
195
200
|
gcfFunction.eventTrigger = {
|
|
196
201
|
eventType: endpoint.eventTrigger.eventType,
|
|
197
202
|
};
|
|
198
203
|
if (gcfFunction.eventTrigger.eventType === v2_1.PUBSUB_PUBLISH_EVENT) {
|
|
204
|
+
if (!((_a = endpoint.eventTrigger.eventFilters) === null || _a === void 0 ? void 0 : _a.topic)) {
|
|
205
|
+
throw new error_1.FirebaseError("Error: Pub/Sub event trigger is missing topic: " +
|
|
206
|
+
JSON.stringify(endpoint.eventTrigger, null, 2));
|
|
207
|
+
}
|
|
199
208
|
gcfFunction.eventTrigger.pubsubTopic = endpoint.eventTrigger.eventFilters.topic;
|
|
200
209
|
gcfFunction.eventTrigger.eventFilters = [];
|
|
201
210
|
for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilters)) {
|
|
@@ -206,7 +215,7 @@ function functionFromEndpoint(endpoint, source) {
|
|
|
206
215
|
}
|
|
207
216
|
else {
|
|
208
217
|
gcfFunction.eventTrigger.eventFilters = [];
|
|
209
|
-
for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilters)) {
|
|
218
|
+
for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilters || {})) {
|
|
210
219
|
gcfFunction.eventTrigger.eventFilters.push({ attribute, value });
|
|
211
220
|
}
|
|
212
221
|
for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilterPathPatterns || {})) {
|
|
@@ -241,7 +250,7 @@ function functionFromEndpoint(endpoint, source) {
|
|
|
241
250
|
gcfFunction.labels = Object.assign(Object.assign({}, gcfFunction.labels), { [exports.CODEBASE_LABEL]: codebase });
|
|
242
251
|
}
|
|
243
252
|
else {
|
|
244
|
-
(
|
|
253
|
+
(_b = gcfFunction.labels) === null || _b === void 0 ? true : delete _b[exports.CODEBASE_LABEL];
|
|
245
254
|
}
|
|
246
255
|
return gcfFunction;
|
|
247
256
|
}
|
|
@@ -273,29 +282,33 @@ function endpointFromFunction(gcfFunction) {
|
|
|
273
282
|
};
|
|
274
283
|
}
|
|
275
284
|
else if (gcfFunction.eventTrigger) {
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
eventType: gcfFunction.eventTrigger.eventType,
|
|
279
|
-
eventFilters: {},
|
|
280
|
-
retry: false,
|
|
281
|
-
},
|
|
282
|
-
};
|
|
285
|
+
const eventFilters = {};
|
|
286
|
+
const eventFilterPathPatterns = {};
|
|
283
287
|
if (gcfFunction.eventTrigger.pubsubTopic) {
|
|
284
|
-
|
|
288
|
+
eventFilters.topic = gcfFunction.eventTrigger.pubsubTopic;
|
|
285
289
|
}
|
|
286
290
|
else {
|
|
287
291
|
for (const eventFilter of gcfFunction.eventTrigger.eventFilters || []) {
|
|
288
292
|
if (eventFilter.operator === "match-path-pattern") {
|
|
289
|
-
|
|
290
|
-
trigger.eventTrigger.eventFilterPathPatterns = {};
|
|
291
|
-
}
|
|
292
|
-
trigger.eventTrigger.eventFilterPathPatterns[eventFilter.attribute] = eventFilter.value;
|
|
293
|
+
eventFilterPathPatterns[eventFilter.attribute] = eventFilter.value;
|
|
293
294
|
}
|
|
294
295
|
else {
|
|
295
|
-
|
|
296
|
+
eventFilters[eventFilter.attribute] = eventFilter.value;
|
|
296
297
|
}
|
|
297
298
|
}
|
|
298
299
|
}
|
|
300
|
+
trigger = {
|
|
301
|
+
eventTrigger: {
|
|
302
|
+
eventType: gcfFunction.eventTrigger.eventType,
|
|
303
|
+
retry: false,
|
|
304
|
+
},
|
|
305
|
+
};
|
|
306
|
+
if (Object.keys(eventFilters).length) {
|
|
307
|
+
trigger.eventTrigger.eventFilters = eventFilters;
|
|
308
|
+
}
|
|
309
|
+
if (Object.keys(eventFilterPathPatterns).length) {
|
|
310
|
+
trigger.eventTrigger.eventFilterPathPatterns = eventFilterPathPatterns;
|
|
311
|
+
}
|
|
299
312
|
proto.copyIfPresent(trigger.eventTrigger, gcfFunction.eventTrigger, "channel");
|
|
300
313
|
proto.renameIfPresent(trigger.eventTrigger, gcfFunction.eventTrigger, "region", "triggerRegion");
|
|
301
314
|
}
|
|
@@ -308,8 +321,19 @@ function endpointFromFunction(gcfFunction) {
|
|
|
308
321
|
const endpoint = Object.assign(Object.assign({ platform: "gcfv2", id,
|
|
309
322
|
project,
|
|
310
323
|
region }, trigger), { entryPoint: gcfFunction.buildConfig.entryPoint, runtime: gcfFunction.buildConfig.runtime, uri: gcfFunction.serviceConfig.uri });
|
|
311
|
-
proto.copyIfPresent(endpoint, gcfFunction.serviceConfig, "
|
|
312
|
-
proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "
|
|
324
|
+
proto.copyIfPresent(endpoint, gcfFunction.serviceConfig, "ingressSettings", "environmentVariables", "secretEnvironmentVariables", "timeoutSeconds");
|
|
325
|
+
proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "serviceAccount", "serviceAccountEmail");
|
|
326
|
+
proto.convertIfPresent(endpoint, gcfFunction.serviceConfig, "availableMemoryMb", "availableMemory", (prod) => {
|
|
327
|
+
if (prod === null) {
|
|
328
|
+
logger_1.logger.debug("Prod should always return a valid memory amount");
|
|
329
|
+
return prod;
|
|
330
|
+
}
|
|
331
|
+
const mem = mebibytes(prod);
|
|
332
|
+
if (!backend.isValidMemoryOption(mem)) {
|
|
333
|
+
logger_1.logger.warn("Converting a function to an endpoint with an invalid memory option", mem);
|
|
334
|
+
}
|
|
335
|
+
return mem;
|
|
336
|
+
});
|
|
313
337
|
proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "minInstances", "minInstanceCount");
|
|
314
338
|
proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "maxInstances", "maxInstanceCount");
|
|
315
339
|
proto.copyIfPresent(endpoint, gcfFunction, "labels");
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.jobFromEndpoint = exports.createOrReplaceJob = exports.updateJob = exports.getJob = exports.deleteJob = exports.createJob =
|
|
3
|
+
exports.jobFromEndpoint = exports.topicNameForEndpoint = exports.jobNameForEndpoint = exports.createOrReplaceJob = exports.updateJob = exports.getJob = exports.deleteJob = exports.createJob = void 0;
|
|
4
4
|
const _ = require("lodash");
|
|
5
5
|
const error_1 = require("../error");
|
|
6
6
|
const logger_1 = require("../logger");
|
|
@@ -11,13 +11,6 @@ const proto = require("./proto");
|
|
|
11
11
|
const functional_1 = require("../functional");
|
|
12
12
|
const VERSION = "v1beta1";
|
|
13
13
|
const DEFAULT_TIME_ZONE = "America/Los_Angeles";
|
|
14
|
-
function assertValidJob(job) {
|
|
15
|
-
proto.assertOneOf("Scheduler Job", job, "target", "httpTarget", "pubsubTarget");
|
|
16
|
-
if (job.httpTarget) {
|
|
17
|
-
proto.assertOneOf("Scheduler Job", job.httpTarget, "httpTarget.authorizationHeader", "oauthToken", "odicToken");
|
|
18
|
-
}
|
|
19
|
-
}
|
|
20
|
-
exports.assertValidJob = assertValidJob;
|
|
21
14
|
const apiClient = new apiv2_1.Client({ urlPrefix: api_1.cloudschedulerOrigin, apiVersion: VERSION });
|
|
22
15
|
function createJob(job) {
|
|
23
16
|
const strippedName = job.name.substring(0, job.name.lastIndexOf("/"));
|
|
@@ -76,14 +69,22 @@ function isIdentical(job, otherJob) {
|
|
|
76
69
|
job.timeZone === otherJob.timeZone &&
|
|
77
70
|
_.isEqual(job.retryConfig, otherJob.retryConfig));
|
|
78
71
|
}
|
|
72
|
+
function jobNameForEndpoint(endpoint, appEngineLocation) {
|
|
73
|
+
const id = backend.scheduleIdForFunction(endpoint);
|
|
74
|
+
return `projects/${endpoint.project}/locations/${appEngineLocation}/jobs/${id}`;
|
|
75
|
+
}
|
|
76
|
+
exports.jobNameForEndpoint = jobNameForEndpoint;
|
|
77
|
+
function topicNameForEndpoint(endpoint) {
|
|
78
|
+
const id = backend.scheduleIdForFunction(endpoint);
|
|
79
|
+
return `projects/${endpoint.project}/topics/${id}`;
|
|
80
|
+
}
|
|
81
|
+
exports.topicNameForEndpoint = topicNameForEndpoint;
|
|
79
82
|
function jobFromEndpoint(endpoint, appEngineLocation) {
|
|
80
83
|
const job = {};
|
|
81
84
|
if (endpoint.platform === "gcfv1") {
|
|
82
|
-
|
|
83
|
-
const region = appEngineLocation;
|
|
84
|
-
job.name = `projects/${endpoint.project}/locations/${region}/jobs/${id}`;
|
|
85
|
+
job.name = jobNameForEndpoint(endpoint, appEngineLocation);
|
|
85
86
|
job.pubsubTarget = {
|
|
86
|
-
topicName:
|
|
87
|
+
topicName: topicNameForEndpoint(endpoint),
|
|
87
88
|
attributes: {
|
|
88
89
|
scheduled: "true",
|
|
89
90
|
},
|
|
@@ -95,7 +96,18 @@ function jobFromEndpoint(endpoint, appEngineLocation) {
|
|
|
95
96
|
else {
|
|
96
97
|
(0, functional_1.assertExhaustive)(endpoint.platform);
|
|
97
98
|
}
|
|
98
|
-
|
|
99
|
+
if (!endpoint.scheduleTrigger.schedule) {
|
|
100
|
+
throw new error_1.FirebaseError("Cannot create a scheduler job without a schedule:" + JSON.stringify(endpoint));
|
|
101
|
+
}
|
|
102
|
+
job.schedule = endpoint.scheduleTrigger.schedule;
|
|
103
|
+
job.timeZone = endpoint.scheduleTrigger.timeZone || DEFAULT_TIME_ZONE;
|
|
104
|
+
if (endpoint.scheduleTrigger.retryConfig) {
|
|
105
|
+
job.retryConfig = {};
|
|
106
|
+
proto.copyIfPresent(job.retryConfig, endpoint.scheduleTrigger.retryConfig, "maxDoublings", "retryCount");
|
|
107
|
+
proto.convertIfPresent(job.retryConfig, endpoint.scheduleTrigger.retryConfig, "maxBackoffDuration", "maxBackoffSeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
108
|
+
proto.convertIfPresent(job.retryConfig, endpoint.scheduleTrigger.retryConfig, "minBackoffDuration", "minBackoffSeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
109
|
+
proto.convertIfPresent(job.retryConfig, endpoint.scheduleTrigger.retryConfig, "maxRetryDuration", "maxRetrySeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
110
|
+
}
|
|
99
111
|
return job;
|
|
100
112
|
}
|
|
101
113
|
exports.jobFromEndpoint = jobFromEndpoint;
|
package/lib/gcp/cloudtasks.js
CHANGED
|
@@ -4,6 +4,7 @@ exports.queueFromEndpoint = exports.queueNameForEndpoint = exports.setEnqueuer =
|
|
|
4
4
|
const proto = require("./proto");
|
|
5
5
|
const apiv2_1 = require("../apiv2");
|
|
6
6
|
const api_1 = require("../api");
|
|
7
|
+
const functional_1 = require("../functional");
|
|
7
8
|
const API_VERSION = "v2";
|
|
8
9
|
const client = new apiv2_1.Client({
|
|
9
10
|
urlPrefix: api_1.cloudTasksOrigin,
|
|
@@ -136,9 +137,9 @@ function queueFromEndpoint(endpoint) {
|
|
|
136
137
|
}
|
|
137
138
|
if (endpoint.taskQueueTrigger.retryConfig) {
|
|
138
139
|
proto.copyIfPresent(queue.retryConfig, endpoint.taskQueueTrigger.retryConfig, "maxAttempts", "maxDoublings");
|
|
139
|
-
proto.
|
|
140
|
-
proto.
|
|
141
|
-
proto.
|
|
140
|
+
proto.convertIfPresent(queue.retryConfig, endpoint.taskQueueTrigger.retryConfig, "maxRetryDuration", "maxRetrySeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
141
|
+
proto.convertIfPresent(queue.retryConfig, endpoint.taskQueueTrigger.retryConfig, "maxBackoff", "maxBackoffSeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
142
|
+
proto.convertIfPresent(queue.retryConfig, endpoint.taskQueueTrigger.retryConfig, "minBackoff", "minBackoffSeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
142
143
|
}
|
|
143
144
|
return queue;
|
|
144
145
|
}
|
package/lib/gcp/iam.js
CHANGED
|
@@ -2,11 +2,9 @@
|
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.testIamPermissions = exports.testResourceIamPermissions = exports.getRole = exports.deleteServiceAccount = exports.createServiceAccountKey = exports.getServiceAccount = exports.createServiceAccount = void 0;
|
|
4
4
|
const api_1 = require("../api");
|
|
5
|
-
const lodash_1 = require("lodash");
|
|
6
5
|
const logger_1 = require("../logger");
|
|
7
6
|
const apiv2_1 = require("../apiv2");
|
|
8
|
-
const
|
|
9
|
-
const apiClient = new apiv2_1.Client({ urlPrefix: api_1.iamOrigin, apiVersion: API_VERSION });
|
|
7
|
+
const apiClient = new apiv2_1.Client({ urlPrefix: api_1.iamOrigin, apiVersion: "v1" });
|
|
10
8
|
async function createServiceAccount(projectId, accountId, description, displayName) {
|
|
11
9
|
const response = await apiClient.post(`/projects/${projectId}/serviceAccounts`, {
|
|
12
10
|
accountId,
|
|
@@ -31,8 +29,8 @@ async function createServiceAccountKey(projectId, serviceAccountName) {
|
|
|
31
29
|
return response.body;
|
|
32
30
|
}
|
|
33
31
|
exports.createServiceAccountKey = createServiceAccountKey;
|
|
34
|
-
function deleteServiceAccount(projectId, accountEmail) {
|
|
35
|
-
|
|
32
|
+
async function deleteServiceAccount(projectId, accountEmail) {
|
|
33
|
+
await apiClient.delete(`/projects/${projectId}/serviceAccounts/${accountEmail}`, {
|
|
36
34
|
resolveOnHTTPError: true,
|
|
37
35
|
});
|
|
38
36
|
}
|
|
@@ -44,25 +42,30 @@ async function getRole(role) {
|
|
|
44
42
|
return response.body;
|
|
45
43
|
}
|
|
46
44
|
exports.getRole = getRole;
|
|
47
|
-
async function testResourceIamPermissions(origin, apiVersion, resourceName, permissions) {
|
|
45
|
+
async function testResourceIamPermissions(origin, apiVersion, resourceName, permissions, quotaUser = "") {
|
|
48
46
|
const localClient = new apiv2_1.Client({ urlPrefix: origin, apiVersion });
|
|
49
47
|
if (process.env.FIREBASE_SKIP_INFORMATIONAL_IAM) {
|
|
50
|
-
logger_1.logger.debug(
|
|
51
|
-
return { allowed: permissions, missing: [], passed: true };
|
|
48
|
+
logger_1.logger.debug(`[iam] skipping informational check of permissions ${JSON.stringify(permissions)} on resource ${resourceName}`);
|
|
49
|
+
return { allowed: Array.from(permissions).sort(), missing: [], passed: true };
|
|
50
|
+
}
|
|
51
|
+
const headers = {};
|
|
52
|
+
if (quotaUser) {
|
|
53
|
+
headers["x-goog-quota-user"] = quotaUser;
|
|
54
|
+
}
|
|
55
|
+
const response = await localClient.post(`/${resourceName}:testIamPermissions`, { permissions }, { headers });
|
|
56
|
+
const allowed = new Set(response.body.permissions || []);
|
|
57
|
+
const missing = new Set(permissions);
|
|
58
|
+
for (const p of allowed) {
|
|
59
|
+
missing.delete(p);
|
|
52
60
|
}
|
|
53
|
-
const response = await localClient.post(`/${resourceName}:testIamPermissions`, {
|
|
54
|
-
permissions,
|
|
55
|
-
});
|
|
56
|
-
const allowed = (response.body.permissions || []).sort();
|
|
57
|
-
const missing = (0, lodash_1.difference)(permissions, allowed);
|
|
58
61
|
return {
|
|
59
|
-
allowed,
|
|
60
|
-
missing,
|
|
61
|
-
passed: missing.
|
|
62
|
+
allowed: Array.from(allowed).sort(),
|
|
63
|
+
missing: Array.from(missing).sort(),
|
|
64
|
+
passed: missing.size === 0,
|
|
62
65
|
};
|
|
63
66
|
}
|
|
64
67
|
exports.testResourceIamPermissions = testResourceIamPermissions;
|
|
65
68
|
async function testIamPermissions(projectId, permissions) {
|
|
66
|
-
return testResourceIamPermissions(api_1.resourceManagerOrigin, "v1", `projects/${projectId}`, permissions);
|
|
69
|
+
return testResourceIamPermissions(api_1.resourceManagerOrigin, "v1", `projects/${projectId}`, permissions, `projects/${projectId}`);
|
|
67
70
|
}
|
|
68
71
|
exports.testIamPermissions = testIamPermissions;
|
package/lib/gcp/proto.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.formatServiceAccount = exports.getInvokerMembers = exports.fieldMasks = exports.renameIfPresent = exports.copyIfPresent = exports.assertOneOf = exports.durationFromSeconds = exports.secondsFromDuration = void 0;
|
|
3
|
+
exports.formatServiceAccount = exports.getInvokerMembers = exports.fieldMasks = exports.renameIfPresent = exports.convertIfPresent = exports.copyIfPresent = exports.assertOneOf = exports.durationFromSeconds = exports.secondsFromDuration = void 0;
|
|
4
4
|
const error_1 = require("../error");
|
|
5
5
|
function secondsFromDuration(d) {
|
|
6
6
|
return +d.slice(0, d.length - 1);
|
|
@@ -32,13 +32,25 @@ function copyIfPresent(dest, src, ...fields) {
|
|
|
32
32
|
}
|
|
33
33
|
}
|
|
34
34
|
exports.copyIfPresent = copyIfPresent;
|
|
35
|
-
function
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
35
|
+
function convertIfPresent(...args) {
|
|
36
|
+
if (args.length === 4) {
|
|
37
|
+
const [dest, src, key, converter] = args;
|
|
38
|
+
if (Object.prototype.hasOwnProperty.call(src, key)) {
|
|
39
|
+
dest[key] = converter(src[key]);
|
|
40
|
+
}
|
|
41
|
+
return;
|
|
42
|
+
}
|
|
43
|
+
const [dest, src, destKey, srcKey, converter] = args;
|
|
44
|
+
if (Object.prototype.hasOwnProperty.call(src, srcKey)) {
|
|
45
|
+
dest[destKey] = converter(src[srcKey]);
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
exports.convertIfPresent = convertIfPresent;
|
|
49
|
+
function renameIfPresent(dest, src, destKey, srcKey) {
|
|
50
|
+
if (!Object.prototype.hasOwnProperty.call(src, srcKey)) {
|
|
39
51
|
return;
|
|
40
52
|
}
|
|
41
|
-
dest[
|
|
53
|
+
dest[destKey] = src[srcKey];
|
|
42
54
|
}
|
|
43
55
|
exports.renameIfPresent = renameIfPresent;
|
|
44
56
|
function fieldMasks(object, ...doNotRecurseIn) {
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.addServiceAccountToRoles = exports.setIamPolicy = exports.getIamPolicy = exports.firebaseRoles = void 0;
|
|
3
|
+
exports.serviceAccountHasRoles = exports.addServiceAccountToRoles = exports.setIamPolicy = exports.getIamPolicy = exports.firebaseRoles = void 0;
|
|
4
4
|
const lodash_1 = require("lodash");
|
|
5
5
|
const api_1 = require("../api");
|
|
6
6
|
const apiv2_1 = require("../apiv2");
|
|
@@ -26,9 +26,11 @@ async function setIamPolicy(projectIdOrNumber, newPolicy, updateMask = "") {
|
|
|
26
26
|
return response.body;
|
|
27
27
|
}
|
|
28
28
|
exports.setIamPolicy = setIamPolicy;
|
|
29
|
-
async function addServiceAccountToRoles(projectId, serviceAccountName, roles) {
|
|
29
|
+
async function addServiceAccountToRoles(projectId, serviceAccountName, roles, skipAccountLookup = false) {
|
|
30
30
|
const [{ name: fullServiceAccountName }, projectPolicy] = await Promise.all([
|
|
31
|
-
|
|
31
|
+
skipAccountLookup
|
|
32
|
+
? Promise.resolve({ name: serviceAccountName })
|
|
33
|
+
: (0, iam_1.getServiceAccount)(projectId, serviceAccountName),
|
|
32
34
|
getIamPolicy(projectId),
|
|
33
35
|
]);
|
|
34
36
|
const newMemberName = `serviceAccount:${fullServiceAccountName.split("/").pop()}`;
|
|
@@ -49,3 +51,23 @@ async function addServiceAccountToRoles(projectId, serviceAccountName, roles) {
|
|
|
49
51
|
return setIamPolicy(projectId, projectPolicy, "bindings");
|
|
50
52
|
}
|
|
51
53
|
exports.addServiceAccountToRoles = addServiceAccountToRoles;
|
|
54
|
+
/**
 * Checks whether a service account holds EVERY one of the given roles on a project.
 *
 * @param projectId the project whose IAM policy is inspected.
 * @param serviceAccountName the service account; a short email unless skipAccountLookup is set.
 * @param roles IAM role names (e.g. "roles/datastore.user") that must all be granted.
 * @param skipAccountLookup when true, serviceAccountName is treated as already fully
 *   qualified and the getServiceAccount API call is skipped.
 * @return true only if the account appears in the member list of every role's binding.
 */
async function serviceAccountHasRoles(projectId, serviceAccountName, roles, skipAccountLookup = false) {
    // Resolve the full account name and fetch the project policy in parallel.
    const [{ name: fullServiceAccountName }, projectPolicy] = await Promise.all([
        skipAccountLookup
            ? Promise.resolve({ name: serviceAccountName })
            : (0, iam_1.getServiceAccount)(projectId, serviceAccountName),
        getIamPolicy(projectId),
    ]);
    const memberName = `serviceAccount:${fullServiceAccountName.split("/").pop()}`;
    for (const roleName of roles) {
        // A returned Policy may omit `bindings` entirely, and a Binding may omit
        // `members`; treat both as "role not granted" instead of throwing a TypeError.
        const binding = (projectPolicy.bindings || []).find((b) => b.role === roleName);
        if (!binding || !binding.members || !binding.members.includes(memberName)) {
            return false;
        }
    }
    return true;
}
exports.serviceAccountHasRoles = serviceAccountHasRoles;
|
package/lib/index.js
CHANGED
|
@@ -9,7 +9,7 @@ program.version(pkg.version);
|
|
|
9
9
|
// Global CLI flags shared by every command, registered in one table so the
// flag/description pairs are easy to scan and extend.
const globalOptions = [
    ["-P, --project <alias_or_project_id>", "the Firebase project to use for this command"],
    ["--account <email>", "the Google account to use for authorization"],
    ["-j, --json", "output JSON instead of text, also triggers non-interactive mode"],
    ["--token <token>", "DEPRECATED - will be removed in a future major version - supply an auth token for this command"],
    ["--non-interactive", "error out of the command instead of waiting for prompts"],
    ["-i, --interactive", "force prompts to be displayed"],
    ["--debug", "print verbose debug output and keep a debug log file"],
];
for (const [flags, description] of globalOptions) {
    program.option(flags, description);
}
|
package/lib/previews.js
CHANGED
|
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
exports.previews = void 0;
|
|
4
4
|
const lodash_1 = require("lodash");
|
|
5
5
|
const configstore_1 = require("./configstore");
|
|
6
|
-
exports.previews = Object.assign({ rtdbrules: false, ext: false, extdev: false, rtdbmanagement: false, golang: false, deletegcfartifacts: false, emulatoruisnapshot: false, frameworkawareness: false, functionsparams: false }, configstore_1.configstore.get("previews"));
|
|
6
|
+
exports.previews = Object.assign({ rtdbrules: false, ext: false, extdev: false, rtdbmanagement: false, golang: false, deletegcfartifacts: false, emulatoruisnapshot: false, frameworkawareness: false, functionsparams: false, crossservicerules: false }, configstore_1.configstore.get("previews"));
|
|
7
7
|
if (process.env.FIREBASE_CLI_PREVIEWS) {
|
|
8
8
|
process.env.FIREBASE_CLI_PREVIEWS.split(",").forEach((feature) => {
|
|
9
9
|
if ((0, lodash_1.has)(exports.previews, feature)) {
|
package/lib/rc.js
CHANGED
|
@@ -136,15 +136,9 @@ class RC {
|
|
|
136
136
|
requireTarget(project, type, name) {
|
|
137
137
|
const target = this.target(project, type, name);
|
|
138
138
|
if (!target.length) {
|
|
139
|
-
throw new error_1.FirebaseError(
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
clc.bold(project) +
|
|
143
|
-
". Configure with:\n\n firebase target:apply " +
|
|
144
|
-
type +
|
|
145
|
-
" " +
|
|
146
|
-
name +
|
|
147
|
-
" <resources...>");
|
|
139
|
+
throw new error_1.FirebaseError(`Deploy target ${clc.bold(name)} not configured for project ${clc.bold(project)}. Configure with:
|
|
140
|
+
|
|
141
|
+
firebase target:apply ${type} ${name} <resources...>`);
|
|
148
142
|
}
|
|
149
143
|
return target;
|
|
150
144
|
}
|
package/lib/requireAuth.js
CHANGED
|
@@ -32,9 +32,13 @@ async function requireAuth(options) {
|
|
|
32
32
|
let tokenOpt = utils.getInheritedOption(options, "token");
|
|
33
33
|
if (tokenOpt) {
|
|
34
34
|
logger_1.logger.debug("> authorizing via --token option");
|
|
35
|
+
utils.logWarning("Authenticating with `--token` is deprecated and will be removed in a future major version of `firebase-tools`. " +
|
|
36
|
+
"Instead, use a service account key with `GOOGLE_APPLICATION_CREDENTIALS`: https://cloud.google.com/docs/authentication/getting-started");
|
|
35
37
|
}
|
|
36
38
|
else if (process.env.FIREBASE_TOKEN) {
|
|
37
39
|
logger_1.logger.debug("> authorizing via FIREBASE_TOKEN environment variable");
|
|
40
|
+
utils.logWarning("Authenticating with `FIREBASE_TOKEN` is deprecated and will be removed in a future major version of `firebase-tools`. " +
|
|
41
|
+
"Instead, use a service account key with `GOOGLE_APPLICATION_CREDENTIALS`: https://cloud.google.com/docs/authentication/getting-started");
|
|
38
42
|
}
|
|
39
43
|
else if (user) {
|
|
40
44
|
logger_1.logger.debug(`> authorizing via signed-in user (${user.email})`);
|