firebase-tools 11.4.1 → 11.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/deploy/functions/backend.js +6 -4
- package/lib/deploy/functions/build.js +107 -95
- package/lib/deploy/functions/ensure.js +1 -1
- package/lib/deploy/functions/params.js +5 -2
- package/lib/deploy/functions/prepare.js +3 -3
- package/lib/deploy/functions/pricing.js +3 -2
- package/lib/deploy/functions/prompts.js +1 -1
- package/lib/deploy/functions/release/fabricator.js +8 -7
- package/lib/deploy/functions/runtimes/discovery/parsing.js +19 -8
- package/lib/deploy/functions/runtimes/discovery/v1alpha1.js +112 -107
- package/lib/deploy/functions/runtimes/node/parseTriggers.js +53 -21
- package/lib/deploy/functions/services/storage.js +6 -0
- package/lib/deploy/hosting/convertConfig.js +8 -1
- package/lib/emulator/functionsEmulatorShared.js +6 -11
- package/lib/emulator/storage/files.js +4 -0
- package/lib/emulator/storage/metadata.js +6 -6
- package/lib/extensions/displayExtensionInfo.js +1 -101
- package/lib/extensions/emulator/triggerHelper.js +2 -2
- package/lib/extensions/updateHelper.js +1 -7
- package/lib/functional.js +16 -1
- package/lib/gcp/cloudfunctions.js +21 -8
- package/lib/gcp/cloudfunctionsv2.js +43 -19
- package/lib/gcp/cloudscheduler.js +25 -13
- package/lib/gcp/cloudtasks.js +4 -3
- package/lib/gcp/proto.js +18 -6
- package/lib/gcp/resourceManager.js +25 -3
- package/lib/previews.js +1 -1
- package/lib/rulesDeploy.js +39 -2
- package/npm-shrinkwrap.json +2 -2
- package/package.json +1 -1
|
@@ -1,7 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.printSourceDownloadLink = exports.
|
|
4
|
-
const _ = require("lodash");
|
|
3
|
+
exports.printSourceDownloadLink = exports.displayExtInfo = void 0;
|
|
5
4
|
const clc = require("cli-color");
|
|
6
5
|
const { marked } = require("marked");
|
|
7
6
|
const TerminalRenderer = require("marked-terminal");
|
|
@@ -12,8 +11,6 @@ const error_1 = require("../error");
|
|
|
12
11
|
marked.setOptions({
|
|
13
12
|
renderer: new TerminalRenderer(),
|
|
14
13
|
});
|
|
15
|
-
const additionColor = clc.green;
|
|
16
|
-
const deletionColor = clc.red;
|
|
17
14
|
function displayExtInfo(extensionName, publisher, spec, published = false) {
|
|
18
15
|
const lines = [];
|
|
19
16
|
lines.push(`**Name**: ${spec.displayName}`);
|
|
@@ -46,103 +43,6 @@ function displayExtInfo(extensionName, publisher, spec, published = false) {
|
|
|
46
43
|
}
|
|
47
44
|
}
|
|
48
45
|
exports.displayExtInfo = displayExtInfo;
|
|
49
|
-
function displayUpdateChangesNoInput(spec, newSpec) {
|
|
50
|
-
var _a, _b, _c, _d;
|
|
51
|
-
const lines = [];
|
|
52
|
-
if (spec.displayName !== newSpec.displayName) {
|
|
53
|
-
lines.push("", "**Name:**", deletionColor(`- ${spec.displayName}`), additionColor(`+ ${newSpec.displayName}`));
|
|
54
|
-
}
|
|
55
|
-
if (((_a = spec.author) === null || _a === void 0 ? void 0 : _a.authorName) !== ((_b = newSpec.author) === null || _b === void 0 ? void 0 : _b.authorName)) {
|
|
56
|
-
lines.push("", "**Author:**", deletionColor(`- ${(_c = spec.author) === null || _c === void 0 ? void 0 : _c.authorName}`), additionColor(`+ ${(_d = spec.author) === null || _d === void 0 ? void 0 : _d.authorName}`));
|
|
57
|
-
}
|
|
58
|
-
if (spec.description !== newSpec.description) {
|
|
59
|
-
lines.push("", "**Description:**", deletionColor(`- ${spec.description}`), additionColor(`+ ${newSpec.description}`));
|
|
60
|
-
}
|
|
61
|
-
if (spec.sourceUrl !== newSpec.sourceUrl) {
|
|
62
|
-
lines.push("", "**Source code:**", deletionColor(`- ${spec.sourceUrl}`), additionColor(`+ ${newSpec.sourceUrl}`));
|
|
63
|
-
}
|
|
64
|
-
if (spec.billingRequired && !newSpec.billingRequired) {
|
|
65
|
-
lines.push("", "**Billing is no longer required for this extension.**");
|
|
66
|
-
}
|
|
67
|
-
logger_1.logger.info(marked(lines.join("\n")));
|
|
68
|
-
return lines;
|
|
69
|
-
}
|
|
70
|
-
exports.displayUpdateChangesNoInput = displayUpdateChangesNoInput;
|
|
71
|
-
async function displayUpdateChangesRequiringConfirmation(args) {
|
|
72
|
-
const equals = (a, b) => {
|
|
73
|
-
return _.isEqual(a, b);
|
|
74
|
-
};
|
|
75
|
-
if (args.spec.license !== args.newSpec.license) {
|
|
76
|
-
const message = "\n" +
|
|
77
|
-
"**License**\n" +
|
|
78
|
-
deletionColor(args.spec.license ? `- ${args.spec.license}\n` : "- None\n") +
|
|
79
|
-
additionColor(args.newSpec.license ? `+ ${args.newSpec.license}\n` : "+ None\n");
|
|
80
|
-
logger_1.logger.info(message);
|
|
81
|
-
if (!(await (0, extensionsHelper_1.confirm)({ nonInteractive: args.nonInteractive, force: args.force, default: true }))) {
|
|
82
|
-
throw new error_1.FirebaseError("Unable to update this extension instance without explicit consent for the change to 'License'.");
|
|
83
|
-
}
|
|
84
|
-
}
|
|
85
|
-
const apisDiffDeletions = _.differenceWith(args.spec.apis, _.get(args.newSpec, "apis", []), equals);
|
|
86
|
-
const apisDiffAdditions = _.differenceWith(args.newSpec.apis, _.get(args.spec, "apis", []), equals);
|
|
87
|
-
if (apisDiffDeletions.length || apisDiffAdditions.length) {
|
|
88
|
-
let message = "\n**APIs:**\n";
|
|
89
|
-
apisDiffDeletions.forEach((api) => {
|
|
90
|
-
message += deletionColor(`- ${api.apiName} (${api.reason})\n`);
|
|
91
|
-
});
|
|
92
|
-
apisDiffAdditions.forEach((api) => {
|
|
93
|
-
message += additionColor(`+ ${api.apiName} (${api.reason})\n`);
|
|
94
|
-
});
|
|
95
|
-
logger_1.logger.info(message);
|
|
96
|
-
if (!(await (0, extensionsHelper_1.confirm)({ nonInteractive: args.nonInteractive, force: args.force, default: true }))) {
|
|
97
|
-
throw new error_1.FirebaseError("Unable to update this extension instance without explicit consent for the change to 'APIs'.");
|
|
98
|
-
}
|
|
99
|
-
}
|
|
100
|
-
const resourcesDiffDeletions = _.differenceWith(args.spec.resources, _.get(args.newSpec, "resources", []), compareResources);
|
|
101
|
-
const resourcesDiffAdditions = _.differenceWith(args.newSpec.resources, _.get(args.spec, "resources", []), compareResources);
|
|
102
|
-
if (resourcesDiffDeletions.length || resourcesDiffAdditions.length) {
|
|
103
|
-
let message = "\n**Resources:**\n";
|
|
104
|
-
resourcesDiffDeletions.forEach((resource) => {
|
|
105
|
-
message += deletionColor(` - ${getResourceReadableName(resource)}`);
|
|
106
|
-
});
|
|
107
|
-
resourcesDiffAdditions.forEach((resource) => {
|
|
108
|
-
message += additionColor(`+ ${getResourceReadableName(resource)}`);
|
|
109
|
-
});
|
|
110
|
-
logger_1.logger.info(message);
|
|
111
|
-
if (!(await (0, extensionsHelper_1.confirm)({ nonInteractive: args.nonInteractive, force: args.force, default: true }))) {
|
|
112
|
-
throw new error_1.FirebaseError("Unable to update this extension instance without explicit consent for the change to 'Resources'.");
|
|
113
|
-
}
|
|
114
|
-
}
|
|
115
|
-
const rolesDiffDeletions = _.differenceWith(args.spec.roles, _.get(args.newSpec, "roles", []), equals);
|
|
116
|
-
const rolesDiffAdditions = _.differenceWith(args.newSpec.roles, _.get(args.spec, "roles", []), equals);
|
|
117
|
-
if (rolesDiffDeletions.length || rolesDiffAdditions.length) {
|
|
118
|
-
let message = "\n**Permissions:**\n";
|
|
119
|
-
rolesDiffDeletions.forEach((role) => {
|
|
120
|
-
message += deletionColor(`- ${role.role} (${role.reason})\n`);
|
|
121
|
-
});
|
|
122
|
-
rolesDiffAdditions.forEach((role) => {
|
|
123
|
-
message += additionColor(`+ ${role.role} (${role.reason})\n`);
|
|
124
|
-
});
|
|
125
|
-
logger_1.logger.info(message);
|
|
126
|
-
if (!(await (0, extensionsHelper_1.confirm)({ nonInteractive: args.nonInteractive, force: args.force, default: true }))) {
|
|
127
|
-
throw new error_1.FirebaseError("Unable to update this extension instance without explicit consent for the change to 'Permissions'.");
|
|
128
|
-
}
|
|
129
|
-
}
|
|
130
|
-
if (!args.spec.billingRequired && args.newSpec.billingRequired) {
|
|
131
|
-
logger_1.logger.info("Billing is now required for the new version of this extension.");
|
|
132
|
-
if (!(await (0, extensionsHelper_1.confirm)({ nonInteractive: args.nonInteractive, force: args.force, default: true }))) {
|
|
133
|
-
throw new error_1.FirebaseError("Unable to update this extension instance without explicit consent for the change to 'BillingRequired'.");
|
|
134
|
-
}
|
|
135
|
-
}
|
|
136
|
-
}
|
|
137
|
-
exports.displayUpdateChangesRequiringConfirmation = displayUpdateChangesRequiringConfirmation;
|
|
138
|
-
function compareResources(resource1, resource2) {
|
|
139
|
-
return resource1.name === resource2.name && resource1.type === resource2.type;
|
|
140
|
-
}
|
|
141
|
-
function getResourceReadableName(resource) {
|
|
142
|
-
return resource.type === "firebaseextensions.v1beta.function"
|
|
143
|
-
? `${resource.name} (Cloud Function): ${resource.description}\n`
|
|
144
|
-
: `${resource.name} (${resource.type})\n`;
|
|
145
|
-
}
|
|
146
46
|
function printSourceDownloadLink(sourceDownloadUri) {
|
|
147
47
|
const sourceDownloadMsg = `Want to review the source code that will be installed? Download it here: ${sourceDownloadUri}`;
|
|
148
48
|
utils.logBullet(marked(sourceDownloadMsg));
|
|
@@ -12,8 +12,8 @@ function functionResourceToEmulatedTriggerDefintion(resource) {
|
|
|
12
12
|
platform: "gcfv1",
|
|
13
13
|
};
|
|
14
14
|
const properties = resource.properties || {};
|
|
15
|
-
proto.
|
|
16
|
-
proto.
|
|
15
|
+
proto.convertIfPresent(etd, properties, "timeoutSeconds", "timeout", proto.secondsFromDuration);
|
|
16
|
+
proto.convertIfPresent(etd, properties, "regions", "location", (str) => [str]);
|
|
17
17
|
proto.copyIfPresent(etd, properties, "availableMemoryMb");
|
|
18
18
|
if (properties.httpsTrigger) {
|
|
19
19
|
etd.httpsTrigger = properties.httpsTrigger;
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.inferUpdateSource = exports.updateFromPublisherSource = exports.updateToVersionFromPublisherSource = exports.updateFromUrlSource = exports.updateFromLocalSource = exports.update = exports.
|
|
3
|
+
exports.inferUpdateSource = exports.updateFromPublisherSource = exports.updateToVersionFromPublisherSource = exports.updateFromUrlSource = exports.updateFromLocalSource = exports.update = exports.warningUpdateToOtherSource = exports.getExistingSourceOrigin = void 0;
|
|
4
4
|
const clc = require("cli-color");
|
|
5
5
|
const semver = require("semver");
|
|
6
6
|
const { marked } = require("marked");
|
|
@@ -52,12 +52,6 @@ function warningUpdateToOtherSource(sourceOrigin) {
|
|
|
52
52
|
logger_1.logger.info(marked(warning));
|
|
53
53
|
}
|
|
54
54
|
exports.warningUpdateToOtherSource = warningUpdateToOtherSource;
|
|
55
|
-
async function displayChanges(args) {
|
|
56
|
-
utils.logLabeledBullet("extensions", "This update contains the following changes:");
|
|
57
|
-
(0, displayExtensionInfo_1.displayUpdateChangesNoInput)(args.spec, args.newSpec);
|
|
58
|
-
await (0, displayExtensionInfo_1.displayUpdateChangesRequiringConfirmation)(args);
|
|
59
|
-
}
|
|
60
|
-
exports.displayChanges = displayChanges;
|
|
61
55
|
async function update(updateOptions) {
|
|
62
56
|
const { projectId, instanceId, source, extRef, params, canEmitEvents, allowedEventTypes, eventarcChannel, } = updateOptions;
|
|
63
57
|
if (extRef) {
|
package/lib/functional.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.partition = exports.assertExhaustive = exports.zipIn = exports.zip = exports.reduceFlat = exports.flatten = exports.flattenArray = exports.flattenObject = void 0;
|
|
3
|
+
exports.nullsafeVisitor = exports.mapObject = exports.partition = exports.assertExhaustive = exports.zipIn = exports.zip = exports.reduceFlat = exports.flatten = exports.flattenArray = exports.flattenObject = void 0;
|
|
4
4
|
function* flattenObject(obj) {
|
|
5
5
|
function* helper(path, obj) {
|
|
6
6
|
for (const [k, v] of Object.entries(obj)) {
|
|
@@ -63,3 +63,18 @@ function partition(arr, callbackFn) {
|
|
|
63
63
|
}, [[], []]);
|
|
64
64
|
}
|
|
65
65
|
exports.partition = partition;
|
|
66
|
+
function mapObject(input, transform) {
|
|
67
|
+
const result = {};
|
|
68
|
+
for (const [k, v] of Object.entries(input)) {
|
|
69
|
+
result[k] = transform(v);
|
|
70
|
+
}
|
|
71
|
+
return result;
|
|
72
|
+
}
|
|
73
|
+
exports.mapObject = mapObject;
|
|
74
|
+
const nullsafeVisitor = (func, ...rest) => (first) => {
|
|
75
|
+
if (first === null) {
|
|
76
|
+
return null;
|
|
77
|
+
}
|
|
78
|
+
return func(first, ...rest);
|
|
79
|
+
};
|
|
80
|
+
exports.nullsafeVisitor = nullsafeVisitor;
|
|
@@ -255,18 +255,20 @@ function endpointFromFunction(gcfFunction) {
|
|
|
255
255
|
if (securityLevel) {
|
|
256
256
|
endpoint.securityLevel = securityLevel;
|
|
257
257
|
}
|
|
258
|
-
proto.copyIfPresent(endpoint, gcfFunction, "
|
|
259
|
-
proto.renameIfPresent(endpoint, gcfFunction, "
|
|
258
|
+
proto.copyIfPresent(endpoint, gcfFunction, "minInstances", "maxInstances", "ingressSettings", "labels", "environmentVariables", "secretEnvironmentVariables", "sourceUploadUrl");
|
|
259
|
+
proto.renameIfPresent(endpoint, gcfFunction, "serviceAccount", "serviceAccountEmail");
|
|
260
|
+
proto.convertIfPresent(endpoint, gcfFunction, "availableMemoryMb", (raw) => raw);
|
|
261
|
+
proto.convertIfPresent(endpoint, gcfFunction, "timeoutSeconds", "timeout", (dur) => dur === null ? null : proto.secondsFromDuration(dur));
|
|
260
262
|
if (gcfFunction.vpcConnector) {
|
|
261
263
|
endpoint.vpc = { connector: gcfFunction.vpcConnector };
|
|
262
|
-
proto.
|
|
264
|
+
proto.convertIfPresent(endpoint.vpc, gcfFunction, "egressSettings", "vpcConnectorEgressSettings", (raw) => raw);
|
|
263
265
|
}
|
|
264
266
|
endpoint.codebase = ((_g = gcfFunction.labels) === null || _g === void 0 ? void 0 : _g[exports.CODEBASE_LABEL]) || projectConfig.DEFAULT_CODEBASE;
|
|
265
267
|
return endpoint;
|
|
266
268
|
}
|
|
267
269
|
exports.endpointFromFunction = endpointFromFunction;
|
|
268
270
|
function functionFromEndpoint(endpoint, sourceUploadUrl) {
|
|
269
|
-
var _a;
|
|
271
|
+
var _a, _b;
|
|
270
272
|
if (endpoint.platform !== "gcfv1") {
|
|
271
273
|
throw new error_1.FirebaseError("Trying to create a v1 CloudFunction with v2 API. This should never happen");
|
|
272
274
|
}
|
|
@@ -281,8 +283,13 @@ function functionFromEndpoint(endpoint, sourceUploadUrl) {
|
|
|
281
283
|
runtime: endpoint.runtime,
|
|
282
284
|
dockerRegistry: "ARTIFACT_REGISTRY",
|
|
283
285
|
};
|
|
284
|
-
|
|
286
|
+
if (typeof endpoint.labels !== "undefined") {
|
|
287
|
+
gcfFunction.labels = Object.assign({}, endpoint.labels);
|
|
288
|
+
}
|
|
285
289
|
if (backend.isEventTriggered(endpoint)) {
|
|
290
|
+
if (!((_a = endpoint.eventTrigger.eventFilters) === null || _a === void 0 ? void 0 : _a.resource)) {
|
|
291
|
+
throw new error_1.FirebaseError("Cannot create v1 function from an eventTrigger without a resource");
|
|
292
|
+
}
|
|
286
293
|
gcfFunction.eventTrigger = {
|
|
287
294
|
eventType: endpoint.eventTrigger.eventType,
|
|
288
295
|
resource: endpoint.eventTrigger.eventFilters.resource,
|
|
@@ -316,18 +323,24 @@ function functionFromEndpoint(endpoint, sourceUploadUrl) {
|
|
|
316
323
|
gcfFunction.httpsTrigger.securityLevel = endpoint.securityLevel;
|
|
317
324
|
}
|
|
318
325
|
}
|
|
319
|
-
proto.copyIfPresent(gcfFunction, endpoint, "
|
|
320
|
-
proto.renameIfPresent(gcfFunction, endpoint, "
|
|
326
|
+
proto.copyIfPresent(gcfFunction, endpoint, "minInstances", "maxInstances", "ingressSettings", "environmentVariables", "secretEnvironmentVariables");
|
|
327
|
+
proto.renameIfPresent(gcfFunction, endpoint, "serviceAccountEmail", "serviceAccount");
|
|
328
|
+
proto.convertIfPresent(gcfFunction, endpoint, "availableMemoryMb", (mem) => mem);
|
|
329
|
+
proto.convertIfPresent(gcfFunction, endpoint, "timeout", "timeoutSeconds", (sec) => sec ? proto.durationFromSeconds(sec) : null);
|
|
321
330
|
if (endpoint.vpc) {
|
|
322
331
|
proto.renameIfPresent(gcfFunction, endpoint.vpc, "vpcConnector", "connector");
|
|
323
332
|
proto.renameIfPresent(gcfFunction, endpoint.vpc, "vpcConnectorEgressSettings", "egressSettings");
|
|
324
333
|
}
|
|
334
|
+
else if (endpoint.vpc === null) {
|
|
335
|
+
gcfFunction.vpcConnector = null;
|
|
336
|
+
gcfFunction.vpcConnectorEgressSettings = null;
|
|
337
|
+
}
|
|
325
338
|
const codebase = endpoint.codebase || projectConfig.DEFAULT_CODEBASE;
|
|
326
339
|
if (codebase !== projectConfig.DEFAULT_CODEBASE) {
|
|
327
340
|
gcfFunction.labels = Object.assign(Object.assign({}, gcfFunction.labels), { [exports.CODEBASE_LABEL]: codebase });
|
|
328
341
|
}
|
|
329
342
|
else {
|
|
330
|
-
(
|
|
343
|
+
(_b = gcfFunction.labels) === null || _b === void 0 ? true : delete _b[exports.CODEBASE_LABEL];
|
|
331
344
|
}
|
|
332
345
|
return gcfFunction;
|
|
333
346
|
}
|
|
@@ -161,7 +161,7 @@ async function deleteFunction(cloudFunction) {
|
|
|
161
161
|
}
|
|
162
162
|
exports.deleteFunction = deleteFunction;
|
|
163
163
|
function functionFromEndpoint(endpoint, source) {
|
|
164
|
-
var _a;
|
|
164
|
+
var _a, _b;
|
|
165
165
|
if (endpoint.platform !== "gcfv2") {
|
|
166
166
|
throw new error_1.FirebaseError("Trying to create a v2 CloudFunction with v1 API. This should never happen");
|
|
167
167
|
}
|
|
@@ -182,7 +182,8 @@ function functionFromEndpoint(endpoint, source) {
|
|
|
182
182
|
serviceConfig: {},
|
|
183
183
|
};
|
|
184
184
|
proto.copyIfPresent(gcfFunction, endpoint, "labels");
|
|
185
|
-
proto.copyIfPresent(gcfFunction.serviceConfig, endpoint, "environmentVariables", "secretEnvironmentVariables", "
|
|
185
|
+
proto.copyIfPresent(gcfFunction.serviceConfig, endpoint, "environmentVariables", "secretEnvironmentVariables", "ingressSettings", "timeoutSeconds");
|
|
186
|
+
proto.renameIfPresent(gcfFunction.serviceConfig, endpoint, "serviceAccountEmail", "serviceAccount");
|
|
186
187
|
const mem = endpoint.availableMemoryMb || backend.DEFAULT_MEMORY;
|
|
187
188
|
gcfFunction.serviceConfig.availableMemory = mem > 1024 ? `${mem / 1024}Gi` : `${mem}Mi`;
|
|
188
189
|
proto.renameIfPresent(gcfFunction.serviceConfig, endpoint, "minInstanceCount", "minInstances");
|
|
@@ -191,11 +192,19 @@ function functionFromEndpoint(endpoint, source) {
|
|
|
191
192
|
proto.renameIfPresent(gcfFunction.serviceConfig, endpoint.vpc, "vpcConnector", "connector");
|
|
192
193
|
proto.renameIfPresent(gcfFunction.serviceConfig, endpoint.vpc, "vpcConnectorEgressSettings", "egressSettings");
|
|
193
194
|
}
|
|
195
|
+
else if (endpoint.vpc === null) {
|
|
196
|
+
gcfFunction.serviceConfig.vpcConnector = null;
|
|
197
|
+
gcfFunction.serviceConfig.vpcConnectorEgressSettings = null;
|
|
198
|
+
}
|
|
194
199
|
if (backend.isEventTriggered(endpoint)) {
|
|
195
200
|
gcfFunction.eventTrigger = {
|
|
196
201
|
eventType: endpoint.eventTrigger.eventType,
|
|
197
202
|
};
|
|
198
203
|
if (gcfFunction.eventTrigger.eventType === v2_1.PUBSUB_PUBLISH_EVENT) {
|
|
204
|
+
if (!((_a = endpoint.eventTrigger.eventFilters) === null || _a === void 0 ? void 0 : _a.topic)) {
|
|
205
|
+
throw new error_1.FirebaseError("Error: Pub/Sub event trigger is missing topic: " +
|
|
206
|
+
JSON.stringify(endpoint.eventTrigger, null, 2));
|
|
207
|
+
}
|
|
199
208
|
gcfFunction.eventTrigger.pubsubTopic = endpoint.eventTrigger.eventFilters.topic;
|
|
200
209
|
gcfFunction.eventTrigger.eventFilters = [];
|
|
201
210
|
for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilters)) {
|
|
@@ -206,7 +215,7 @@ function functionFromEndpoint(endpoint, source) {
|
|
|
206
215
|
}
|
|
207
216
|
else {
|
|
208
217
|
gcfFunction.eventTrigger.eventFilters = [];
|
|
209
|
-
for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilters)) {
|
|
218
|
+
for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilters || {})) {
|
|
210
219
|
gcfFunction.eventTrigger.eventFilters.push({ attribute, value });
|
|
211
220
|
}
|
|
212
221
|
for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilterPathPatterns || {})) {
|
|
@@ -241,7 +250,7 @@ function functionFromEndpoint(endpoint, source) {
|
|
|
241
250
|
gcfFunction.labels = Object.assign(Object.assign({}, gcfFunction.labels), { [exports.CODEBASE_LABEL]: codebase });
|
|
242
251
|
}
|
|
243
252
|
else {
|
|
244
|
-
(
|
|
253
|
+
(_b = gcfFunction.labels) === null || _b === void 0 ? true : delete _b[exports.CODEBASE_LABEL];
|
|
245
254
|
}
|
|
246
255
|
return gcfFunction;
|
|
247
256
|
}
|
|
@@ -273,29 +282,33 @@ function endpointFromFunction(gcfFunction) {
|
|
|
273
282
|
};
|
|
274
283
|
}
|
|
275
284
|
else if (gcfFunction.eventTrigger) {
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
eventType: gcfFunction.eventTrigger.eventType,
|
|
279
|
-
eventFilters: {},
|
|
280
|
-
retry: false,
|
|
281
|
-
},
|
|
282
|
-
};
|
|
285
|
+
const eventFilters = {};
|
|
286
|
+
const eventFilterPathPatterns = {};
|
|
283
287
|
if (gcfFunction.eventTrigger.pubsubTopic) {
|
|
284
|
-
|
|
288
|
+
eventFilters.topic = gcfFunction.eventTrigger.pubsubTopic;
|
|
285
289
|
}
|
|
286
290
|
else {
|
|
287
291
|
for (const eventFilter of gcfFunction.eventTrigger.eventFilters || []) {
|
|
288
292
|
if (eventFilter.operator === "match-path-pattern") {
|
|
289
|
-
|
|
290
|
-
trigger.eventTrigger.eventFilterPathPatterns = {};
|
|
291
|
-
}
|
|
292
|
-
trigger.eventTrigger.eventFilterPathPatterns[eventFilter.attribute] = eventFilter.value;
|
|
293
|
+
eventFilterPathPatterns[eventFilter.attribute] = eventFilter.value;
|
|
293
294
|
}
|
|
294
295
|
else {
|
|
295
|
-
|
|
296
|
+
eventFilters[eventFilter.attribute] = eventFilter.value;
|
|
296
297
|
}
|
|
297
298
|
}
|
|
298
299
|
}
|
|
300
|
+
trigger = {
|
|
301
|
+
eventTrigger: {
|
|
302
|
+
eventType: gcfFunction.eventTrigger.eventType,
|
|
303
|
+
retry: false,
|
|
304
|
+
},
|
|
305
|
+
};
|
|
306
|
+
if (Object.keys(eventFilters).length) {
|
|
307
|
+
trigger.eventTrigger.eventFilters = eventFilters;
|
|
308
|
+
}
|
|
309
|
+
if (Object.keys(eventFilterPathPatterns).length) {
|
|
310
|
+
trigger.eventTrigger.eventFilterPathPatterns = eventFilterPathPatterns;
|
|
311
|
+
}
|
|
299
312
|
proto.copyIfPresent(trigger.eventTrigger, gcfFunction.eventTrigger, "channel");
|
|
300
313
|
proto.renameIfPresent(trigger.eventTrigger, gcfFunction.eventTrigger, "region", "triggerRegion");
|
|
301
314
|
}
|
|
@@ -308,8 +321,19 @@ function endpointFromFunction(gcfFunction) {
|
|
|
308
321
|
const endpoint = Object.assign(Object.assign({ platform: "gcfv2", id,
|
|
309
322
|
project,
|
|
310
323
|
region }, trigger), { entryPoint: gcfFunction.buildConfig.entryPoint, runtime: gcfFunction.buildConfig.runtime, uri: gcfFunction.serviceConfig.uri });
|
|
311
|
-
proto.copyIfPresent(endpoint, gcfFunction.serviceConfig, "
|
|
312
|
-
proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "
|
|
324
|
+
proto.copyIfPresent(endpoint, gcfFunction.serviceConfig, "ingressSettings", "environmentVariables", "secretEnvironmentVariables", "timeoutSeconds");
|
|
325
|
+
proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "serviceAccount", "serviceAccountEmail");
|
|
326
|
+
proto.convertIfPresent(endpoint, gcfFunction.serviceConfig, "availableMemoryMb", "availableMemory", (prod) => {
|
|
327
|
+
if (prod === null) {
|
|
328
|
+
logger_1.logger.debug("Prod should always return a valid memory amount");
|
|
329
|
+
return prod;
|
|
330
|
+
}
|
|
331
|
+
const mem = mebibytes(prod);
|
|
332
|
+
if (!backend.isValidMemoryOption(mem)) {
|
|
333
|
+
logger_1.logger.warn("Converting a function to an endpoint with an invalid memory option", mem);
|
|
334
|
+
}
|
|
335
|
+
return mem;
|
|
336
|
+
});
|
|
313
337
|
proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "minInstances", "minInstanceCount");
|
|
314
338
|
proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "maxInstances", "maxInstanceCount");
|
|
315
339
|
proto.copyIfPresent(endpoint, gcfFunction, "labels");
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.jobFromEndpoint = exports.createOrReplaceJob = exports.updateJob = exports.getJob = exports.deleteJob = exports.createJob =
|
|
3
|
+
exports.jobFromEndpoint = exports.topicNameForEndpoint = exports.jobNameForEndpoint = exports.createOrReplaceJob = exports.updateJob = exports.getJob = exports.deleteJob = exports.createJob = void 0;
|
|
4
4
|
const _ = require("lodash");
|
|
5
5
|
const error_1 = require("../error");
|
|
6
6
|
const logger_1 = require("../logger");
|
|
@@ -11,13 +11,6 @@ const proto = require("./proto");
|
|
|
11
11
|
const functional_1 = require("../functional");
|
|
12
12
|
const VERSION = "v1beta1";
|
|
13
13
|
const DEFAULT_TIME_ZONE = "America/Los_Angeles";
|
|
14
|
-
function assertValidJob(job) {
|
|
15
|
-
proto.assertOneOf("Scheduler Job", job, "target", "httpTarget", "pubsubTarget");
|
|
16
|
-
if (job.httpTarget) {
|
|
17
|
-
proto.assertOneOf("Scheduler Job", job.httpTarget, "httpTarget.authorizationHeader", "oauthToken", "odicToken");
|
|
18
|
-
}
|
|
19
|
-
}
|
|
20
|
-
exports.assertValidJob = assertValidJob;
|
|
21
14
|
const apiClient = new apiv2_1.Client({ urlPrefix: api_1.cloudschedulerOrigin, apiVersion: VERSION });
|
|
22
15
|
function createJob(job) {
|
|
23
16
|
const strippedName = job.name.substring(0, job.name.lastIndexOf("/"));
|
|
@@ -76,14 +69,22 @@ function isIdentical(job, otherJob) {
|
|
|
76
69
|
job.timeZone === otherJob.timeZone &&
|
|
77
70
|
_.isEqual(job.retryConfig, otherJob.retryConfig));
|
|
78
71
|
}
|
|
72
|
+
function jobNameForEndpoint(endpoint, appEngineLocation) {
|
|
73
|
+
const id = backend.scheduleIdForFunction(endpoint);
|
|
74
|
+
return `projects/${endpoint.project}/locations/${appEngineLocation}/jobs/${id}`;
|
|
75
|
+
}
|
|
76
|
+
exports.jobNameForEndpoint = jobNameForEndpoint;
|
|
77
|
+
function topicNameForEndpoint(endpoint) {
|
|
78
|
+
const id = backend.scheduleIdForFunction(endpoint);
|
|
79
|
+
return `projects/${endpoint.project}/topics/${id}`;
|
|
80
|
+
}
|
|
81
|
+
exports.topicNameForEndpoint = topicNameForEndpoint;
|
|
79
82
|
function jobFromEndpoint(endpoint, appEngineLocation) {
|
|
80
83
|
const job = {};
|
|
81
84
|
if (endpoint.platform === "gcfv1") {
|
|
82
|
-
|
|
83
|
-
const region = appEngineLocation;
|
|
84
|
-
job.name = `projects/${endpoint.project}/locations/${region}/jobs/${id}`;
|
|
85
|
+
job.name = jobNameForEndpoint(endpoint, appEngineLocation);
|
|
85
86
|
job.pubsubTarget = {
|
|
86
|
-
topicName:
|
|
87
|
+
topicName: topicNameForEndpoint(endpoint),
|
|
87
88
|
attributes: {
|
|
88
89
|
scheduled: "true",
|
|
89
90
|
},
|
|
@@ -95,7 +96,18 @@ function jobFromEndpoint(endpoint, appEngineLocation) {
|
|
|
95
96
|
else {
|
|
96
97
|
(0, functional_1.assertExhaustive)(endpoint.platform);
|
|
97
98
|
}
|
|
98
|
-
|
|
99
|
+
if (!endpoint.scheduleTrigger.schedule) {
|
|
100
|
+
throw new error_1.FirebaseError("Cannot create a scheduler job without a schedule:" + JSON.stringify(endpoint));
|
|
101
|
+
}
|
|
102
|
+
job.schedule = endpoint.scheduleTrigger.schedule;
|
|
103
|
+
job.timeZone = endpoint.scheduleTrigger.timeZone || DEFAULT_TIME_ZONE;
|
|
104
|
+
if (endpoint.scheduleTrigger.retryConfig) {
|
|
105
|
+
job.retryConfig = {};
|
|
106
|
+
proto.copyIfPresent(job.retryConfig, endpoint.scheduleTrigger.retryConfig, "maxDoublings", "retryCount");
|
|
107
|
+
proto.convertIfPresent(job.retryConfig, endpoint.scheduleTrigger.retryConfig, "maxBackoffDuration", "maxBackoffSeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
108
|
+
proto.convertIfPresent(job.retryConfig, endpoint.scheduleTrigger.retryConfig, "minBackoffDuration", "minBackoffSeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
109
|
+
proto.convertIfPresent(job.retryConfig, endpoint.scheduleTrigger.retryConfig, "maxRetryDuration", "maxRetrySeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
110
|
+
}
|
|
99
111
|
return job;
|
|
100
112
|
}
|
|
101
113
|
exports.jobFromEndpoint = jobFromEndpoint;
|
package/lib/gcp/cloudtasks.js
CHANGED
|
@@ -4,6 +4,7 @@ exports.queueFromEndpoint = exports.queueNameForEndpoint = exports.setEnqueuer =
|
|
|
4
4
|
const proto = require("./proto");
|
|
5
5
|
const apiv2_1 = require("../apiv2");
|
|
6
6
|
const api_1 = require("../api");
|
|
7
|
+
const functional_1 = require("../functional");
|
|
7
8
|
const API_VERSION = "v2";
|
|
8
9
|
const client = new apiv2_1.Client({
|
|
9
10
|
urlPrefix: api_1.cloudTasksOrigin,
|
|
@@ -136,9 +137,9 @@ function queueFromEndpoint(endpoint) {
|
|
|
136
137
|
}
|
|
137
138
|
if (endpoint.taskQueueTrigger.retryConfig) {
|
|
138
139
|
proto.copyIfPresent(queue.retryConfig, endpoint.taskQueueTrigger.retryConfig, "maxAttempts", "maxDoublings");
|
|
139
|
-
proto.
|
|
140
|
-
proto.
|
|
141
|
-
proto.
|
|
140
|
+
proto.convertIfPresent(queue.retryConfig, endpoint.taskQueueTrigger.retryConfig, "maxRetryDuration", "maxRetrySeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
141
|
+
proto.convertIfPresent(queue.retryConfig, endpoint.taskQueueTrigger.retryConfig, "maxBackoff", "maxBackoffSeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
142
|
+
proto.convertIfPresent(queue.retryConfig, endpoint.taskQueueTrigger.retryConfig, "minBackoff", "minBackoffSeconds", (0, functional_1.nullsafeVisitor)(proto.durationFromSeconds));
|
|
142
143
|
}
|
|
143
144
|
return queue;
|
|
144
145
|
}
|
package/lib/gcp/proto.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.formatServiceAccount = exports.getInvokerMembers = exports.fieldMasks = exports.renameIfPresent = exports.copyIfPresent = exports.assertOneOf = exports.durationFromSeconds = exports.secondsFromDuration = void 0;
|
|
3
|
+
exports.formatServiceAccount = exports.getInvokerMembers = exports.fieldMasks = exports.renameIfPresent = exports.convertIfPresent = exports.copyIfPresent = exports.assertOneOf = exports.durationFromSeconds = exports.secondsFromDuration = void 0;
|
|
4
4
|
const error_1 = require("../error");
|
|
5
5
|
function secondsFromDuration(d) {
|
|
6
6
|
return +d.slice(0, d.length - 1);
|
|
@@ -32,13 +32,25 @@ function copyIfPresent(dest, src, ...fields) {
|
|
|
32
32
|
}
|
|
33
33
|
}
|
|
34
34
|
exports.copyIfPresent = copyIfPresent;
|
|
35
|
-
function
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
35
|
+
function convertIfPresent(...args) {
|
|
36
|
+
if (args.length === 4) {
|
|
37
|
+
const [dest, src, key, converter] = args;
|
|
38
|
+
if (Object.prototype.hasOwnProperty.call(src, key)) {
|
|
39
|
+
dest[key] = converter(src[key]);
|
|
40
|
+
}
|
|
41
|
+
return;
|
|
42
|
+
}
|
|
43
|
+
const [dest, src, destKey, srcKey, converter] = args;
|
|
44
|
+
if (Object.prototype.hasOwnProperty.call(src, srcKey)) {
|
|
45
|
+
dest[destKey] = converter(src[srcKey]);
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
exports.convertIfPresent = convertIfPresent;
|
|
49
|
+
function renameIfPresent(dest, src, destKey, srcKey) {
|
|
50
|
+
if (!Object.prototype.hasOwnProperty.call(src, srcKey)) {
|
|
39
51
|
return;
|
|
40
52
|
}
|
|
41
|
-
dest[
|
|
53
|
+
dest[destKey] = src[srcKey];
|
|
42
54
|
}
|
|
43
55
|
exports.renameIfPresent = renameIfPresent;
|
|
44
56
|
function fieldMasks(object, ...doNotRecurseIn) {
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.addServiceAccountToRoles = exports.setIamPolicy = exports.getIamPolicy = exports.firebaseRoles = void 0;
|
|
3
|
+
exports.serviceAccountHasRoles = exports.addServiceAccountToRoles = exports.setIamPolicy = exports.getIamPolicy = exports.firebaseRoles = void 0;
|
|
4
4
|
const lodash_1 = require("lodash");
|
|
5
5
|
const api_1 = require("../api");
|
|
6
6
|
const apiv2_1 = require("../apiv2");
|
|
@@ -26,9 +26,11 @@ async function setIamPolicy(projectIdOrNumber, newPolicy, updateMask = "") {
|
|
|
26
26
|
return response.body;
|
|
27
27
|
}
|
|
28
28
|
exports.setIamPolicy = setIamPolicy;
|
|
29
|
-
async function addServiceAccountToRoles(projectId, serviceAccountName, roles) {
|
|
29
|
+
async function addServiceAccountToRoles(projectId, serviceAccountName, roles, skipAccountLookup = false) {
|
|
30
30
|
const [{ name: fullServiceAccountName }, projectPolicy] = await Promise.all([
|
|
31
|
-
|
|
31
|
+
skipAccountLookup
|
|
32
|
+
? Promise.resolve({ name: serviceAccountName })
|
|
33
|
+
: (0, iam_1.getServiceAccount)(projectId, serviceAccountName),
|
|
32
34
|
getIamPolicy(projectId),
|
|
33
35
|
]);
|
|
34
36
|
const newMemberName = `serviceAccount:${fullServiceAccountName.split("/").pop()}`;
|
|
@@ -49,3 +51,23 @@ async function addServiceAccountToRoles(projectId, serviceAccountName, roles) {
|
|
|
49
51
|
return setIamPolicy(projectId, projectPolicy, "bindings");
|
|
50
52
|
}
|
|
51
53
|
exports.addServiceAccountToRoles = addServiceAccountToRoles;
|
|
54
|
+
/**
 * Checks whether `serviceAccountName` holds every role in `roles` on the
 * project's IAM policy.
 * @param projectId project id or number whose policy is inspected.
 * @param serviceAccountName account email (or resource name when resolved).
 * @param roles list of role names that must all be bound to the account.
 * @param skipAccountLookup when true, uses the name as-is instead of resolving
 *        it through the IAM API (saves a request when the email is known).
 * @returns true only if every role's binding lists the account as a member.
 * NOTE(review): assumes projectPolicy.bindings is always an array — confirm
 * the getIamPolicy response shape for projects with an empty policy.
 */
async function serviceAccountHasRoles(projectId, serviceAccountName, roles, skipAccountLookup = false) {
    const accountLookup = skipAccountLookup
        ? Promise.resolve({ name: serviceAccountName })
        : (0, iam_1.getServiceAccount)(projectId, serviceAccountName);
    // Resolve the account and fetch the policy concurrently.
    const [account, projectPolicy] = await Promise.all([accountLookup, getIamPolicy(projectId)]);
    // Policy members look like "serviceAccount:<email>"; keep only the final
    // path segment of the (possibly fully-qualified) account name.
    const memberName = `serviceAccount:${account.name.split("/").pop()}`;
    for (const roleName of roles) {
        const binding = projectPolicy.bindings.find((b) => b.role === roleName);
        if (!binding || !binding.members.includes(memberName)) {
            return false;
        }
    }
    return true;
}
|
|
73
|
+
exports.serviceAccountHasRoles = serviceAccountHasRoles;
|
package/lib/previews.js
CHANGED
|
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
exports.previews = void 0;
|
|
4
4
|
const lodash_1 = require("lodash");
|
|
5
5
|
const configstore_1 = require("./configstore");
|
|
6
|
-
exports.previews = Object.assign({ rtdbrules: false, ext: false, extdev: false, rtdbmanagement: false, golang: false, deletegcfartifacts: false, emulatoruisnapshot: false, frameworkawareness: false, functionsparams: false }, configstore_1.configstore.get("previews"));
|
|
6
|
+
exports.previews = Object.assign({ rtdbrules: false, ext: false, extdev: false, rtdbmanagement: false, golang: false, deletegcfartifacts: false, emulatoruisnapshot: false, frameworkawareness: false, functionsparams: false, crossservicerules: false }, configstore_1.configstore.get("previews"));
|
|
7
7
|
if (process.env.FIREBASE_CLI_PREVIEWS) {
|
|
8
8
|
process.env.FIREBASE_CLI_PREVIEWS.split(",").forEach((feature) => {
|
|
9
9
|
if ((0, lodash_1.has)(exports.previews, feature)) {
|
package/lib/rulesDeploy.js
CHANGED
|
@@ -9,9 +9,14 @@ const logger_1 = require("./logger");
|
|
|
9
9
|
const error_1 = require("./error");
|
|
10
10
|
const utils = require("./utils");
|
|
11
11
|
const prompt_1 = require("./prompt");
|
|
12
|
+
const getProjectNumber_1 = require("./getProjectNumber");
|
|
13
|
+
const resourceManager_1 = require("./gcp/resourceManager");
|
|
14
|
+
const previews_1 = require("./previews");
|
|
12
15
|
const QUOTA_EXCEEDED_STATUS_CODE = 429;
|
|
13
16
|
const RULESET_COUNT_LIMIT = 1000;
|
|
14
17
|
const RULESETS_TO_GC = 10;
|
|
18
|
+
const CROSS_SERVICE_FUNCTIONS = /firestore\.(get|exists)/;
|
|
19
|
+
const CROSS_SERVICE_RULES_ROLE = "roles/firebaserules.firestoreServiceAgent";
|
|
15
20
|
var RulesetServiceType;
|
|
16
21
|
(function (RulesetServiceType) {
|
|
17
22
|
RulesetServiceType["CLOUD_FIRESTORE"] = "cloud.firestore";
|
|
@@ -54,17 +59,49 @@ class RulesDeploy {
|
|
|
54
59
|
}
|
|
55
60
|
return { latestName, latestContent };
|
|
56
61
|
}
|
|
62
|
+
async checkStorageRulesIamPermissions(rulesContent) {
|
|
63
|
+
if ((rulesContent === null || rulesContent === void 0 ? void 0 : rulesContent.match(CROSS_SERVICE_FUNCTIONS)) === null) {
|
|
64
|
+
return;
|
|
65
|
+
}
|
|
66
|
+
if (this.options.nonInteractive) {
|
|
67
|
+
return;
|
|
68
|
+
}
|
|
69
|
+
const projectNumber = await (0, getProjectNumber_1.getProjectNumber)(this.options);
|
|
70
|
+
const saEmail = `service-${projectNumber}@gcp-sa-firebasestorage.iam.gserviceaccount.com`;
|
|
71
|
+
try {
|
|
72
|
+
if (await (0, resourceManager_1.serviceAccountHasRoles)(projectNumber, saEmail, [CROSS_SERVICE_RULES_ROLE], true)) {
|
|
73
|
+
return;
|
|
74
|
+
}
|
|
75
|
+
const addRole = await (0, prompt_1.promptOnce)({
|
|
76
|
+
type: "confirm",
|
|
77
|
+
name: "rulesRole",
|
|
78
|
+
message: `Cloud Storage for Firebase needs an IAM Role to use cross-service rules. Grant the new role?`,
|
|
79
|
+
default: true,
|
|
80
|
+
}, this.options);
|
|
81
|
+
if (addRole) {
|
|
82
|
+
await (0, resourceManager_1.addServiceAccountToRoles)(projectNumber, saEmail, [CROSS_SERVICE_RULES_ROLE], true);
|
|
83
|
+
utils.logBullet(`${clc.bold.cyan(RulesetType[this.type] + ":")} updated service account for cross-service rules...`);
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
catch (e) {
|
|
87
|
+
logger_1.logger.warn("[rules] Error checking or updating Cloud Storage for Firebase service account permissions.");
|
|
88
|
+
logger_1.logger.warn("[rules] Cross-service Storage rules may not function properly", e.message);
|
|
89
|
+
}
|
|
90
|
+
}
|
|
57
91
|
async createRulesets(service) {
|
|
92
|
+
var _a;
|
|
58
93
|
const createdRulesetNames = [];
|
|
59
94
|
const { latestName: latestRulesetName, latestContent: latestRulesetContent } = await this.getCurrentRules(service);
|
|
60
95
|
const newRulesetsByFilename = new Map();
|
|
61
|
-
for (const filename of Object.
|
|
62
|
-
const files = this.rulesFiles[filename];
|
|
96
|
+
for (const [filename, files] of Object.entries(this.rulesFiles)) {
|
|
63
97
|
if (latestRulesetName && _.isEqual(files, latestRulesetContent)) {
|
|
64
98
|
utils.logBullet(`${clc.bold.cyan(RulesetType[this.type] + ":")} latest version of ${clc.bold(filename)} already up to date, skipping upload...`);
|
|
65
99
|
this.rulesetNames[filename] = latestRulesetName;
|
|
66
100
|
continue;
|
|
67
101
|
}
|
|
102
|
+
if (previews_1.previews.crossservicerules && service === RulesetServiceType.FIREBASE_STORAGE) {
|
|
103
|
+
await this.checkStorageRulesIamPermissions((_a = files[0]) === null || _a === void 0 ? void 0 : _a.content);
|
|
104
|
+
}
|
|
68
105
|
utils.logBullet(`${clc.bold.cyan(RulesetType[this.type] + ":")} uploading rules ${clc.bold(filename)}...`);
|
|
69
106
|
newRulesetsByFilename.set(filename, gcp.rules.createRuleset(this.options.project, files));
|
|
70
107
|
}
|
package/npm-shrinkwrap.json
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "firebase-tools",
|
|
3
|
-
"version": "11.4.
|
|
3
|
+
"version": "11.4.2",
|
|
4
4
|
"lockfileVersion": 2,
|
|
5
5
|
"requires": true,
|
|
6
6
|
"packages": {
|
|
7
7
|
"": {
|
|
8
8
|
"name": "firebase-tools",
|
|
9
|
-
"version": "11.4.
|
|
9
|
+
"version": "11.4.2",
|
|
10
10
|
"license": "MIT",
|
|
11
11
|
"dependencies": {
|
|
12
12
|
"@google-cloud/pubsub": "^3.0.1",
|