firebase-tools 13.7.5 → 13.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api.js +9 -1
- package/lib/apiv2.js +19 -13
- package/lib/apphosting/app.js +4 -3
- package/lib/apphosting/githubConnections.js +1 -1
- package/lib/apphosting/index.js +42 -18
- package/lib/checkValidTargetFilters.js +8 -1
- package/lib/commands/apphosting-backends-create.js +3 -3
- package/lib/commands/apphosting-backends-delete.js +24 -17
- package/lib/commands/apphosting-backends-list.js +3 -3
- package/lib/commands/apphosting-secrets-grantaccess.js +9 -5
- package/lib/commands/dataconnect-list.js +64 -0
- package/lib/commands/dataconnect-sdk-generate.js +36 -0
- package/lib/commands/dataconnect-sql-diff.js +25 -0
- package/lib/commands/dataconnect-sql-migrate.js +41 -0
- package/lib/commands/deploy.js +27 -1
- package/lib/commands/index.js +10 -0
- package/lib/commands/init.js +7 -0
- package/lib/commands/setup-emulators-dataconnect.js +12 -0
- package/lib/config.js +1 -0
- package/lib/dataconnect/build.js +23 -0
- package/lib/dataconnect/checkIam.js +30 -0
- package/lib/dataconnect/client.js +115 -0
- package/lib/dataconnect/dataplaneClient.js +16 -0
- package/lib/dataconnect/ensureApis.js +12 -0
- package/lib/dataconnect/fileUtils.js +89 -0
- package/lib/dataconnect/filters.js +45 -0
- package/lib/dataconnect/freeTrial.js +23 -0
- package/lib/dataconnect/graphqlError.js +13 -0
- package/lib/dataconnect/load.js +40 -0
- package/lib/dataconnect/names.js +48 -0
- package/lib/dataconnect/prompts.js +20 -0
- package/lib/dataconnect/provisionCloudSql.js +91 -0
- package/lib/dataconnect/schemaMigration.js +137 -0
- package/lib/dataconnect/types.js +23 -0
- package/lib/deploy/dataconnect/deploy.js +84 -0
- package/lib/deploy/dataconnect/index.js +9 -0
- package/lib/deploy/dataconnect/prepare.js +30 -0
- package/lib/deploy/dataconnect/release.js +67 -0
- package/lib/deploy/functions/checkIam.js +4 -34
- package/lib/deploy/index.js +2 -0
- package/lib/downloadUtils.js +2 -2
- package/lib/emulator/constants.js +3 -0
- package/lib/emulator/controller.js +38 -12
- package/lib/emulator/dataconnectEmulator.js +86 -0
- package/lib/emulator/download.js +1 -1
- package/lib/emulator/downloadableEmulators.js +42 -3
- package/lib/emulator/portUtils.js +3 -2
- package/lib/emulator/registry.js +5 -0
- package/lib/emulator/types.js +3 -0
- package/lib/experiments.js +5 -0
- package/lib/extensions/emulator/specHelper.js +5 -39
- package/lib/gcp/apphosting.js +6 -1
- package/lib/gcp/cloudsql/cloudsqladmin.js +155 -0
- package/lib/gcp/cloudsql/connect.js +127 -0
- package/lib/gcp/cloudsql/fbToolsAuthClient.js +42 -0
- package/lib/gcp/cloudsql/types.js +2 -0
- package/lib/gcp/iam.js +33 -1
- package/lib/gcp/secretManager.js +1 -1
- package/lib/init/features/dataconnect/index.js +124 -0
- package/lib/init/features/emulators.js +13 -0
- package/lib/init/features/functions/index.js +15 -3
- package/lib/init/features/index.js +3 -1
- package/lib/init/index.js +1 -0
- package/lib/logger.js +22 -2
- package/lib/operation-poller.js +7 -1
- package/lib/rc.js +10 -1
- package/lib/requireAuth.js +1 -0
- package/lib/utils.js +51 -4
- package/package.json +6 -2
- package/schema/connector-yaml.json +54 -0
- package/schema/dataconnect-yaml.json +72 -0
- package/schema/firebase-config.json +103 -0
- package/templates/extensions/javascript/package.lint.json +2 -2
- package/templates/extensions/javascript/package.nolint.json +2 -2
- package/templates/extensions/typescript/package.lint.json +2 -2
- package/templates/extensions/typescript/package.nolint.json +2 -2
- package/templates/init/dataconnect/connector.yaml +2 -0
- package/templates/init/dataconnect/dataconnect.yaml +10 -0
- package/templates/init/dataconnect/mutations.gql +5 -0
- package/templates/init/dataconnect/queries.gql +7 -0
- package/templates/init/dataconnect/schema.gql +16 -0
- package/templates/init/functions/javascript/_gitignore +2 -1
- package/templates/init/functions/javascript/package.lint.json +2 -2
- package/templates/init/functions/javascript/package.nolint.json +2 -2
- package/templates/init/functions/python/_gitignore +1 -0
- package/templates/init/functions/typescript/_gitignore +1 -0
- package/templates/init/functions/typescript/package.lint.json +2 -2
- package/templates/init/functions/typescript/package.nolint.json +2 -2
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.checkInstanceConfig = exports.installRequiredExtensions = exports.REQUIRED_EXTENSIONS_COMMANDS = exports.provisionCloudSql = void 0;
|
|
4
|
+
const cloudSqlAdminClient = require("../gcp/cloudsql/cloudsqladmin");
|
|
5
|
+
const connect_1 = require("../gcp/cloudsql/connect");
|
|
6
|
+
const utils = require("../utils");
|
|
7
|
+
const checkIam_1 = require("./checkIam");
|
|
8
|
+
const GOOGLE_ML_INTEGRATION_ROLE = "roles/aiplatform.user";
|
|
9
|
+
const freeTrial_1 = require("./freeTrial");
|
|
10
|
+
const error_1 = require("../error");
|
|
11
|
+
/**
 * Ensures the CloudSQL instance and database backing a Data Connect service
 * exist and are configured correctly, creating or updating them as needed.
 *
 * @param args.projectId GCP project to provision in.
 * @param args.locationId Region used if a new instance must be created.
 * @param args.instanceId CloudSQL instance to find or create.
 * @param args.databaseId Database to find or create on that instance.
 * @param args.enableGoogleMlIntegration Whether Google ML integration must be enabled.
 * @param args.silent Suppresses progress logging when true.
 * @returns The instance's connection name ("" when unavailable).
 */
async function provisionCloudSql(args) {
    let connectionName;
    const { projectId, locationId, instanceId, databaseId, enableGoogleMlIntegration, silent } = args;
    try {
        const existingInstance = await cloudSqlAdminClient.getInstance(projectId, instanceId);
        silent || utils.logLabeledBullet("dataconnect", `Found existing instance ${instanceId}.`);
        connectionName = (existingInstance && existingInstance.connectionName) || "";
        if (!checkInstanceConfig(existingInstance, enableGoogleMlIntegration)) {
            // BUGFIX: the two concatenated sentences previously ran together with
            // no separating space ("...Data Connect.Updating instance...").
            silent ||
                utils.logLabeledBullet("dataconnect", `Instance ${instanceId} settings not compatible with Firebase Data Connect. ` +
                    `Updating instance to enable Cloud IAM authentication and public IP. This may take a few minutes...`);
            await cloudSqlAdminClient.updateInstanceForDataConnect(existingInstance, enableGoogleMlIntegration);
            silent || utils.logLabeledBullet("dataconnect", "Instance updated");
        }
    }
    catch (err) {
        // Only a 404 (instance not found) falls through to creation; anything
        // else (permissions, quota, ...) is fatal.
        if (err.status !== 404) {
            throw err;
        }
        // A project gets at most one free-trial instance; if one already exists
        // we cannot create another under the trial.
        const freeTrialInstanceId = await (0, freeTrial_1.checkForFreeTrialInstance)(projectId);
        if (freeTrialInstanceId) {
            (0, freeTrial_1.printFreeTrialUnavailable)(projectId, freeTrialInstanceId);
            throw new error_1.FirebaseError("Free trial unavailable.");
        }
        silent ||
            utils.logLabeledBullet("dataconnect", `CloudSQL instance '${instanceId}' not found, creating it. This instance is provided under the terms of the Data Connect free trial ${(0, freeTrial_1.freeTrialTermsLink)()}`);
        // BUGFIX: message previously read "This may take while...".
        silent || utils.logLabeledBullet("dataconnect", `This may take a while...`);
        const newInstance = await cloudSqlAdminClient.createInstance(projectId, locationId, instanceId, enableGoogleMlIntegration);
        silent || utils.logLabeledBullet("dataconnect", "Instance created");
        connectionName = (newInstance && newInstance.connectionName) || "";
    }
    try {
        await cloudSqlAdminClient.getDatabase(projectId, instanceId, databaseId);
        silent || utils.logLabeledBullet("dataconnect", `Found existing database ${databaseId}.`);
    }
    catch (err) {
        // NOTE(review): any getDatabase failure (not just 404) is treated as
        // "database missing" and answered with a create attempt — presumably an
        // intentional best-effort; confirm before tightening this to 404-only.
        silent ||
            utils.logLabeledBullet("dataconnect", `Database ${databaseId} not found, creating it now...`);
        await cloudSqlAdminClient.createDatabase(projectId, instanceId, databaseId);
        silent || utils.logLabeledBullet("dataconnect", `Database ${databaseId} created.`);
    }
    if (enableGoogleMlIntegration) {
        await (0, checkIam_1.grantRolesToCloudSqlServiceAccount)(projectId, instanceId, [GOOGLE_ML_INTEGRATION_ROLE]);
    }
    return connectionName;
}
exports.provisionCloudSql = provisionCloudSql;
|
|
58
|
+
// SQL statements run against a Data Connect database before migrations are
// applied (see schemaMigration.js): they ensure the "public" schema exists and
// install the Postgres extensions the product relies on — uuid generation
// ("uuid-ossp"), vector similarity search ("vector"), and Google ML
// integration. Every statement is idempotent (IF NOT EXISTS), so re-running
// the list is always safe. Order matters: the schema must exist before
// extensions are installed into it.
exports.REQUIRED_EXTENSIONS_COMMANDS = [
    `CREATE SCHEMA IF NOT EXISTS "public"`,
    `CREATE EXTENSION IF NOT EXISTS "uuid-ossp" with SCHEMA public`,
    `CREATE EXTENSION IF NOT EXISTS "vector" with SCHEMA public`,
    `CREATE EXTENSION IF NOT EXISTS "google_ml_integration" with SCHEMA public CASCADE`,
];
|
|
64
|
+
/**
 * Installs the Postgres extensions Data Connect requires (see
 * REQUIRED_EXTENSIONS_COMMANDS) on the given database, connecting as the
 * supplied user. Runs silently; errors propagate to the caller.
 */
async function installRequiredExtensions(projectId, instanceId, databaseId, username) {
    const connectionOptions = {
        projectId,
        instanceId,
        databaseId,
        username,
        silent: true,
    };
    await (0, connect_1.execute)(exports.REQUIRED_EXTENSIONS_COMMANDS, connectionOptions);
}
exports.installRequiredExtensions = installRequiredExtensions;
|
|
74
|
+
/**
 * Returns true when an existing CloudSQL instance is already configured the
 * way Data Connect needs it: public IPv4 enabled, the IAM authentication
 * database flag on, and — when requireGoogleMlIntegration is set — Google ML
 * integration enabled both in settings and via its database flag.
 */
function checkInstanceConfig(instance, requireGoogleMlIntegration) {
    const settings = instance.settings;
    const ipConfig = settings.ipConfiguration;
    if (ipConfig == null || !ipConfig.ipv4Enabled) {
        return false;
    }
    const flags = settings.databaseFlags == null ? [] : settings.databaseFlags;
    const flagIsOn = (name) => flags.some((f) => f.name === name && f.value === "on");
    if (requireGoogleMlIntegration) {
        if (!settings.enableGoogleMlIntegration) {
            return false;
        }
        if (!flagIsOn("cloudsql.enable_google_ml_integration")) {
            return false;
        }
    }
    return flagIsOn("cloudsql.iam_authentication");
}
|
|
91
|
+
exports.checkInstanceConfig = checkInstanceConfig;
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.migrateSchema = exports.diffSchema = void 0;
|
|
4
|
+
const clc = require("colorette");
|
|
5
|
+
const sql_formatter_1 = require("sql-formatter");
|
|
6
|
+
const client_1 = require("./client");
|
|
7
|
+
const connect_1 = require("../gcp/cloudsql/connect");
|
|
8
|
+
const prompt_1 = require("../prompt");
|
|
9
|
+
const logger_1 = require("../logger");
|
|
10
|
+
const error_1 = require("../error");
|
|
11
|
+
const provisionCloudSql_1 = require("./provisionCloudSql");
|
|
12
|
+
const projectUtils_1 = require("../projectUtils");
|
|
13
|
+
const utils_1 = require("../utils");
|
|
14
|
+
const IMCOMPATIBLE_SCHEMA_ERROR_TYPESTRING = "type.googleapis.com/google.firebase.dataconnect.v1main.IncompatibleSqlSchemaError";
|
|
15
|
+
/**
 * Computes the SQL diff between a Data Connect schema and the current state of
 * its backing CloudSQL database, without applying anything.
 *
 * Works by attempting an upsert with the second flag set (presumably a
 * validate-only mode — upsertSchema is expected to fail with an
 * IncompatibleSqlSchemaError when the database differs; TODO confirm).
 *
 * @param schema The Data Connect schema to diff.
 * @returns The list of SQL diffs, or [] when the database is up to date.
 * @throws FirebaseError when the schema has no CloudSQL instance/database
 *   configured; rethrows any non-incompatibility error from the API.
 */
async function diffSchema(schema) {
    const postgres = schema.primaryDatasource.postgresql;
    const dbName = postgres ? postgres.database : undefined;
    const instanceName = postgres ? postgres.cloudSql.instance : undefined;
    if (!instanceName || !dbName) {
        // BUGFIX: the old message interpolated instanceName as "undefined" even
        // when instanceName was set and it was the database that was missing.
        throw new error_1.FirebaseError(`Cannot diff schema: instance (${instanceName}) or database (${dbName}) is undefined`);
    }
    try {
        await (0, client_1.upsertSchema)(schema, true);
    }
    catch (err) {
        const incompatible = getIncompatibleSchemaError(err);
        if (incompatible) {
            displaySchemaChanges(incompatible);
            return incompatible.diffs;
        }
        // Not an incompatibility — surface the original failure.
        throw err;
    }
    logger_1.logger.debug(`Schema was up to date for ${instanceName}:${dbName}`);
    return [];
}
exports.diffSchema = diffSchema;
|
|
37
|
+
/**
 * Attempts to upsert a Data Connect schema; when the database schema is
 * incompatible, prompts the user (or applies non-interactive policy) and
 * executes the migration SQL the API returned.
 *
 * @param options CLI options (project, force, nonInteractive).
 * @param schema The Data Connect schema to migrate to.
 * @param allowNonInteractiveMigration Whether a non-interactive run may
 *   execute migrations at all.
 * @returns The SQL diffs that were surfaced, or [] when already up to date.
 */
async function migrateSchema(options, schema, allowNonInteractiveMigration) {
    const projectId = (0, projectUtils_1.needProjectId)(options);
    const postgres = schema.primaryDatasource.postgresql;
    const databaseId = postgres ? postgres.database : undefined;
    if (!databaseId) {
        throw new error_1.FirebaseError("Schema is missing primaryDatasource.postgresql?.database, cannot migrate");
    }
    const instanceId = postgres ? postgres.cloudSql.instance.split("/").pop() : undefined;
    if (!instanceId) {
        throw new error_1.FirebaseError(`tried to migrate schema but ${instanceId} was undefined`);
    }
    // The migration SQL is executed as this IAM user.
    const iamUser = await (0, connect_1.setupIAMUser)(instanceId, databaseId, options);
    try {
        await (0, client_1.upsertSchema)(schema, true);
    }
    catch (err) {
        const incompatible = getIncompatibleSchemaError(err);
        if (!incompatible) {
            throw err;
        }
        const choice = await promptForSchemaMigration(options, databaseId, incompatible, allowNonInteractiveMigration);
        // "all" runs everything, "safe" skips destructive diffs, "none" runs nothing.
        const shouldRun = (diff) => choice === "all" || (choice === "safe" && !diff.destructive);
        const commandsToExecute = incompatible.diffs.filter(shouldRun).map((diff) => diff.sql);
        if (!commandsToExecute.length) {
            // Nothing approved for execution — surface the incompatibility.
            throw err;
        }
        await (0, connect_1.execute)([
            ...provisionCloudSql_1.REQUIRED_EXTENSIONS_COMMANDS,
            ...commandsToExecute,
            `GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA "public" TO PUBLIC`,
        ], {
            projectId,
            instanceId,
            databaseId,
            username: iamUser,
        });
        return incompatible.diffs;
    }
    logger_1.logger.debug(`Schema was up to date for ${instanceId}:${databaseId}`);
    return [];
}
exports.migrateSchema = migrateSchema;
|
|
88
|
+
/**
 * Shows the pending schema changes and decides how much of the migration to
 * run: "all", "safe" (non-destructive only), or "none".
 *
 * Interactive runs prompt the user; non-interactive runs apply policy:
 * refuse unless migration is allowed, run everything under --force, run safe
 * changes automatically, and refuse destructive changes without --force.
 */
async function promptForSchemaMigration(options, databaseName, err, allowNonInteractiveMigration) {
    displaySchemaChanges(err);
    if (options.nonInteractive) {
        if (!allowNonInteractiveMigration) {
            logger_1.logger.error("Your database schema is incompatible with your Data Connect schema. Run `firebase dataconnect:sql:migrate` to migrate your database schema");
            return "none";
        }
        if (options.force) {
            return "all";
        }
        if (!err.destructive) {
            return "safe";
        }
        logger_1.logger.error("This schema migration includes potentially destructive changes. If you'd like to execute it anyway, rerun this command with --force");
        return "none";
    }
    const abortChoice = { name: "Abort changes", value: "none" };
    const choices = err.destructive
        ? [
            { name: "Execute all changes (including destructive changes)", value: "all" },
            { name: "Execute only safe changes", value: "safe" },
            abortChoice,
        ]
        : [
            { name: "Execute changes", value: "safe" },
            abortChoice,
        ];
    return await (0, prompt_1.promptOnce)({
        message: `Would you like to execute these changes against ${databaseName}?`,
        type: "list",
        choices,
    });
}
|
|
122
|
+
/**
 * Logs a warning listing every SQL statement the migration would run, with
 * each diff pretty-printed by toString().
 */
function displaySchemaChanges(error) {
    const formattedDiffs = error.diffs.map((diff) => toString(diff)).join("\n");
    const message = "Your new schema is incompatible with the schema of your CloudSQL database. " +
        "The following SQL statements will migrate your database schema to match your new Data Connect schema.\n" +
        formattedDiffs;
    (0, utils_1.logLabeledWarning)("dataconnect", message);
}
|
|
128
|
+
/**
 * Renders a single SQL diff as a comment header (red "Destructive: " prefix
 * when applicable, then the description) followed by the formatted SQL.
 */
function toString(diff) {
    const prefix = diff.destructive ? clc.red("Destructive: ") : "";
    const prettySql = (0, sql_formatter_1.format)(diff.sql, { language: "postgresql" });
    return `\/** ${prefix}${diff.description}*\/\n${prettySql}`;
}
|
|
131
|
+
/**
 * Extracts the first IncompatibleSqlSchemaError detail from an API error, or
 * returns undefined when the error is not a structured incompatibility error
 * (callers then rethrow the original error).
 */
function getIncompatibleSchemaError(err) {
    const context = err.context;
    // BUGFIX: previously only err.context was optional-chained; a missing
    // body/error/details caused a TypeError here that masked the real error
    // instead of letting the caller rethrow it.
    const original = context && context.body ? context.body.error : undefined;
    if (!original || !Array.isArray(original.details)) {
        return undefined;
    }
    const incompatibles = original.details.filter((d) => d["@type"] === IMCOMPATIBLE_SCHEMA_ERROR_TYPESTRING);
    return incompatibles[0];
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.toDatasource = exports.requiresVector = exports.SCHEMA_ID = void 0;
|
|
4
|
+
exports.SCHEMA_ID = "main";
|
|
5
|
+
/**
 * Returns true when deployment metadata declares the "vector" Postgres
 * extension among its required extensions; false for missing metadata or any
 * absent link in the chain.
 */
function requiresVector(dm) {
    const source = dm == null ? undefined : dm.primaryDataSource;
    const postgres = source == null ? undefined : source.postgres;
    const extensions = postgres == null ? undefined : postgres.requiredExtensions;
    return extensions == null ? false : extensions.includes("vector");
}
|
|
9
|
+
exports.requiresVector = requiresVector;
|
|
10
|
+
/**
 * Converts a local datasource config into its API representation, expanding
 * the CloudSQL instance id into a fully-qualified resource name. Returns an
 * empty object for non-postgresql datasources.
 */
function toDatasource(projectId, locationId, ds) {
    if (!ds.postgresql) {
        return {};
    }
    const instance = `projects/${projectId}/locations/${locationId}/instances/${ds.postgresql.cloudSql.instanceId}`;
    return {
        postgresql: {
            database: ds.postgresql.database,
            cloudSql: { instance },
        },
    };
}
|
|
23
|
+
exports.toDatasource = toDatasource;
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const client = require("../../dataconnect/client");
|
|
4
|
+
const utils = require("../../utils");
|
|
5
|
+
const types_1 = require("../../dataconnect/types");
|
|
6
|
+
const projectUtils_1 = require("../../projectUtils");
|
|
7
|
+
const provisionCloudSql_1 = require("../../dataconnect/provisionCloudSql");
|
|
8
|
+
const names_1 = require("../../dataconnect/names");
|
|
9
|
+
const prompt_1 = require("../../prompt");
|
|
10
|
+
const api_1 = require("../../api");
|
|
11
|
+
const ensureApiEnabled = require("../../ensureApiEnabled");
|
|
12
|
+
/**
 * Deploy step for Data Connect: reconciles the services declared locally
 * (context.dataconnect.serviceInfos, populated by the prepare step) against
 * the services that already exist in the project, then ensures the backing
 * CloudSQL resources are provisioned.
 *
 * @param context Deploy context; reads context.dataconnect.{serviceInfos,filters}.
 * @param options CLI options (force, nonInteractive, project selection).
 */
async function default_1(context, options) {
    const projectId = (0, projectUtils_1.needProjectId)(options);
    const serviceInfos = context.dataconnect.serviceInfos;
    const services = await client.listAllServices(projectId);
    const filters = context.dataconnect.filters;
    // If any local service needs the "vector" extension, enable the Vertex AI
    // API first (vector support is backed by it).
    if (serviceInfos.some((si) => {
        return (0, types_1.requiresVector)(si.deploymentMetadata);
    })) {
        await ensureApiEnabled.ensure(projectId, (0, api_1.vertexAIOrigin)(), "dataconnect");
    }
    // Services declared locally but missing remotely, restricted to the
    // --only filters when any were given.
    const servicesToCreate = serviceInfos
        .filter((si) => !services.some((s) => matches(si, s)))
        .filter((si) => {
        return !filters || (filters === null || filters === void 0 ? void 0 : filters.some((f) => si.dataConnectYaml.serviceId === f.serviceId));
    });
    // Remote services absent locally are deletion candidates — but only on a
    // full (unfiltered) deploy; filtered deploys never delete anything.
    const servicesToDelete = filters
        ? []
        : services.filter((s) => !serviceInfos.some((si) => matches(si, s)));
    await Promise.all(servicesToCreate.map(async (s) => {
        const { projectId, locationId, serviceId } = splitName(s.serviceName);
        await client.createService(projectId, locationId, serviceId);
        utils.logLabeledSuccess("dataconnect", `Created service ${s.serviceName}`);
    }));
    if (servicesToDelete.length) {
        // Deletion is destructive, so confirm first (--force skips the prompt).
        if (await (0, prompt_1.confirm)({
            force: options.force,
            nonInteractive: options.nonInteractive,
            message: `The following services exist on ${projectId} but are not listed in your 'firebase.json'\n${servicesToDelete
                .map((s) => s.name)
                .join("\n")}\nWould you like to delete these services?`,
        })) {
            await Promise.all(servicesToDelete.map(async (s) => {
                const { projectId, locationId, serviceId } = splitName(s.name);
                await client.deleteService(projectId, locationId, serviceId);
                utils.logLabeledSuccess("dataconnect", `Deleted service ${s.name}`);
            }));
        }
    }
    utils.logLabeledBullet("dataconnect", "Checking for CloudSQL resources...");
    // Provision (or reconfigure) the CloudSQL instance/database behind each
    // filtered-in service; services without a postgresql datasource are skipped.
    await Promise.all(serviceInfos
        .filter((si) => {
        return !filters || (filters === null || filters === void 0 ? void 0 : filters.some((f) => si.dataConnectYaml.serviceId === f.serviceId));
    })
        .map(async (s) => {
        var _a, _b;
        const instanceId = (_a = s.schema.primaryDatasource.postgresql) === null || _a === void 0 ? void 0 : _a.cloudSql.instance.split("/").pop();
        const databaseId = (_b = s.schema.primaryDatasource.postgresql) === null || _b === void 0 ? void 0 : _b.database;
        if (!instanceId || !databaseId) {
            return Promise.resolve();
        }
        const enableGoogleMlIntegration = (0, types_1.requiresVector)(s.deploymentMetadata);
        return (0, provisionCloudSql_1.provisionCloudSql)({
            projectId,
            locationId: (0, names_1.parseServiceName)(s.serviceName).location,
            instanceId,
            databaseId,
            enableGoogleMlIntegration,
        });
    }));
    return;
}
exports.default = default_1;
|
|
74
|
+
/**
 * True when a local service info and a deployed service refer to the same
 * fully-qualified service resource name.
 */
function matches(si, s) {
    const deployedName = s.name;
    return deployedName === si.serviceName;
}
|
|
77
|
+
/**
 * Splits a fully-qualified service name of the form
 * "projects/{p}/locations/{l}/services/{s}" into its three ids.
 * Missing segments come back as undefined.
 */
function splitName(serviceName) {
    const [, projectId, , locationId, , serviceId] = serviceName.split("/");
    return { projectId, locationId, serviceId };
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.release = exports.deploy = exports.prepare = void 0;
|
|
4
|
+
const prepare_1 = require("./prepare");
|
|
5
|
+
exports.prepare = prepare_1.default;
|
|
6
|
+
const deploy_1 = require("./deploy");
|
|
7
|
+
exports.deploy = deploy_1.default;
|
|
8
|
+
const release_1 = require("./release");
|
|
9
|
+
exports.release = release_1.default;
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const path = require("path");
|
|
4
|
+
const load_1 = require("../../dataconnect/load");
|
|
5
|
+
const fileUtils_1 = require("../../dataconnect/fileUtils");
|
|
6
|
+
const logger_1 = require("../../logger");
|
|
7
|
+
const utils = require("../../utils");
|
|
8
|
+
const projectUtils_1 = require("../../projectUtils");
|
|
9
|
+
const filters_1 = require("../../dataconnect/filters");
|
|
10
|
+
const build_1 = require("../../dataconnect/build");
|
|
11
|
+
const ensureApis_1 = require("../../dataconnect/ensureApis");
|
|
12
|
+
/**
 * Prepare step for Data Connect deploys: ensures the required APIs are
 * enabled, loads and builds every service configured in firebase.json, and
 * stashes the results (plus any --only filters) on the deploy context for the
 * later deploy/release steps.
 */
async function default_1(context, options) {
    const projectId = (0, projectUtils_1.needProjectId)(options);
    await (0, ensureApis_1.ensureApis)(projectId);
    const serviceCfgs = (0, fileUtils_1.readFirebaseJson)(options.config);
    utils.logLabeledBullet("dataconnect", `Preparing to deploy`);
    const filters = (0, filters_1.getResourceFilters)(options);
    const baseDir = options.cwd || process.cwd();
    const serviceInfos = await Promise.all(serviceCfgs.map((cfg) => (0, load_1.load)(projectId, cfg.location, path.join(baseDir, cfg.source))));
    // Build each service sequentially and attach the resulting metadata.
    for (const serviceInfo of serviceInfos) {
        serviceInfo.deploymentMetadata = await (0, build_1.build)(options, serviceInfo.sourceDirectory);
    }
    context.dataconnect = {
        serviceInfos,
        filters,
    };
    utils.logLabeledBullet("dataconnect", `Successfully prepared schema and connectors`);
    logger_1.logger.debug(JSON.stringify(context.dataconnect, null, 2));
}
exports.default = default_1;
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const utils = require("../../utils");
|
|
4
|
+
const client_1 = require("../../dataconnect/client");
|
|
5
|
+
const prompts_1 = require("../../dataconnect/prompts");
|
|
6
|
+
const error_1 = require("../../error");
|
|
7
|
+
const schemaMigration_1 = require("../../dataconnect/schemaMigration");
|
|
8
|
+
/**
 * Release step for Data Connect: migrates database schemas, releases
 * (upserts) the declared schemas, then releases connectors and prompts to
 * delete connectors that are no longer declared locally.
 *
 * @param context Deploy context; reads context.dataconnect.{serviceInfos,filters}.
 * @param options CLI options (passed through to schema migration and prompts).
 * @throws FirebaseError listing every schema upsert that failed.
 */
async function default_1(context, options) {
    const serviceInfos = context.dataconnect.serviceInfos;
    const filters = context.dataconnect.filters;
    // Schemas to release: all on an unfiltered deploy, otherwise only services
    // matched by a schema-level or full-service filter.
    const wantSchemas = serviceInfos
        .filter((si) => {
        return (!filters ||
            filters.some((f) => {
                return f.serviceId === si.dataConnectYaml.serviceId && (f.schemaOnly || f.fullService);
            }));
    })
        .map((s) => s.schema);
    if (wantSchemas.length) {
        utils.logLabeledBullet("dataconnect", "Checking if database schemas match Data Connect schemas...");
        // Sequential: each service's migration prompts and SQL finish before the
        // next begins.
        for (const s of wantSchemas) {
            await (0, schemaMigration_1.migrateSchema)(options, s, false);
        }
        utils.logLabeledBullet("dataconnect", "Releasing schemas...");
        // allSettled so every schema is attempted before failures are reported
        // together.
        const schemaPromises = await Promise.allSettled(wantSchemas.map((s) => (0, client_1.upsertSchema)(s)));
        const failedSchemas = schemaPromises.filter((p) => p.status === "rejected");
        if (failedSchemas.length) {
            throw new error_1.FirebaseError(`Errors while updating your schemas:\n ${failedSchemas.map((f) => f.reason).join("\n")}`);
        }
        utils.logLabeledBullet("dataconnect", "Schemas released.");
    }
    // Connectors to release: matched by connector-id or full-service filters.
    let wantConnectors = [];
    wantConnectors = wantConnectors.concat(...serviceInfos.map((si) => si.connectorInfo
        .filter((c) => {
        return (!filters ||
            filters.some((f) => {
                return (f.serviceId === si.dataConnectYaml.serviceId &&
                    (f.connectorId === c.connectorYaml.connectorId || f.fullService));
            }));
    })
        .map((c) => c.connector)));
    const haveConnectors = await have(serviceInfos);
    // Remote-only connectors are deletion candidates, but only on an
    // unfiltered deploy.
    const connectorsToDelete = filters
        ? []
        : haveConnectors.filter((h) => !wantConnectors.some((w) => w.name === h.name));
    if (wantConnectors.length) {
        utils.logLabeledBullet("dataconnect", "Releasing connectors...");
        await Promise.all(wantConnectors.map(async (c) => {
            await (0, client_1.upsertConnector)(c);
            utils.logLabeledSuccess("dataconnect", `Deployed connector ${c.name}`);
        }));
        // Sequential: each delete prompts the user before proceeding.
        for (const c of connectorsToDelete) {
            await (0, prompts_1.promptDeleteConnector)(options, c.name);
        }
        utils.logLabeledBullet("dataconnect", "Connectors released.");
    }
    utils.logLabeledSuccess("dataconnect", "Deploy complete!");
    return;
}
exports.default = default_1;
|
|
61
|
+
/**
 * Lists all connectors currently deployed across the given services, one
 * listConnectors call per service (sequential), flattened into a single array.
 */
async function have(serviceInfos) {
    const perService = [];
    for (const si of serviceInfos) {
        perService.push(await (0, client_1.listConnectors)(si.serviceName));
    }
    return [].concat(...perService);
}
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.ensureServiceAgentRoles = exports.
|
|
3
|
+
exports.ensureServiceAgentRoles = exports.obtainDefaultComputeServiceAgentBindings = exports.obtainPubSubServiceAgentBindings = exports.checkHttpIam = exports.checkServiceAccountIam = exports.EVENTARC_EVENT_RECEIVER_ROLE = exports.RUN_INVOKER_ROLE = exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = void 0;
|
|
4
4
|
const colorette_1 = require("colorette");
|
|
5
5
|
const logger_1 = require("../../logger");
|
|
6
6
|
const functionsDeployHelper_1 = require("./functionsDeployHelper");
|
|
@@ -102,36 +102,6 @@ function obtainDefaultComputeServiceAgentBindings(projectNumber) {
|
|
|
102
102
|
return [runInvokerBinding, eventarcEventReceiverBinding];
|
|
103
103
|
}
|
|
104
104
|
exports.obtainDefaultComputeServiceAgentBindings = obtainDefaultComputeServiceAgentBindings;
|
|
105
|
-
function mergeBindings(policy, requiredBindings) {
|
|
106
|
-
let updated = false;
|
|
107
|
-
for (const requiredBinding of requiredBindings) {
|
|
108
|
-
const match = policy.bindings.find((b) => b.role === requiredBinding.role);
|
|
109
|
-
if (!match) {
|
|
110
|
-
updated = true;
|
|
111
|
-
policy.bindings.push(requiredBinding);
|
|
112
|
-
continue;
|
|
113
|
-
}
|
|
114
|
-
for (const requiredMember of requiredBinding.members) {
|
|
115
|
-
if (!match.members.find((m) => m === requiredMember)) {
|
|
116
|
-
updated = true;
|
|
117
|
-
match.members.push(requiredMember);
|
|
118
|
-
}
|
|
119
|
-
}
|
|
120
|
-
}
|
|
121
|
-
return updated;
|
|
122
|
-
}
|
|
123
|
-
exports.mergeBindings = mergeBindings;
|
|
124
|
-
function printManualIamConfig(requiredBindings, projectId) {
|
|
125
|
-
utils.logLabeledBullet("functions", "Failed to verify the project has the correct IAM bindings for a successful deployment.", "warn");
|
|
126
|
-
utils.logLabeledBullet("functions", "You can either re-run `firebase deploy` as a project owner or manually run the following set of `gcloud` commands:", "warn");
|
|
127
|
-
for (const binding of requiredBindings) {
|
|
128
|
-
for (const member of binding.members) {
|
|
129
|
-
utils.logLabeledBullet("functions", `\`gcloud projects add-iam-policy-binding ${projectId} ` +
|
|
130
|
-
`--member=${member} ` +
|
|
131
|
-
`--role=${binding.role}\``, "warn");
|
|
132
|
-
}
|
|
133
|
-
}
|
|
134
|
-
}
|
|
135
105
|
async function ensureServiceAgentRoles(projectId, projectNumber, want, have) {
|
|
136
106
|
const wantServices = backend.allEndpoints(want).reduce(reduceEventsToServices, []);
|
|
137
107
|
const haveServices = backend.allEndpoints(have).reduce(reduceEventsToServices, []);
|
|
@@ -157,13 +127,13 @@ async function ensureServiceAgentRoles(projectId, projectNumber, want, have) {
|
|
|
157
127
|
policy = await (0, resourceManager_1.getIamPolicy)(projectNumber);
|
|
158
128
|
}
|
|
159
129
|
catch (err) {
|
|
160
|
-
printManualIamConfig(requiredBindings, projectId);
|
|
130
|
+
iam.printManualIamConfig(requiredBindings, projectId, "functions");
|
|
161
131
|
utils.logLabeledBullet("functions", "Could not verify the necessary IAM configuration for the following newly-integrated services: " +
|
|
162
132
|
`${newServices.map((service) => service.api).join(", ")}` +
|
|
163
133
|
". Deployment may fail.", "warn");
|
|
164
134
|
return;
|
|
165
135
|
}
|
|
166
|
-
const hasUpdatedBindings = mergeBindings(policy, requiredBindings);
|
|
136
|
+
const hasUpdatedBindings = iam.mergeBindings(policy, requiredBindings);
|
|
167
137
|
if (!hasUpdatedBindings) {
|
|
168
138
|
return;
|
|
169
139
|
}
|
|
@@ -171,7 +141,7 @@ async function ensureServiceAgentRoles(projectId, projectNumber, want, have) {
|
|
|
171
141
|
await (0, resourceManager_1.setIamPolicy)(projectNumber, policy, "bindings");
|
|
172
142
|
}
|
|
173
143
|
catch (err) {
|
|
174
|
-
printManualIamConfig(requiredBindings, projectId);
|
|
144
|
+
iam.printManualIamConfig(requiredBindings, projectId, "functions");
|
|
175
145
|
throw new error_1.FirebaseError("We failed to modify the IAM policy for the project. The functions " +
|
|
176
146
|
"deployment requires specific roles to be granted to service agents," +
|
|
177
147
|
" otherwise the deployment will fail.", { original: err });
|
package/lib/deploy/index.js
CHANGED
|
@@ -19,6 +19,7 @@ const FunctionsTarget = require("./functions");
|
|
|
19
19
|
const StorageTarget = require("./storage");
|
|
20
20
|
const RemoteConfigTarget = require("./remoteconfig");
|
|
21
21
|
const ExtensionsTarget = require("./extensions");
|
|
22
|
+
const DataConnectTarget = require("./dataconnect");
|
|
22
23
|
const frameworks_1 = require("../frameworks");
|
|
23
24
|
const prepare_1 = require("./hosting/prepare");
|
|
24
25
|
const github_1 = require("../init/features/hosting/github");
|
|
@@ -32,6 +33,7 @@ const TARGETS = {
|
|
|
32
33
|
storage: StorageTarget,
|
|
33
34
|
remoteconfig: RemoteConfigTarget,
|
|
34
35
|
extensions: ExtensionsTarget,
|
|
36
|
+
dataconnect: DataConnectTarget,
|
|
35
37
|
};
|
|
36
38
|
const chain = async function (fns, context, options, payload) {
|
|
37
39
|
for (const latest of fns) {
|
package/lib/downloadUtils.js
CHANGED
|
@@ -7,9 +7,9 @@ const ProgressBar = require("progress");
|
|
|
7
7
|
const tmp = require("tmp");
|
|
8
8
|
const apiv2_1 = require("./apiv2");
|
|
9
9
|
const error_1 = require("./error");
|
|
10
|
-
async function downloadToTmp(remoteUrl) {
|
|
10
|
+
async function downloadToTmp(remoteUrl, auth = false) {
|
|
11
11
|
const u = new url_1.URL(remoteUrl);
|
|
12
|
-
const c = new apiv2_1.Client({ urlPrefix: u.origin, auth
|
|
12
|
+
const c = new apiv2_1.Client({ urlPrefix: u.origin, auth });
|
|
13
13
|
const tmpfile = tmp.fileSync();
|
|
14
14
|
const writeStream = fs.createWriteStream(tmpfile.name);
|
|
15
15
|
const res = await c.request({
|
|
@@ -14,6 +14,7 @@ exports.DEFAULT_PORTS = {
|
|
|
14
14
|
auth: 9099,
|
|
15
15
|
storage: 9199,
|
|
16
16
|
eventarc: 9299,
|
|
17
|
+
dataconnect: 9399,
|
|
17
18
|
};
|
|
18
19
|
exports.FIND_AVAILBLE_PORT_BY_DEFAULT = {
|
|
19
20
|
ui: true,
|
|
@@ -28,6 +29,7 @@ exports.FIND_AVAILBLE_PORT_BY_DEFAULT = {
|
|
|
28
29
|
storage: false,
|
|
29
30
|
extensions: false,
|
|
30
31
|
eventarc: true,
|
|
32
|
+
dataconnect: true,
|
|
31
33
|
};
|
|
32
34
|
exports.EMULATOR_DESCRIPTION = {
|
|
33
35
|
ui: "Emulator UI",
|
|
@@ -42,6 +44,7 @@ exports.EMULATOR_DESCRIPTION = {
|
|
|
42
44
|
storage: "Storage Emulator",
|
|
43
45
|
extensions: "Extensions Emulator",
|
|
44
46
|
eventarc: "Eventarc Emulator",
|
|
47
|
+
dataconnect: "Data Connect Emulator",
|
|
45
48
|
};
|
|
46
49
|
exports.DEFAULT_HOST = "localhost";
|
|
47
50
|
class Constants {
|