firebase-tools 13.34.0 → 13.35.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/lib/apphosting/config.js +8 -6
  2. package/lib/apphosting/yaml.js +21 -48
  3. package/lib/commands/dataconnect-sdk-generate.js +4 -1
  4. package/lib/commands/dataconnect-sql-diff.js +1 -1
  5. package/lib/commands/dataconnect-sql-setup.js +6 -1
  6. package/lib/commands/functions-artifacts-setpolicy.js +125 -0
  7. package/lib/commands/index.js +2 -0
  8. package/lib/commands/open.js +3 -0
  9. package/lib/dataconnect/build.js +3 -1
  10. package/lib/dataconnect/fileUtils.js +8 -4
  11. package/lib/dataconnect/schemaMigration.js +27 -24
  12. package/lib/defaultCredentials.js +12 -1
  13. package/lib/deploy/dataconnect/prepare.js +1 -1
  14. package/lib/deploy/functions/containerCleaner.js +17 -2
  15. package/lib/deploy/functions/runtimes/discovery/index.js +3 -1
  16. package/lib/deploy/index.js +10 -4
  17. package/lib/emulator/ExpressBasedEmulator.js +1 -1
  18. package/lib/emulator/apphosting/index.js +1 -0
  19. package/lib/emulator/apphosting/serve.js +48 -7
  20. package/lib/emulator/controller.js +1 -0
  21. package/lib/emulator/dataconnect/pgliteServer.js +7 -2
  22. package/lib/emulator/dataconnectEmulator.js +15 -4
  23. package/lib/emulator/downloadableEmulators.js +9 -9
  24. package/lib/emulator/env.js +17 -1
  25. package/lib/emulator/functionsEmulator.js +2 -20
  26. package/lib/extensions/extensionsHelper.js +8 -4
  27. package/lib/frameworks/angular/utils.js +60 -42
  28. package/lib/functions/artifacts.js +104 -0
  29. package/lib/gcp/artifactregistry.js +27 -2
  30. package/lib/gcp/cloudsql/connect.js +17 -5
  31. package/lib/gcp/cloudsql/permissions_setup.js +28 -14
  32. package/lib/init/features/dataconnect/index.js +1 -1
  33. package/lib/init/features/dataconnect/sdk.js +23 -3
  34. package/package.json +1 -1
  35. package/templates/init/dataconnect/connector.yaml +1 -1
@@ -139,20 +139,22 @@ async function exportConfig(cwd, projectRoot, backendRoot, projectId, userGivenC
139
139
  localAppHostingConfig = await yaml_1.AppHostingYamlConfig.loadFromFile(localAppHostingConfigPath);
140
140
  }
141
141
  const configToExport = await loadConfigToExportSecrets(cwd, userGivenConfigFile);
142
- const secretsToExport = configToExport.secrets;
142
+ const secretsToExport = Object.entries(configToExport.env)
143
+ .filter(([, env]) => env.secret)
144
+ .map(([variable, env]) => {
145
+ return Object.assign({ variable }, env);
146
+ });
143
147
  if (!secretsToExport) {
144
148
  logger_1.logger.info("No secrets found to export in the chosen App Hosting config files");
145
149
  return;
146
150
  }
147
151
  const secretMaterial = await (0, secrets_1.fetchSecrets)(projectId, secretsToExport);
148
152
  for (const [key, value] of secretMaterial) {
149
- localAppHostingConfig.addEnvironmentVariable({
150
- variable: key,
151
- value: value,
153
+ localAppHostingConfig.env[key] = {
154
+ value,
152
155
  availability: ["RUNTIME"],
153
- });
156
+ };
154
157
  }
155
- localAppHostingConfig.clearSecrets();
156
158
  localAppHostingConfig.upsertFile(localAppHostingConfigPath);
157
159
  logger_1.logger.info(`Wrote secrets as environment variables to ${exports.APPHOSTING_LOCAL_YAML_FILE}.`);
158
160
  (0, utils_2.updateOrCreateGitignore)(projectRoot, [exports.APPHOSTING_LOCAL_YAML_FILE]);
@@ -6,53 +6,40 @@ const utils_1 = require("../utils");
6
6
  const config_1 = require("./config");
7
7
  const yaml = require("yaml");
8
8
  const jsYaml = require("js-yaml");
9
+ const path = require("path");
9
10
  const fsutils_1 = require("../fsutils");
10
11
  const error_1 = require("../error");
11
12
  class AppHostingYamlConfig {
13
+ constructor() {
14
+ this.env = {};
15
+ }
12
16
  static async loadFromFile(filePath) {
13
17
  var _a;
14
- const config = new AppHostingYamlConfig();
15
18
  if (!(0, fsutils_1.fileExistsSync)(filePath)) {
16
- throw new error_1.FirebaseError("Cannot load AppHostingYamlConfig from given path, it doesn't exist");
19
+ throw new error_1.FirebaseError(`Cannot load ${filePath} from given path, it doesn't exist`);
17
20
  }
21
+ const config = new AppHostingYamlConfig();
18
22
  const file = await (0, utils_1.readFileFromDirectory)((0, path_1.dirname)(filePath), (0, path_1.basename)(filePath));
23
+ config.filename = path.basename(filePath);
19
24
  const loadedAppHostingYaml = (_a = (await (0, utils_1.wrappedSafeLoad)(file.source))) !== null && _a !== void 0 ? _a : {};
20
25
  if (loadedAppHostingYaml.env) {
21
- const parsedEnvs = parseEnv(loadedAppHostingYaml.env);
22
- config._environmentVariables = parsedEnvs.environmentVariables;
23
- config._secrets = parsedEnvs.secrets;
26
+ config.env = parseEnv(loadedAppHostingYaml.env);
24
27
  }
25
28
  return config;
26
29
  }
27
30
  static empty() {
28
31
  return new AppHostingYamlConfig();
29
32
  }
30
- constructor() {
31
- this._environmentVariables = new Map();
32
- this._secrets = new Map();
33
- }
34
- get environmentVariables() {
35
- return mapToArray(this._environmentVariables);
36
- }
37
- get secrets() {
38
- return mapToArray(this._secrets);
39
- }
40
- addEnvironmentVariable(env) {
41
- this._environmentVariables.set(env.variable, env);
42
- }
43
- addSecret(secret) {
44
- this._secrets.set(secret.variable, secret);
45
- }
46
- clearSecrets() {
47
- this._secrets.clear();
48
- }
49
- merge(other) {
50
- for (const [key, value] of other._environmentVariables) {
51
- this._environmentVariables.set(key, value);
52
- }
53
- for (const [key, value] of other._secrets) {
54
- this._secrets.set(key, value);
33
+ merge(other, allowSecretsToBecomePlaintext = true) {
34
+ if (!allowSecretsToBecomePlaintext) {
35
+ const wereSecrets = Object.entries(this.env)
36
+ .filter(([, env]) => env.secret)
37
+ .map(([key]) => key);
38
+ if (wereSecrets.some((key) => { var _a; return (_a = other.env[key]) === null || _a === void 0 ? void 0 : _a.value; })) {
39
+ throw new error_1.FirebaseError(`Cannot convert secret to plaintext in ${other.filename ? other.filename : "apphosting yaml"}`);
40
+ }
55
41
  }
42
+ this.env = Object.assign(Object.assign({}, this.env), other.env);
56
43
  }
57
44
  async upsertFile(filePath) {
58
45
  let yamlConfigToWrite = {};
@@ -60,27 +47,13 @@ class AppHostingYamlConfig {
60
47
  const file = await (0, utils_1.readFileFromDirectory)((0, path_1.dirname)(filePath), (0, path_1.basename)(filePath));
61
48
  yamlConfigToWrite = await (0, utils_1.wrappedSafeLoad)(file.source);
62
49
  }
63
- yamlConfigToWrite.env = [...this.environmentVariables, ...this.secrets];
50
+ yamlConfigToWrite.env = Object.entries(this.env).map(([variable, env]) => {
51
+ return Object.assign({ variable }, env);
52
+ });
64
53
  (0, config_1.store)(filePath, yaml.parseDocument(jsYaml.dump(yamlConfigToWrite)));
65
54
  }
66
55
  }
67
56
  exports.AppHostingYamlConfig = AppHostingYamlConfig;
68
57
  function parseEnv(envs) {
69
- const environmentVariables = new Map();
70
- const secrets = new Map();
71
- for (const env of envs) {
72
- if (env.value) {
73
- environmentVariables.set(env.variable, env);
74
- }
75
- if (env.secret) {
76
- secrets.set(env.variable, env);
77
- }
78
- }
79
- return {
80
- environmentVariables,
81
- secrets,
82
- };
83
- }
84
- function mapToArray(map) {
85
- return Array.from(map.values());
58
+ return Object.fromEntries(envs.map((env) => [env.variable, env]));
86
59
  }
@@ -8,6 +8,7 @@ const projectUtils_1 = require("../projectUtils");
8
8
  const load_1 = require("../dataconnect/load");
9
9
  const fileUtils_1 = require("../dataconnect/fileUtils");
10
10
  const logger_1 = require("../logger");
11
+ const auth_1 = require("../auth");
11
12
  exports.command = new command_1.Command("dataconnect:sdk:generate")
12
13
  .description("generates typed SDKs for your Data Connect connectors")
13
14
  .option("--watch", "watch for changes to your connector GQL files and regenerate your SDKs when updates occur")
@@ -27,14 +28,16 @@ exports.command = new command_1.Command("dataconnect:sdk:generate")
27
28
  if (!hasGeneratables) {
28
29
  logger_1.logger.warn("No generated SDKs have been declared in connector.yaml files.");
29
30
  logger_1.logger.warn(`Run ${clc.bold("firebase init dataconnect:sdk")} to configure a generated SDK.`);
30
- logger_1.logger.warn(`See https://firebase.google.com/docs/data-connect/gp/web-sdk for more details of how to configure generated SDKs.`);
31
+ logger_1.logger.warn(`See https://firebase.google.com/docs/data-connect/web-sdk for more details of how to configure generated SDKs.`);
31
32
  return;
32
33
  }
33
34
  for (const conn of serviceInfo.connectorInfo) {
35
+ const account = (0, auth_1.getProjectDefaultAccount)(options.projectRoot);
34
36
  const output = await dataconnectEmulator_1.DataConnectEmulator.generate({
35
37
  configDir,
36
38
  connectorId: conn.connectorYaml.connectorId,
37
39
  watch: options.watch,
40
+ account,
38
41
  });
39
42
  logger_1.logger.info(output);
40
43
  logger_1.logger.info(`Generated SDKs for ${conn.connectorYaml.connectorId}`);
@@ -21,6 +21,6 @@ exports.command = new command_1.Command("dataconnect:sql:diff [serviceId]")
21
21
  const projectId = (0, projectUtils_1.needProjectId)(options);
22
22
  await (0, ensureApis_1.ensureApis)(projectId);
23
23
  const serviceInfo = await (0, fileUtils_1.pickService)(projectId, options.config, serviceId);
24
- const diffs = await (0, schemaMigration_1.diffSchema)(serviceInfo.schema, (_a = serviceInfo.dataConnectYaml.schema.datasource.postgresql) === null || _a === void 0 ? void 0 : _a.schemaValidation);
24
+ const diffs = await (0, schemaMigration_1.diffSchema)(options, serviceInfo.schema, (_a = serviceInfo.dataConnectYaml.schema.datasource.postgresql) === null || _a === void 0 ? void 0 : _a.schemaValidation);
25
25
  return { projectId, serviceId, diffs };
26
26
  });
@@ -11,6 +11,8 @@ const ensureApis_1 = require("../dataconnect/ensureApis");
11
11
  const permissions_setup_1 = require("../gcp/cloudsql/permissions_setup");
12
12
  const permissions_1 = require("../gcp/cloudsql/permissions");
13
13
  const schemaMigration_1 = require("../dataconnect/schemaMigration");
14
+ const connect_1 = require("../gcp/cloudsql/connect");
15
+ const cloudSqlAdminClient = require("../gcp/cloudsql/cloudsqladmin");
14
16
  exports.command = new command_1.Command("dataconnect:sql:setup [serviceId]")
15
17
  .description("Setup your CloudSQL database")
16
18
  .before(requirePermissions_1.requirePermissions, [
@@ -29,7 +31,10 @@ exports.command = new command_1.Command("dataconnect:sql:setup [serviceId]")
29
31
  if (!instanceId) {
30
32
  throw new error_1.FirebaseError("dataconnect.yaml is missing field schema.datasource.postgresql.cloudsql.instanceId");
31
33
  }
32
- const { databaseId } = (0, schemaMigration_1.getIdentifiers)(serviceInfo.schema);
34
+ const { serviceName, instanceName, databaseId } = (0, schemaMigration_1.getIdentifiers)(serviceInfo.schema);
35
+ await (0, schemaMigration_1.ensureServiceIsConnectedToCloudSql)(serviceName, instanceName, databaseId, true);
36
+ const { user, mode } = await (0, connect_1.getIAMUser)(options);
37
+ await cloudSqlAdminClient.createUser(projectId, instanceId, mode, user);
33
38
  const schemaInfo = await (0, permissions_setup_1.getSchemaMetadata)(instanceId, databaseId, permissions_1.DEFAULT_SCHEMA, options);
34
39
  await (0, permissions_setup_1.setupSQLPermissions)(instanceId, databaseId, schemaInfo, options);
35
40
  });
@@ -0,0 +1,125 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.command = void 0;
4
+ const clc = require("colorette");
5
+ const command_1 = require("../command");
6
+ const error_1 = require("../error");
7
+ const projectUtils_1 = require("../projectUtils");
8
+ const prompt_1 = require("../prompt");
9
+ const requirePermissions_1 = require("../requirePermissions");
10
+ const requireAuth_1 = require("../requireAuth");
11
+ const utils_1 = require("../utils");
12
+ const artifactregistry = require("../gcp/artifactregistry");
13
+ const artifacts = require("../functions/artifacts");
14
+ exports.command = new command_1.Command("functions:artifacts:setpolicy")
15
+ .description("Set up a cleanup policy for Cloud Run functions container images in Artifact Registry. " +
16
+ "This policy will automatically delete old container images created during functions deployment.")
17
+ .option("--location <location>", "Specify location to set up the cleanup policy. " +
18
+ "If omitted, uses the default functions location.", "us-central1")
19
+ .option("--days <days>", "Number of days to keep container images before deletion. Default is 3 days.", "3")
20
+ .option("--none", "Opt-out from cleanup policy. This will prevent suggestions to set up a cleanup policy during initialization and deployment.")
21
+ .before((options) => {
22
+ if (options.days && options.none) {
23
+ throw new error_1.FirebaseError("Cannot specify both --days and --none options.");
24
+ }
25
+ })
26
+ .withForce("Automatically create or modify cleanup policy")
27
+ .before(requireAuth_1.requireAuth)
28
+ .before(async (options) => {
29
+ const projectId = (0, projectUtils_1.needProjectId)(options);
30
+ await artifactregistry.ensureApiEnabled(projectId);
31
+ })
32
+ .before(requirePermissions_1.requirePermissions, [
33
+ "artifactregistry.repositories.update",
34
+ "artifactregistry.versions.delete",
35
+ ])
36
+ .action(async (options) => {
37
+ var _a;
38
+ const projectId = (0, projectUtils_1.needProjectId)(options);
39
+ const location = options.location || "us-central1";
40
+ let daysToKeep = parseInt(options.days || "3", 10);
41
+ const repoPath = artifacts.makeRepoPath(projectId, location);
42
+ let repository;
43
+ try {
44
+ repository = await artifactregistry.getRepository(repoPath);
45
+ }
46
+ catch (err) {
47
+ if (err.status === 404) {
48
+ (0, utils_1.logBullet)(`Repository '${repoPath}' does not exist in Artifact Registry.`);
49
+ (0, utils_1.logBullet)(`Please deploy your functions first using: ` +
50
+ `${clc.bold(`firebase deploy --only functions`)}`);
51
+ return;
52
+ }
53
+ throw err;
54
+ }
55
+ if (options.none) {
56
+ const existingPolicy = artifacts.findExistingPolicy(repository);
57
+ if (artifacts.hasCleanupOptOut(repository) && !existingPolicy) {
58
+ (0, utils_1.logBullet)(`Repository '${repoPath}' is already opted out from cleanup policies.`);
59
+ (0, utils_1.logBullet)(`No changes needed.`);
60
+ return;
61
+ }
62
+ (0, utils_1.logBullet)(`You are about to opt-out from cleanup policy for repository '${repoPath}'.`);
63
+ (0, utils_1.logBullet)(`This will prevent suggestions to set up cleanup policy during initialization and deployment.`);
64
+ if (existingPolicy) {
65
+ (0, utils_1.logBullet)(`Note: This will remove the existing cleanup policy from the repository.`);
66
+ }
67
+ const confirmOptOut = await (0, prompt_1.confirm)(Object.assign(Object.assign({}, options), { default: true, message: "Do you want to continue?" }));
68
+ if (!confirmOptOut) {
69
+ throw new error_1.FirebaseError("Command aborted.", { exit: 1 });
70
+ }
71
+ try {
72
+ await artifacts.optOutRepository(repository);
73
+ (0, utils_1.logSuccess)(`Successfully opted out from cleanup policy for ${clc.bold(repoPath)}`);
74
+ return;
75
+ }
76
+ catch (err) {
77
+ throw new error_1.FirebaseError("Failed to opt-out from artifact registry cleanup policy", {
78
+ original: err,
79
+ });
80
+ }
81
+ }
82
+ if (isNaN(daysToKeep) || daysToKeep < 0) {
83
+ throw new error_1.FirebaseError("Days must be a non-negative number");
84
+ }
85
+ if (daysToKeep === 0) {
86
+ daysToKeep = 0.003472;
87
+ }
88
+ if (artifacts.hasSameCleanupPolicy(repository, daysToKeep)) {
89
+ (0, utils_1.logBullet)(`A cleanup policy already exists that deletes images older than ${clc.bold(daysToKeep)} days.`);
90
+ (0, utils_1.logBullet)(`No changes needed.`);
91
+ return;
92
+ }
93
+ (0, utils_1.logBullet)(`You are about to set up a cleanup policy for Cloud Run functions container images in location ${clc.bold(location)}`);
94
+ (0, utils_1.logBullet)(`This policy will automatically delete container images that are older than ${clc.bold(daysToKeep)} days`);
95
+ (0, utils_1.logBullet)("This helps reduce storage costs by removing old container images that are no longer needed");
96
+ const existingPolicy = artifacts.findExistingPolicy(repository);
97
+ let isUpdate = false;
98
+ if (existingPolicy && ((_a = existingPolicy.condition) === null || _a === void 0 ? void 0 : _a.olderThan)) {
99
+ const existingDays = artifacts.parseDaysFromPolicy(existingPolicy.condition.olderThan);
100
+ if (existingDays) {
101
+ isUpdate = true;
102
+ (0, utils_1.logBullet)(`Note: This will update an existing policy that currently deletes images older than ${clc.bold(existingDays)} days`);
103
+ }
104
+ }
105
+ if (artifacts.hasCleanupOptOut(repository)) {
106
+ (0, utils_1.logBullet)(`Note: This repository was previously opted out from cleanup policy. This action will remove the opt-out status.`);
107
+ }
108
+ const confirmSetup = await (0, prompt_1.confirm)(Object.assign(Object.assign({}, options), { default: true, message: "Do you want to continue?" }));
109
+ if (!confirmSetup) {
110
+ throw new error_1.FirebaseError("Command aborted.", { exit: 1 });
111
+ }
112
+ try {
113
+ await artifacts.setCleanupPolicy(repository, daysToKeep);
114
+ const successMessage = isUpdate
115
+ ? `Successfully updated cleanup policy to delete images older than ${clc.bold(daysToKeep)} days`
116
+ : `Successfully set up cleanup policy that deletes images older than ${clc.bold(daysToKeep)} days`;
117
+ (0, utils_1.logSuccess)(successMessage);
118
+ (0, utils_1.logBullet)(`Cleanup policy has been set for ${clc.bold(repoPath)}`);
119
+ }
120
+ catch (err) {
121
+ throw new error_1.FirebaseError("Failed to set up artifact registry cleanup policy", {
122
+ original: err,
123
+ });
124
+ }
125
+ });
@@ -138,6 +138,8 @@ function load(client) {
138
138
  client.functions.secrets.describe = loadCommand("functions-secrets-describe");
139
139
  client.functions.secrets.prune = loadCommand("functions-secrets-prune");
140
140
  client.functions.secrets.set = loadCommand("functions-secrets-set");
141
+ client.functions.artifacts = {};
142
+ client.functions.artifacts.setpolicy = loadCommand("functions-artifacts-setpolicy");
141
143
  client.help = loadCommand("help");
142
144
  client.hosting = {};
143
145
  client.hosting.channel = {};
@@ -14,13 +14,16 @@ const utils = require("../utils");
14
14
  const requireHostingSite_1 = require("../requireHostingSite");
15
15
  const LINKS = [
16
16
  { name: "Analytics", arg: "analytics", consolePath: "/analytics" },
17
+ { name: "App Hosting", arg: "apphosting", consolePath: "/apphosting" },
17
18
  { name: "Authentication: Providers", arg: "auth", consolePath: "/authentication/providers" },
18
19
  { name: "Authentication: Users", arg: "auth:users", consolePath: "/authentication/users" },
19
20
  { name: "Crash Reporting", arg: "crash", consolePath: "/crashlytics" },
20
21
  { name: "Database: Data", arg: "database", consolePath: "/database/data" },
21
22
  { name: "Database: Rules", arg: "database:rules", consolePath: "/database/rules" },
23
+ { name: "Data Connect", arg: "dataconnect", consolePath: "/dataconnect" },
22
24
  { name: "Docs", arg: "docs", url: "https://firebase.google.com/docs" },
23
25
  { name: "Dynamic Links", arg: "links", consolePath: "/durablelinks" },
26
+ { name: "Extensions", arg: "extensions", consolePath: "/extensions" },
24
27
  { name: "Firestore: Data", arg: "firestore", consolePath: "/firestore/data" },
25
28
  { name: "Firestore: Rules", arg: "firestore:rules", consolePath: "/firestore/rules" },
26
29
  { name: "Firestore: Indexes", arg: "firestore:indexes", consolePath: "/firestore/indexes" },
@@ -7,9 +7,11 @@ const experiments = require("../experiments");
7
7
  const prompt_1 = require("../prompt");
8
8
  const utils = require("../utils");
9
9
  const graphqlError_1 = require("./graphqlError");
10
+ const auth_1 = require("../auth");
10
11
  async function build(options, configDir, dryRun) {
11
12
  var _a, _b;
12
- const args = { configDir };
13
+ const account = (0, auth_1.getProjectDefaultAccount)(options.projectRoot);
14
+ const args = { configDir, account };
13
15
  if (experiments.isEnabled("fdcconnectorevolution") && options.projectId) {
14
16
  const flags = process.env["DATA_CONNECT_PREVIEW"];
15
17
  if (flags) {
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.getFrameworksFromPackageJson = exports.SUPPORTED_FRAMEWORKS = exports.resolvePackageJson = exports.getPlatformFromFolder = exports.pickService = exports.readGQLFiles = exports.readConnectorYaml = exports.readDataConnectYaml = exports.readFirebaseJson = void 0;
3
+ exports.getFrameworksFromPackageJson = exports.frameworksMap = exports.SUPPORTED_FRAMEWORKS = exports.resolvePackageJson = exports.getPlatformFromFolder = exports.pickService = exports.readGQLFiles = exports.readConnectorYaml = exports.readDataConnectYaml = exports.readFirebaseJson = void 0;
4
4
  const fs = require("fs-extra");
5
5
  const path = require("path");
6
6
  const error_1 = require("../error");
@@ -147,12 +147,16 @@ async function resolvePackageJson(packageJsonPath) {
147
147
  }
148
148
  }
149
149
  exports.resolvePackageJson = resolvePackageJson;
150
- exports.SUPPORTED_FRAMEWORKS = ["react"];
150
+ exports.SUPPORTED_FRAMEWORKS = ["react", "angular"];
151
+ exports.frameworksMap = {
152
+ react: ["react", "next"],
153
+ angular: ["@angular/core"],
154
+ };
151
155
  function getFrameworksFromPackageJson(packageJson) {
152
156
  var _a, _b;
153
157
  const devDependencies = Object.keys((_a = packageJson.devDependencies) !== null && _a !== void 0 ? _a : {});
154
158
  const dependencies = Object.keys((_b = packageJson.dependencies) !== null && _b !== void 0 ? _b : {});
155
- const matched = new Set([...devDependencies, ...dependencies].filter((dep) => exports.SUPPORTED_FRAMEWORKS.includes(dep)));
156
- return Array.from(matched);
159
+ const allDeps = Array.from(new Set([...devDependencies, ...dependencies]));
160
+ return exports.SUPPORTED_FRAMEWORKS.filter((framework) => exports.frameworksMap[framework].find((dep) => allDeps.includes(dep)));
157
161
  }
158
162
  exports.getFrameworksFromPackageJson = getFrameworksFromPackageJson;
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.getIdentifiers = exports.grantRoleToUserInSchema = exports.migrateSchema = exports.diffSchema = void 0;
3
+ exports.ensureServiceIsConnectedToCloudSql = exports.getIdentifiers = exports.grantRoleToUserInSchema = exports.migrateSchema = exports.diffSchema = void 0;
4
4
  const clc = require("colorette");
5
5
  const sql_formatter_1 = require("sql-formatter");
6
6
  const types_1 = require("./types");
@@ -16,10 +16,23 @@ const utils_1 = require("../utils");
16
16
  const cloudsqladmin_1 = require("../gcp/cloudsql/cloudsqladmin");
17
17
  const cloudSqlAdminClient = require("../gcp/cloudsql/cloudsqladmin");
18
18
  const errors = require("./errors");
19
- async function diffSchema(schema, schemaValidation) {
20
- const { serviceName, instanceName, databaseId } = getIdentifiers(schema);
19
+ async function setupSchemaIfNecessary(instanceId, databaseId, options) {
20
+ await (0, connect_1.setupIAMUsers)(instanceId, databaseId, options);
21
+ const schemaInfo = await (0, permissions_setup_1.getSchemaMetadata)(instanceId, databaseId, permissions_1.DEFAULT_SCHEMA, options);
22
+ if (schemaInfo.setupStatus !== permissions_setup_1.SchemaSetupStatus.BrownField &&
23
+ schemaInfo.setupStatus !== permissions_setup_1.SchemaSetupStatus.GreenField) {
24
+ return await (0, permissions_setup_1.setupSQLPermissions)(instanceId, databaseId, schemaInfo, options, true);
25
+ }
26
+ else {
27
+ logger_1.logger.info(`Detected schema "${schemaInfo.name}" is setup in ${schemaInfo.setupStatus} mode. Skipping Setup.`);
28
+ }
29
+ return schemaInfo.setupStatus;
30
+ }
31
+ async function diffSchema(options, schema, schemaValidation) {
32
+ const { serviceName, instanceName, databaseId, instanceId } = getIdentifiers(schema);
21
33
  await ensureServiceIsConnectedToCloudSql(serviceName, instanceName, databaseId, false);
22
34
  let diffs = [];
35
+ await setupSchemaIfNecessary(instanceId, databaseId, options);
23
36
  let validationMode = schemaValidation !== null && schemaValidation !== void 0 ? schemaValidation : "COMPATIBLE";
24
37
  setSchemaValidationMode(schema, validationMode);
25
38
  try {
@@ -87,7 +100,9 @@ async function migrateSchema(args) {
87
100
  const { options, schema, validateOnly, schemaValidation } = args;
88
101
  const { serviceName, instanceId, instanceName, databaseId } = getIdentifiers(schema);
89
102
  await ensureServiceIsConnectedToCloudSql(serviceName, instanceName, databaseId, true);
103
+ await (0, connect_1.setupIAMUsers)(instanceId, databaseId, options);
90
104
  let diffs = [];
105
+ await setupSchemaIfNecessary(instanceId, databaseId, options);
91
106
  let validationMode = schemaValidation !== null && schemaValidation !== void 0 ? schemaValidation : "COMPATIBLE";
92
107
  setSchemaValidationMode(schema, validationMode);
93
108
  try {
@@ -159,27 +174,15 @@ async function grantRoleToUserInSchema(options, schema) {
159
174
  const projectId = (0, projectUtils_1.needProjectId)(options);
160
175
  const { user, mode } = (0, connect_1.toDatabaseUser)(email);
161
176
  const fdcSqlRole = permissions_setup_1.fdcSqlRoleMap[role](databaseId);
177
+ await (0, connect_1.setupIAMUsers)(instanceId, databaseId, options);
162
178
  const userIsCSQLAdmin = await (0, cloudsqladmin_1.iamUserIsCSQLAdmin)(options);
163
179
  if (!userIsCSQLAdmin) {
164
180
  throw new error_1.FirebaseError(`Only users with 'roles/cloudsql.admin' can grant SQL roles. If you do not have this role, ask your database administrator to run this command or manually grant ${fdcSqlRole} to ${user}`);
165
181
  }
166
- const schemaInfo = await (0, permissions_setup_1.getSchemaMetadata)(instanceId, databaseId, permissions_1.DEFAULT_SCHEMA, options);
167
- let isGreenfieldSetup = schemaInfo.setupStatus === permissions_setup_1.SchemaSetupStatus.GreenField;
168
- switch (schemaInfo.setupStatus) {
169
- case permissions_setup_1.SchemaSetupStatus.NotSetup:
170
- case permissions_setup_1.SchemaSetupStatus.NotFound:
171
- const newSetupStatus = await (0, permissions_setup_1.setupSQLPermissions)(instanceId, databaseId, schemaInfo, options);
172
- isGreenfieldSetup = newSetupStatus === permissions_setup_1.SchemaSetupStatus.GreenField;
173
- break;
174
- default:
175
- logger_1.logger.info(`Detected schema "${schemaInfo.name}" is setup in ${schemaInfo.setupStatus} mode. Skipping Setup.`);
176
- break;
177
- }
178
- if (!isGreenfieldSetup && fdcSqlRole === (0, permissions_1.firebaseowner)(databaseId, permissions_1.DEFAULT_SCHEMA)) {
179
- const newSetupStatus = await (0, permissions_setup_1.setupSQLPermissions)(instanceId, databaseId, schemaInfo, options);
180
- if (newSetupStatus !== permissions_setup_1.SchemaSetupStatus.GreenField) {
181
- throw new error_1.FirebaseError(`Can't grant owner rule for brownfield databases. Consider fully migrating your database to FDC using 'firebase dataconnect:sql:setup'`);
182
- }
182
+ const schemaSetupStatus = await setupSchemaIfNecessary(instanceId, databaseId, options);
183
+ if (schemaSetupStatus !== permissions_setup_1.SchemaSetupStatus.GreenField &&
184
+ fdcSqlRole === (0, permissions_1.firebaseowner)(databaseId, permissions_1.DEFAULT_SCHEMA)) {
185
+ throw new error_1.FirebaseError(`Owner rule isn't available in brownfield databases. If you would like Data Connect to manage and own your database schema, run 'firebase dataconnect:sql:setup'`);
183
186
  }
184
187
  await cloudSqlAdminClient.createUser(projectId, instanceId, mode, user);
185
188
  await (0, connect_1.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, [`GRANT "${fdcSqlRole}" TO "${user}"`], false);
@@ -259,10 +262,9 @@ async function handleIncompatibleSchemaError(args) {
259
262
  }
260
263
  const schemaInfo = await (0, permissions_setup_1.getSchemaMetadata)(instanceId, databaseId, permissions_1.DEFAULT_SCHEMA, options);
261
264
  if (schemaInfo.setupStatus !== permissions_setup_1.SchemaSetupStatus.GreenField) {
262
- const newSetupStatus = await (0, permissions_setup_1.setupSQLPermissions)(instanceId, databaseId, schemaInfo, options, true);
263
- if (newSetupStatus !== permissions_setup_1.SchemaSetupStatus.GreenField) {
264
- throw new error_1.FirebaseError(`Can't migrate brownfield databases. Consider fully migrating your database to FDC using 'firebase dataconnect:sql:setup'`);
265
- }
265
+ throw new error_1.FirebaseError(`Brownfield database are protected from SQL changes by Data Connect.\n` +
266
+ `You can use the SQL diff generated by 'firebase dataconnect:sql:diff' to assist you in applying the required changes to your CloudSQL database. Connector deployment will succeed when there is no required diff changes.\n` +
267
+ `If you would like Data Connect to manage your database schema, run 'firebase dataconnect:sql:setup'`);
266
268
  }
267
269
  if (!(await (0, permissions_setup_1.checkSQLRoleIsGranted)(options, instanceId, databaseId, (0, permissions_1.firebaseowner)(databaseId), (await (0, connect_1.getIAMUser)(options)).user))) {
268
270
  throw new error_1.FirebaseError(`Command aborted. Only users granted firebaseowner SQL role can run migrations.`);
@@ -399,6 +401,7 @@ async function ensureServiceIsConnectedToCloudSql(serviceName, instanceId, datab
399
401
  logger_1.logger.debug(err);
400
402
  }
401
403
  }
404
+ exports.ensureServiceIsConnectedToCloudSql = ensureServiceIsConnectedToCloudSql;
402
405
  function displaySchemaChanges(error, validationMode, instanceName, databaseId) {
403
406
  switch (error.violationType) {
404
407
  case "INCOMPATIBLE_SCHEMA":
@@ -1,8 +1,9 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.clearCredentials = exports.getCredentialPathAsync = void 0;
3
+ exports.hasDefaultCredentials = exports.clearCredentials = exports.getCredentialPathAsync = void 0;
4
4
  const fs = require("fs");
5
5
  const path = require("path");
6
+ const google_auth_library_1 = require("google-auth-library");
6
7
  const api_1 = require("./api");
7
8
  const logger_1 = require("./logger");
8
9
  async function getCredentialPathAsync(account) {
@@ -78,3 +79,13 @@ function userEmailSlug(user) {
78
79
  const slug = email.replace("@", "_").replace(".", "_");
79
80
  return slug;
80
81
  }
82
+ async function hasDefaultCredentials() {
83
+ try {
84
+ await google_auth_library_1.auth.getApplicationDefault();
85
+ return true;
86
+ }
87
+ catch (err) {
88
+ return false;
89
+ }
90
+ }
91
+ exports.hasDefaultCredentials = hasDefaultCredentials;
@@ -54,7 +54,7 @@ async function default_1(context, options) {
54
54
  utils.logLabeledBullet("dataconnect", `Successfully prepared schema and connectors`);
55
55
  if (options.dryRun) {
56
56
  for (const si of serviceInfos) {
57
- await (0, schemaMigration_1.diffSchema)(si.schema, (_a = si.dataConnectYaml.schema.datasource.postgresql) === null || _a === void 0 ? void 0 : _a.schemaValidation);
57
+ await (0, schemaMigration_1.diffSchema)(options, si.schema, (_a = si.dataConnectYaml.schema.datasource.postgresql) === null || _a === void 0 ? void 0 : _a.schemaValidation);
58
58
  }
59
59
  utils.logLabeledBullet("dataconnect", "Checking for CloudSQL resources...");
60
60
  await Promise.all(serviceInfos
@@ -83,12 +83,27 @@ async function cleanupBuildImages(haveFunctions, deletedFunctions, cleaners = {}
83
83
  exports.cleanupBuildImages = cleanupBuildImages;
84
84
  class ArtifactRegistryCleaner {
85
85
  static packagePath(func) {
86
- const encodedId = func.id
86
+ const encodedId = func.platform === "gcfv2"
87
+ ? ArtifactRegistryCleaner.encodePackageNameV2(func)
88
+ : ArtifactRegistryCleaner.encodePackageNameV1(func);
89
+ return `projects/${func.project}/locations/${func.region}/repositories/gcf-artifacts/packages/${encodedId}`;
90
+ }
91
+ static encodePart(part) {
92
+ return part
87
93
  .replace(/_/g, "__")
88
94
  .replace(/-/g, "--")
89
95
  .replace(/^[A-Z]/, (first) => `${first.toLowerCase()}-${first.toLowerCase()}`)
90
96
  .replace(/[A-Z]/g, (upper) => `_${upper.toLowerCase()}`);
91
- return `projects/${func.project}/locations/${func.region}/repositories/gcf-artifacts/packages/${encodedId}`;
97
+ }
98
+ static encodePackageNameV1(func) {
99
+ return ArtifactRegistryCleaner.encodePart(func.id);
100
+ }
101
+ static encodePackageNameV2(func) {
102
+ return [
103
+ ArtifactRegistryCleaner.encodePart(func.project),
104
+ ArtifactRegistryCleaner.encodePart(func.region),
105
+ ArtifactRegistryCleaner.encodePart(func.id),
106
+ ].join("__");
92
107
  }
93
108
  async cleanupFunction(func) {
94
109
  let op;
@@ -54,7 +54,9 @@ async function detectFromPort(port, project, runtime, initialDelay = 0, timeout
54
54
  let res;
55
55
  const timedOut = new Promise((resolve, reject) => {
56
56
  setTimeout(() => {
57
- reject(new error_1.FirebaseError("User code failed to load. Cannot determine backend specification"));
57
+ const originalError = "User code failed to load. Cannot determine backend specification.";
58
+ const error = `${originalError} Timeout after ${timeout}. See https://firebase.google.com/docs/functions/tips#avoid_deployment_timeouts_during_initialization'`;
59
+ reject(new error_1.FirebaseError(error));
58
60
  }, getFunctionDiscoveryTimeout() || timeout);
59
61
  });
60
62
  if (initialDelay > 0) {
@@ -42,15 +42,21 @@ const chain = async function (fns, context, options, payload) {
42
42
  };
43
43
  const isDeployingWebFramework = (options) => {
44
44
  const config = options.config.get("hosting");
45
- const webFrameworkInConfig = (Array.isArray(config) ? config : [config]).find((it) => it.source);
46
- if (!webFrameworkInConfig)
45
+ if (!config)
46
+ return false;
47
+ const normalizedConfig = Array.isArray(config) ? config : [config];
48
+ const webFrameworksInConfig = normalizedConfig.filter((c) => c === null || c === void 0 ? void 0 : c.source);
49
+ if (webFrameworksInConfig.length === 0)
47
50
  return false;
48
51
  if (!options.only)
49
52
  return true;
50
53
  return options.only.split(",").some((it) => {
51
54
  const [target, site] = it.split(":");
52
- return (target === "hosting" &&
53
- [webFrameworkInConfig.site, webFrameworkInConfig.target].includes(site));
55
+ if (target !== "hosting")
56
+ return false;
57
+ if (!site)
58
+ return true;
59
+ return webFrameworksInConfig.some((c) => [c.site, c.target].includes(site));
54
60
  });
55
61
  };
56
62
  exports.isDeployingWebFramework = isDeployingWebFramework;
@@ -24,7 +24,7 @@ class ExpressBasedEmulator {
24
24
  });
25
25
  }
26
26
  if (!this.options.noBodyParser) {
27
- app.use(bodyParser.json());
27
+ app.use(bodyParser.json({ limit: "130mb" }));
28
28
  }
29
29
  app.set("json spaces", 2);
30
30
  return Promise.resolve(app);
@@ -10,6 +10,7 @@ class AppHostingEmulator {
10
10
  }
11
11
  async start() {
12
12
  const { hostname, port } = await (0, serve_1.start)({
13
+ projectId: this.args.projectId,
13
14
  port: this.args.port,
14
15
  startCommand: this.args.startCommand,
15
16
  rootDirectory: this.args.rootDirectory,