firebase-tools 15.9.0 → 15.9.2-ct-studioexport3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/appdistribution/distribution.js +1 -1
- package/lib/appdistribution/options-parser-util.js +7 -0
- package/lib/apphosting/config.js +24 -1
- package/lib/commands/appdistribution-distribute.js +5 -5
- package/lib/commands/apptesting.js +9 -10
- package/lib/commands/dataconnect-sql-setup.js +3 -4
- package/lib/commands/dataconnect-sql-shell.js +2 -1
- package/lib/commands/index.js +4 -6
- package/lib/commands/studio-export.js +23 -5
- package/lib/dataconnect/schemaMigration.js +25 -17
- package/lib/dataconnect/types.js +1 -0
- package/lib/deploy/functions/build.js +24 -9
- package/lib/deploy/functions/runtimes/discovery/v1alpha1.js +8 -2
- package/lib/emulator/apphosting/config.js +1 -21
- package/lib/emulator/controller.js +2 -1
- package/lib/emulator/hub.js +2 -0
- package/lib/emulator/hubExport.js +7 -6
- package/lib/experiments.js +6 -6
- package/lib/firebase_studio/migrate.js +75 -15
- package/lib/frameworks/next/constants.js +6 -1
- package/lib/frameworks/next/index.js +5 -1
- package/lib/gcp/cloudfunctionsv2.js +23 -2
- package/lib/gcp/cloudsql/permissionsSetup.js +4 -4
- package/lib/mcp/tools/apptesting/tests.js +2 -2
- package/package.json +1 -1
- package/schema/dataconnect-yaml.json +4 -0
- package/templates/firebase-studio-export/system_instructions_template.md +14 -3
@@ -39,7 +39,7 @@ async function upload(requests, appName, distribution) {
 utils.logSuccess(`View this release in the Firebase console: ${release.firebaseConsoleUri}`);
 utils.logSuccess(`Share this release with testers who have access: ${release.testingUri}`);
 utils.logSuccess(`Download the release binary (link expires in 1 hour): ${release.binaryDownloadUri}`);
-return uploadResponse.release
+return uploadResponse.release;
 }
 catch (err) {
 if ((0, error_1.getErrStatus)(err) === 404) {

@@ -50,7 +50,14 @@ function getAppName(options) {
 }
 return toAppName(options.app);
 }
+const APP_ID_FORMAT = /^\d+:\d+:(android|ios|web):[a-fA-F0-9]+$/;
+function validateAppId(appId) {
+if (!APP_ID_FORMAT.test(appId)) {
+throw new error_1.FirebaseError(`Invalid Firebase app ID: ${appId}`);
+}
+}
 function toAppName(appId) {
+validateAppId(appId);
 return `projects/${appId.split(":")[1]}/apps/${appId}`;
 }
 function parseTestDevices(value, file = "") {
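The new `APP_ID_FORMAT` check rejects malformed `--app` values before any App Distribution API call is made. A minimal sketch of the behavior (the sample app ID below is invented for illustration):

```js
// Sketch that mirrors the validation added in options-parser-util.js; not the shipped module.
const APP_ID_FORMAT = /^\d+:\d+:(android|ios|web):[a-fA-F0-9]+$/;

function toAppName(appId) {
  if (!APP_ID_FORMAT.test(appId)) {
    throw new Error(`Invalid Firebase app ID: ${appId}`);
  }
  // The project number is the second colon-separated segment of the app ID.
  return `projects/${appId.split(":")[1]}/apps/${appId}`;
}

console.log(toAppName("1:1234567890:android:abc123def456"));
// -> projects/1234567890/apps/1:1234567890:android:abc123def456
toAppName("not-an-app-id"); // throws
```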
package/lib/apphosting/config.js CHANGED

@@ -4,6 +4,7 @@ exports.APPHOSTING_YAML_FILE_REGEX = exports.APPHOSTING_LOCAL_YAML_FILE = export
 exports.discoverBackendRoot = discoverBackendRoot;
 exports.listAppHostingFilesInPath = listAppHostingFilesInPath;
 exports.load = load;
+exports.getAppHostingConfiguration = getAppHostingConfiguration;
 exports.store = store;
 exports.findEnv = findEnv;
 exports.upsertEnv = upsertEnv;

@@ -22,6 +23,7 @@ const yaml_1 = require("./yaml");
 const logger_1 = require("../logger");
 const csm = require("../gcp/secretManager");
 const error_1 = require("../error");
+const path_2 = require("path");
 exports.APPHOSTING_BASE_YAML_FILE = "apphosting.yaml";
 exports.APPHOSTING_EMULATORS_YAML_FILE = "apphosting.emulator.yaml";
 exports.APPHOSTING_LOCAL_YAML_FILE = "apphosting.local.yaml";

@@ -64,6 +66,28 @@ function load(yamlPath) {
 }
 return yaml.parseDocument(raw);
 }
+const dynamicDispatch = exports;
+async function getAppHostingConfiguration(backendDir) {
+const appHostingConfigPaths = dynamicDispatch.listAppHostingFilesInPath(backendDir);
+const fileNameToPathMap = Object.fromEntries(appHostingConfigPaths.map((path) => [(0, path_2.basename)(path), path]));
+const output = yaml_1.AppHostingYamlConfig.empty();
+const baseFilePath = fileNameToPathMap[exports.APPHOSTING_BASE_YAML_FILE];
+const emulatorsFilePath = fileNameToPathMap[exports.APPHOSTING_EMULATORS_YAML_FILE];
+const localFilePath = fileNameToPathMap[exports.APPHOSTING_LOCAL_YAML_FILE];
+if (baseFilePath) {
+const baseFile = await yaml_1.AppHostingYamlConfig.loadFromFile(baseFilePath);
+output.merge(baseFile, false);
+}
+if (emulatorsFilePath) {
+const emulatorsConfig = await yaml_1.AppHostingYamlConfig.loadFromFile(emulatorsFilePath);
+output.merge(emulatorsConfig, false);
+}
+if (localFilePath) {
+const localYamlConfig = await yaml_1.AppHostingYamlConfig.loadFromFile(localFilePath);
+output.merge(localYamlConfig, true);
+}
+return output;
+}
 function store(yamlPath, document) {
 (0, fs_1.writeFileSync)(yamlPath, document.toString());
 }

@@ -94,7 +118,6 @@ function upsertEnv(document, env) {
 }
 envs.add(envYaml);
 }
-const dynamicDispatch = exports;
 async function maybeAddSecretToYaml(secretName, fileName = exports.APPHOSTING_BASE_YAML_FILE) {
 const backendRoot = dynamicDispatch.discoverBackendRoot(process.cwd());
 let path;
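`getAppHostingConfiguration` centralizes the config layering that previously lived in the emulator package: `apphosting.yaml` is read first, then `apphosting.emulator.yaml`, then `apphosting.local.yaml`. A rough sketch of the resulting precedence, using plain objects instead of `AppHostingYamlConfig` (the exact merge flags are internal to that class, so this only assumes that later files win):

```js
// Sketch of the base -> emulator -> local layering; values are placeholders.
const base = { env: { API_URL: "https://api.example.com", LOG_LEVEL: "info" } }; // apphosting.yaml
const emulators = { env: { API_URL: "http://localhost:8080" } };                 // apphosting.emulator.yaml
const local = { env: { LOG_LEVEL: "debug" } };                                   // apphosting.local.yaml

const merged = { env: { ...base.env, ...emulators.env, ...local.env } };
console.log(merged.env);
// -> { API_URL: "http://localhost:8080", LOG_LEVEL: "debug" }
```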
@@ -97,7 +97,7 @@ async function distribute(appName, distribution, testCases, testDevices, release
 }
 }
 }
-const
+const release = await (0, distribution_1.upload)(requests, appName, distribution);
 if (aabInfo && !aabInfo.testCertificate) {
 aabInfo = await requests.getAabInfo(appName);
 if (aabInfo.testCertificate) {

@@ -110,17 +110,17 @@ async function distribute(appName, distribution, testCases, testDevices, release
 `SHA-256 certificate fingerprint: ${aabInfo.testCertificate.hashSha256}`);
 }
 }
-await requests.updateReleaseNotes(
-await requests.distribute(
+await requests.updateReleaseNotes(release.name, releaseNotes);
+await requests.distribute(release.name, testers, groups);
 if (testDevices.length) {
 utils.logBullet("starting automated test (note: this feature is in beta)");
 const releaseTestPromises = [];
 if (!testCases.length) {
-releaseTestPromises.push(requests.createReleaseTest(
+releaseTestPromises.push(requests.createReleaseTest(release.name, testDevices, undefined, loginCredential));
 }
 else {
 for (const testCaseId of testCases) {
-releaseTestPromises.push(requests.createReleaseTest(
+releaseTestPromises.push(requests.createReleaseTest(release.name, testDevices, undefined, loginCredential, `${appName}/testCases/${testCaseId}`));
 }
 }
 const releaseTests = await Promise.all(releaseTestPromises);
@@ -8,7 +8,6 @@ const clc = require("colorette");
 const parseTestFiles_1 = require("../apptesting/parseTestFiles");
 const ora = require("ora");
 const error_1 = require("../error");
-const marked_1 = require("marked");
 const client_1 = require("../appdistribution/client");
 const distribution_1 = require("../appdistribution/distribution");
 const options_parser_util_1 = require("../appdistribution/options-parser-util");

@@ -38,13 +37,13 @@ exports.command = new command_1.Command("apptesting:execute <release-binary-file
 throw new error_1.FirebaseError("No tests found");
 }
 const invokeSpinner = ora("Requesting test execution");
-let
-let
+let releaseTests;
+let release;
 try {
 const client = new client_1.AppDistributionClient();
-
+release = await (0, distribution_1.upload)(client, appName, new distribution_1.Distribution(target));
 invokeSpinner.start();
-
+releaseTests = await invokeTests(client, release.name, tests, !testDevices.length ? defaultDevices : testDevices);
 invokeSpinner.text = "Test execution requested";
 invokeSpinner.succeed();
 }

@@ -52,22 +51,22 @@ exports.command = new command_1.Command("apptesting:execute <release-binary-file
 invokeSpinner.fail("Failed to request test execution");
 throw ex;
 }
-logger_1.logger.info(clc.bold(`\n${clc.white("===")} Running ${pluralizeTests(
-logger_1.logger.info(
+logger_1.logger.info(clc.bold(`\n${clc.white("===")} Running ${pluralizeTests(releaseTests.length)}`));
+logger_1.logger.info(`View progress and results in the Firebase Console:\n${release.firebaseConsoleUri}`);
 });
 function pluralizeTests(numTests) {
 return `${numTests} test${numTests === 1 ? "" : "s"}`;
 }
 async function invokeTests(client, releaseName, testDefs, devices) {
 try {
-const
+const releaseTests = [];
 for (const testDef of testDefs) {
 const aiInstructions = {
 steps: testDef.testCase.steps,
 };
-
+releaseTests.push(await client.createReleaseTest(releaseName, devices, aiInstructions, undefined, undefined, testDef.testCase.displayName));
 }
-return
+return releaseTests;
 }
 catch (err) {
 throw new error_1.FirebaseError("Test invocation failed", { original: (0, error_1.getError)(err) });
@@ -8,7 +8,6 @@ const requireAuth_1 = require("../requireAuth");
 const requirePermissions_1 = require("../requirePermissions");
 const ensureApis_1 = require("../dataconnect/ensureApis");
 const permissionsSetup_1 = require("../gcp/cloudsql/permissionsSetup");
-const permissions_1 = require("../gcp/cloudsql/permissions");
 const schemaMigration_1 = require("../dataconnect/schemaMigration");
 const connect_1 = require("../gcp/cloudsql/connect");
 const load_1 = require("../dataconnect/load");

@@ -33,9 +32,9 @@ exports.command = new command_1.Command("dataconnect:sql:setup")
 if (!instanceId) {
 throw new error_1.FirebaseError("dataconnect.yaml is missing field schema.datasource.postgresql.cloudsql.instanceId");
 }
-const { serviceName, instanceName, databaseId } = (0, schemaMigration_1.getIdentifiers)((0, types_1.mainSchema)(serviceInfo.schemas));
-await (0, schemaMigration_1.ensureServiceIsConnectedToCloudSql)(serviceName, instanceName, databaseId, true);
+const { serviceName, instanceName, databaseId, schemaName } = (0, schemaMigration_1.getIdentifiers)((0, types_1.mainSchema)(serviceInfo.schemas));
+await (0, schemaMigration_1.ensureServiceIsConnectedToCloudSql)(serviceName, instanceName, databaseId, true, schemaName);
 await (0, connect_1.setupIAMUsers)(instanceId, options);
-const schemaInfo = await (0, permissionsSetup_1.getSchemaMetadata)(instanceId, databaseId,
+const schemaInfo = await (0, permissionsSetup_1.getSchemaMetadata)(instanceId, databaseId, schemaName, options);
 await (0, permissionsSetup_1.setupSQLPermissions)(instanceId, databaseId, schemaInfo, options);
 });

@@ -83,7 +83,7 @@ exports.command = new command_1.Command("dataconnect:sql:shell")
 const projectId = (0, projectUtils_1.needProjectId)(options);
 await (0, ensureApis_1.ensureApis)(projectId);
 const serviceInfo = await (0, load_1.pickOneService)(projectId, options.config, options.service, options.location);
-const { instanceId, databaseId } = (0, schemaMigration_1.getIdentifiers)((0, types_1.mainSchema)(serviceInfo.schemas));
+const { instanceId, databaseId, schemaName } = (0, schemaMigration_1.getIdentifiers)((0, types_1.mainSchema)(serviceInfo.schemas));
 const { user: username } = await (0, connect_1.getIAMUser)(options);
 const instance = await cloudSqlAdminClient.getInstance(projectId, instanceId);
 const connectionName = instance.connectionName;

@@ -104,6 +104,7 @@ exports.command = new command_1.Command("dataconnect:sql:shell")
 database: databaseId,
 });
 const conn = await pool.connect();
+await conn.query(`SET search_path TO "${schemaName}"`);
 logger_1.logger.info(`Logged in as ${username}`);
 logger_1.logger.info(clc.cyan("Welcome to Data Connect Cloud SQL Shell"));
 logger_1.logger.info(clc.gray("Type your your SQL query or '.exit' to quit, queries should end with ';' or add empty line to execute."));
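With a non-default Postgres schema configured, `dataconnect:sql:shell` now pins `search_path` immediately after connecting, so unqualified table names resolve against the Data Connect schema. A minimal sketch with `pg` (connection details are placeholders):

```js
// Sketch only: mirrors the `SET search_path` statement added to the shell command.
const { Pool } = require("pg");

async function openShell(connectionConfig, databaseId, schemaName) {
  const pool = new Pool({ ...connectionConfig, database: databaseId });
  const conn = await pool.connect();
  // Quote the schema name so mixed-case or hyphenated names keep working.
  await conn.query(`SET search_path TO "${schemaName}"`);
  return conn; // `SELECT * FROM users` now reads "<schemaName>".users
}
```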
package/lib/commands/index.js CHANGED

@@ -247,18 +247,16 @@ function load(client) {
 client.dataconnect.compile = loadCommand("dataconnect-compile");
 client.dataconnect.sdk = {};
 client.dataconnect.sdk.generate = loadCommand("dataconnect-sdk-generate");
-
-
-client.studio.export = loadCommand("studio-export");
-}
+client.studio = {};
+client.studio.export = loadCommand("studio-export");
 client.target = loadCommand("target");
 client.target.apply = loadCommand("target-apply");
 client.target.clear = loadCommand("target-clear");
 client.target.remove = loadCommand("target-remove");
 client.use = loadCommand("use");
+client.apptesting = {};
+client.apptesting.execute = loadCommand("apptesting");
 if (experiments.isEnabled("apptesting")) {
-client.apptesting = {};
-client.apptesting.execute = loadCommand("apptesting");
 client.apptesting.wata = loadCommand("apptesting-wata");
 }
 const t1 = process.hrtime.bigint();
@@ -5,17 +5,35 @@ const command_1 = require("../command");
 const logger_1 = require("../logger");
 const migrate_1 = require("../firebase_studio/migrate");
 const path = require("path");
-const experiments = require("../experiments");
 const error_1 = require("../error");
+const unzip_1 = require("../unzip");
+const fs = require("fs");
 exports.command = new command_1.Command("studio:export <path>")
-.description("Bootstrap Firebase Studio apps for migration to Antigravity. Run on the unzipped folder from the Firebase Studio download.")
+.description("Bootstrap Firebase Studio apps for migration to Antigravity. Run on the unzipped folder from the Firebase Studio download, or directly on the downloaded zip file.")
 .option("--no-start-agy", "skip starting the Antigravity IDE after migration")
 .action(async (exportPath, options) => {
-experiments.assertEnabled("studioexport", "export Studio apps");
 if (!exportPath) {
 throw new error_1.FirebaseError("Must specify a path for migration.", { exit: 1 });
 }
-
-
+let rootPath = path.resolve(exportPath);
+if (fs.existsSync(rootPath) && fs.statSync(rootPath).isFile() && rootPath.endsWith(".zip")) {
+logger_1.logger.info(`⏳ Unzipping ${rootPath}...`);
+const parsedPath = path.parse(rootPath);
+let extractDirName = parsedPath.name;
+if (!extractDirName || extractDirName === ".") {
+extractDirName = "studio-export";
+}
+const extractPath = path.join(parsedPath.dir, extractDirName);
+await (0, unzip_1.unzip)(rootPath, extractPath);
+const extractedItems = fs.readdirSync(extractPath);
+if (extractedItems.length === 1 &&
+fs.statSync(path.join(extractPath, extractedItems[0])).isDirectory()) {
+rootPath = path.join(extractPath, extractedItems[0]);
+}
+else {
+rootPath = extractPath;
+}
+}
+logger_1.logger.info(`⏳ Exporting Studio apps from ${rootPath} to Antigravity...`);
 await (0, migrate_1.migrate)(rootPath, options);
 });
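`studio:export` now accepts the downloaded zip directly; the extraction directory is derived from the zip's own name and created next to it. A small sketch of that path derivation (the paths are illustrative):

```js
// Sketch of how the extraction folder is derived from the zip path.
const path = require("path");

function extractTargetFor(zipPath) {
  const parsed = path.parse(path.resolve(zipPath));
  const dirName = parsed.name && parsed.name !== "." ? parsed.name : "studio-export";
  return path.join(parsed.dir, dirName);
}

console.log(extractTargetFor("/downloads/my-studio-app.zip"));
// -> /downloads/my-studio-app
```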
@@ -25,10 +25,10 @@ const errors = require("./errors");
 const provisionCloudSql_1 = require("./provisionCloudSql");
 const requireAuth_1 = require("../requireAuth");
 const cloudbilling_1 = require("../gcp/cloudbilling");
-async function setupSchemaIfNecessary(instanceId, databaseId, options) {
+async function setupSchemaIfNecessary(instanceId, databaseId, schemaName, options) {
 try {
 await (0, connect_1.setupIAMUsers)(instanceId, options);
-const schemaInfo = await (0, permissionsSetup_1.getSchemaMetadata)(instanceId, databaseId,
+const schemaInfo = await (0, permissionsSetup_1.getSchemaMetadata)(instanceId, databaseId, schemaName, options);
 switch (schemaInfo.setupStatus) {
 case permissionsSetup_1.SchemaSetupStatus.BrownField:
 case permissionsSetup_1.SchemaSetupStatus.GreenField:

@@ -50,8 +50,8 @@ async function diffSchema(options, schema, schemaValidation) {
 let validationMode = schemaValidation ?? "STRICT";
 setSchemaValidationMode(schema, validationMode);
 displayStartSchemaDiff(validationMode);
-const { serviceName, instanceName, databaseId, instanceId } = getIdentifiers(schema);
-await ensureServiceIsConnectedToCloudSql(serviceName, instanceName, databaseId, false);
+const { serviceName, instanceName, databaseId, instanceId, schemaName } = getIdentifiers(schema);
+await ensureServiceIsConnectedToCloudSql(serviceName, instanceName, databaseId, false, schemaName);
 let incompatible = undefined;
 try {
 await (0, client_1.upsertSchema)(schema, true);

@@ -114,8 +114,8 @@ async function migrateSchema(args) {
 setSchemaValidationMode(schema, validationMode);
 displayStartSchemaDiff(validationMode);
 const projectId = (0, projectUtils_1.needProjectId)(options);
-const { serviceName, instanceId, instanceName, databaseId } = getIdentifiers(schema);
-await ensureServiceIsConnectedToCloudSql(serviceName, instanceName, databaseId, true);
+const { serviceName, instanceId, instanceName, databaseId, schemaName } = getIdentifiers(schema);
+await ensureServiceIsConnectedToCloudSql(serviceName, instanceName, databaseId, true, schemaName);
 const existingInstance = await cloudSqlAdminClient.getInstance(projectId, instanceId);
 if (existingInstance.state === "PENDING_CREATE") {
 if (stats) {

@@ -134,7 +134,7 @@ async function migrateSchema(args) {
 (0, utils_1.logLabeledWarning)("dataconnect", `Skip SQL schema migration because Cloud SQL is still being created`);
 return [];
 }
-await setupSchemaIfNecessary(instanceId, databaseId, options);
+await setupSchemaIfNecessary(instanceId, databaseId, schemaName, options);
 let diffs = [];
 try {
 await (0, client_1.upsertSchema)(schema, validateOnly);

@@ -168,6 +168,7 @@ async function migrateSchema(args) {
 options,
 databaseId,
 instanceId,
+schemaName,
 incompatibleSchemaError: incompatible,
 choice: migrationMode,
 });

@@ -202,6 +203,7 @@ async function migrateSchema(args) {
 options,
 databaseId,
 instanceId,
+schemaName,
 incompatibleSchemaError: incompatible,
 choice: migrationMode,
 });

@@ -242,13 +244,13 @@ async function upsertSecondarySchema(args) {
 async function grantRoleToUserInSchema(options, schema) {
 const role = options.role;
 const email = options.email;
-const { serviceName, instanceId, instanceName, databaseId } = getIdentifiers(schema);
-await ensureServiceIsConnectedToCloudSql(serviceName, instanceName, databaseId, false);
-const schemaSetupStatus = await setupSchemaIfNecessary(instanceId, databaseId, options);
+const { serviceName, instanceId, instanceName, databaseId, schemaName } = getIdentifiers(schema);
+await ensureServiceIsConnectedToCloudSql(serviceName, instanceName, databaseId, false, schemaName);
+const schemaSetupStatus = await setupSchemaIfNecessary(instanceId, databaseId, schemaName, options);
 if (schemaSetupStatus !== permissionsSetup_1.SchemaSetupStatus.GreenField && role === "owner") {
 throw new error_1.FirebaseError(`Owner rule isn't available in ${schemaSetupStatus} databases. If you would like Data Connect to manage and own your database schema, run 'firebase dataconnect:sql:setup'`);
 }
-await (0, permissionsSetup_1.grantRoleTo)(options, instanceId, databaseId, role, email);
+await (0, permissionsSetup_1.grantRoleTo)(options, instanceId, databaseId, role, email, schemaName);
 }
 function diffsEqual(x, y) {
 if (x.length !== y.length) {

@@ -280,11 +282,13 @@ function getIdentifiers(schema) {
 throw new error_1.FirebaseError("Data Connect schema must have a postgres datasource with a CloudSQL instance.");
 }
 const instanceId = instanceName.split("/").pop();
+const schemaName = postgresDatasource?.postgresql?.schema || permissions_1.DEFAULT_SCHEMA;
 const serviceName = serviceNameFromSchema(schema);
 return {
 databaseId,
 instanceId,
 instanceName,
+schemaName,
 serviceName,
 };
 }

@@ -299,7 +303,7 @@ function suggestedCommand(serviceName, invalidConnectorNames) {
 return `firebase deploy --only ${onlys}`;
 }
 async function handleIncompatibleSchemaError(args) {
-const { incompatibleSchemaError, options, instanceId, databaseId, choice } = args;
+const { incompatibleSchemaError, options, instanceId, databaseId, schemaName, choice } = args;
 const commandsToExecute = incompatibleSchemaError.diffs.filter((d) => {
 switch (choice) {
 case "all":

@@ -319,26 +323,29 @@ async function handleIncompatibleSchemaError(args) {
 Please ask a user with 'roles/cloudsql.admin' to apply the following commands.\n
 ${diffsToString(commandsToExecuteBySuperUser)}`);
 }
-const schemaInfo = await (0, permissionsSetup_1.getSchemaMetadata)(instanceId, databaseId,
+const schemaInfo = await (0, permissionsSetup_1.getSchemaMetadata)(instanceId, databaseId, schemaName, options);
 if (schemaInfo.setupStatus !== permissionsSetup_1.SchemaSetupStatus.GreenField) {
 throw new error_1.FirebaseError(`Brownfield database are protected from SQL changes by Data Connect.\n` +
 `You can use the SQL diff generated by 'firebase dataconnect:sql:diff' to assist you in applying the required changes to your CloudSQL database. Connector deployment will succeed when there is no required diff changes.\n` +
 `If you would like Data Connect to manage your database schema, run 'firebase dataconnect:sql:setup'`);
 }
-if (!(await (0, permissionsSetup_1.checkSQLRoleIsGranted)(options, instanceId, databaseId, (0, permissions_1.firebaseowner)(databaseId), (await (0, connect_1.getIAMUser)(options)).user))) {
+if (!(await (0, permissionsSetup_1.checkSQLRoleIsGranted)(options, instanceId, databaseId, (0, permissions_1.firebaseowner)(databaseId, schemaName), (await (0, connect_1.getIAMUser)(options)).user))) {
 if (!userIsCSQLAdmin) {
 throw new error_1.FirebaseError(`Command aborted. Only users granted firebaseowner SQL role can run migrations.`);
 }
 const account = (await (0, requireAuth_1.requireAuth)(options));
 (0, utils_1.logLabeledBullet)("dataconnect", `Granting firebaseowner role to myself ${account}...`);
-await (0, permissionsSetup_1.grantRoleTo)(options, instanceId, databaseId, "owner", account);
+await (0, permissionsSetup_1.grantRoleTo)(options, instanceId, databaseId, "owner", account, schemaName);
 }
 if (commandsToExecuteBySuperUser.length) {
 (0, utils_1.logLabeledBullet)("dataconnect", `Executing admin SQL commands as superuser...`);
 await (0, connect_1.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, commandsToExecuteBySuperUser.map((d) => d.sql), false);
 }
 if (commandsToExecuteByOwner.length) {
-await (0, connect_1.executeSqlCmdsAsIamUser)(options, instanceId, databaseId, [
+await (0, connect_1.executeSqlCmdsAsIamUser)(options, instanceId, databaseId, [
+`SET ROLE "${(0, permissions_1.firebaseowner)(databaseId, schemaName)}"`,
+...commandsToExecuteByOwner.map((d) => d.sql),
+], false);
 return incompatibleSchemaError.diffs;
 }
 }

@@ -421,7 +428,7 @@ function displayInvalidConnectors(invalidConnectors) {
 (0, utils_1.logLabeledWarning)("dataconnect", `The schema you are deploying is incompatible with the following existing connectors: ${clc.bold(connectorIds)}.`);
 (0, utils_1.logLabeledWarning)("dataconnect", `This is a ${clc.red("breaking")} change and may break existing apps.`);
 }
-async function ensureServiceIsConnectedToCloudSql(serviceName, instanceName, databaseId, linkIfNotConnected) {
+async function ensureServiceIsConnectedToCloudSql(serviceName, instanceName, databaseId, linkIfNotConnected, schemaName) {
 let currentSchema = await (0, client_1.getSchema)(serviceName);
 let postgresql = currentSchema?.datasources?.find((d) => d.postgresql)?.postgresql;
 if (currentSchema?.reconciling &&

@@ -478,6 +485,7 @@ async function ensureServiceIsConnectedToCloudSql(serviceName, instanceName, dat
 try {
 postgresql.schemaValidation = "STRICT";
 postgresql.database = databaseId;
+postgresql.schema = schemaName;
 postgresql.cloudSql = { instance: instanceName };
 await (0, client_1.upsertSchema)(currentSchema, false);
 }
package/lib/dataconnect/types.js CHANGED

@@ -16,6 +16,7 @@ function toDatasource(projectId, locationId, ds) {
 return {
 postgresql: {
 database: ds.postgresql.database,
+schema: ds.postgresql.schema,
 cloudSql: {
 instance: `projects/${projectId}/locations/${locationId}/instances/${ds.postgresql.cloudSql.instanceId}`,
 },

@@ -226,18 +226,33 @@ function toBackend(build, paramValues) {
 r.resolveInts(bkEndpoint, bdEndpoint, "timeoutSeconds", "maxInstances", "minInstances", "concurrency");
 proto.convertIfPresent(bkEndpoint, bdEndpoint, "cpu", (0, functional_1.nullsafeVisitor)((cpu) => (cpu === "gcf_gen1" ? cpu : r.resolveInt(cpu))));
 if (bdEndpoint.vpc) {
-
-if (bdEndpoint.vpc.connector &&
-
+bkEndpoint.vpc = {};
+if (typeof bdEndpoint.vpc.connector !== "undefined" && bdEndpoint.vpc.connector !== null) {
+const connector = params.resolveString(bdEndpoint.vpc.connector, paramValues);
+bkEndpoint.vpc.connector =
+connector.includes("/") || connector === ""
+? connector
+: `projects/${bdEndpoint.project}/locations/${region}/connectors/${connector}`;
 }
-bkEndpoint.vpc = { connector: bdEndpoint.vpc.connector };
 if (bdEndpoint.vpc.egressSettings) {
-const
-if (!backend.
-throw new error_1.FirebaseError(`Value "${
-"egress setting. Valid values are PRIVATE_RANGES_ONLY and ALL_TRAFFIC");
+const egress = params.resolveString(bdEndpoint.vpc.egressSettings, paramValues);
+if (!backend.AllVpcEgressSettings.includes(egress)) {
+throw new error_1.FirebaseError(`Value "${egress}" is an invalid egress setting.`);
 }
-bkEndpoint.vpc.egressSettings =
+bkEndpoint.vpc.egressSettings = egress;
+}
+if (bdEndpoint.vpc.networkInterfaces) {
+bkEndpoint.vpc.networkInterfaces = bdEndpoint.vpc.networkInterfaces.map((ni) => {
+const resolved = {};
+if (ni.network)
+resolved.network = params.resolveString(ni.network, paramValues);
+if (ni.subnetwork)
+resolved.subnetwork = params.resolveString(ni.subnetwork, paramValues);
+if (ni.tags) {
+resolved.tags = ni.tags.map((tag) => params.resolveString(tag, paramValues));
+}
+return resolved;
+});
 }
 }
 else if (bdEndpoint.vpc === null) {
@@ -88,10 +88,16 @@ function assertBuildEndpoint(ep, id) {
 });
 if (ep.vpc) {
 (0, parsing_1.assertKeyTypes)(prefix + ".vpc", ep.vpc, {
-connector: "string",
+connector: "string?",
 egressSettings: (setting) => setting === null || build.AllVpcEgressSettings.includes(setting),
+networkInterfaces: "array?",
 });
-
+if (!ep.vpc.connector && !ep.vpc.networkInterfaces) {
+throw new error_1.FirebaseError(`VPC settings on ${id} must specify either 'connector' or 'networkInterfaces'`);
+}
+if (ep.vpc.connector && ep.vpc.networkInterfaces) {
+throw new error_1.FirebaseError(`VPC settings on ${id} cannot specify both 'connector' and 'networkInterfaces'`);
+}
 }
 let triggerCount = 0;
 if (ep.httpsTrigger) {
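Taken together, the two hunks above let a functions build declare Direct VPC egress through `networkInterfaces` instead of a Serverless VPC Access connector, and the manifest validation now requires exactly one of the two. A hedged sketch of the endpoint shapes involved (all values are placeholders):

```js
// Accepted: connector-based VPC access (the pre-existing shape).
const withConnector = {
  vpc: { connector: "my-connector", egressSettings: "PRIVATE_RANGES_ONLY" },
};

// Accepted after this change: Direct VPC egress via network interfaces.
const withDirectVpc = {
  vpc: {
    networkInterfaces: [{ network: "default", subnetwork: "default", tags: ["fn"] }],
    egressSettings: "ALL_TRAFFIC",
  },
};

// Rejected by assertBuildEndpoint: neither, or both, of connector/networkInterfaces.
const invalid = {
  vpc: { connector: "my-connector", networkInterfaces: [{ network: "default" }] },
};
```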
@@ -1,27 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getLocalAppHostingConfiguration = getLocalAppHostingConfiguration;
-const path_1 = require("path");
 const config_1 = require("../../apphosting/config");
-const yaml_1 = require("../../apphosting/yaml");
 async function getLocalAppHostingConfiguration(backendDir) {
-
-const fileNameToPathMap = Object.fromEntries(appHostingConfigPaths.map((path) => [(0, path_1.basename)(path), path]));
-const output = yaml_1.AppHostingYamlConfig.empty();
-const baseFilePath = fileNameToPathMap[config_1.APPHOSTING_BASE_YAML_FILE];
-const emulatorsFilePath = fileNameToPathMap[config_1.APPHOSTING_EMULATORS_YAML_FILE];
-const localFilePath = fileNameToPathMap[config_1.APPHOSTING_LOCAL_YAML_FILE];
-if (baseFilePath) {
-const baseFile = await yaml_1.AppHostingYamlConfig.loadFromFile(baseFilePath);
-output.merge(baseFile, false);
-}
-if (emulatorsFilePath) {
-const emulatorsConfig = await yaml_1.AppHostingYamlConfig.loadFromFile(emulatorsFilePath);
-output.merge(emulatorsConfig, false);
-}
-if (localFilePath) {
-const localYamlConfig = await yaml_1.AppHostingYamlConfig.loadFromFile(localFilePath);
-output.merge(localYamlConfig, true);
-}
-return output;
+return (0, config_1.getAppHostingConfiguration)(backendDir);
 }

@@ -753,7 +753,8 @@ async function exportEmulatorData(exportPath, options, initiatedBy) {
 }
 utils.logBullet(`Exporting data to: ${exportAbsPath}`);
 try {
-
+const targets = filterEmulatorTargets(options);
+await hubClient.postExport({ path: exportAbsPath, initiatedBy, targets });
 }
 catch (e) {
 throw new error_1.FirebaseError("Export request failed, see emulator logs for more information.", {
package/lib/emulator/hub.js CHANGED

@@ -71,11 +71,13 @@ class EmulatorHub extends ExpressBasedEmulator_1.ExpressBasedEmulator {
 }
 const path = req.body.path;
 const initiatedBy = req.body.initiatedBy || "unknown";
+const targets = req.body.targets;
 utils.logLabeledBullet("emulators", `Received export request. Exporting data to ${path}.`);
 try {
 await new hubExport_1.HubExport(this.args.projectId, {
 path,
 initiatedBy,
+targets,
 }).exportAll();
 utils.logLabeledSuccess("emulators", "Export complete.");
 res.status(200).send({

@@ -19,6 +19,7 @@ class HubExport {
 this.projectId = projectId;
 this.options = options;
 this.exportPath = options.path;
+this.exportTargets = options.targets ?? [...types_1.IMPORT_EXPORT_EMULATORS];
 this.tmpDir = fs.mkdtempSync(`firebase-export-${new Date().getTime()}`);
 }
 static readMetadata(exportPath) {

@@ -36,14 +37,14 @@ class HubExport {
 }
 }
 async exportAll() {
-const toExport = types_1.ALL_EMULATORS.filter(shouldExport);
+const toExport = types_1.ALL_EMULATORS.filter(shouldExport).filter((e) => this.exportTargets.includes(e));
 if (toExport.length === 0) {
 throw new error_1.FirebaseError("No running emulators support import/export.");
 }
 const metadata = {
 version: hub_1.EmulatorHub.CLI_VERSION,
 };
-if (shouldExport(types_1.Emulators.FIRESTORE)) {
+if (shouldExport(types_1.Emulators.FIRESTORE) && toExport.includes(types_1.Emulators.FIRESTORE)) {
 metadata.firestore = {
 version: (0, downloadableEmulators_1.getDownloadDetails)(types_1.Emulators.FIRESTORE).version,
 path: "firestore_export",

@@ -51,28 +52,28 @@ class HubExport {
 };
 await this.exportFirestore(metadata);
 }
-if (shouldExport(types_1.Emulators.DATABASE)) {
+if (shouldExport(types_1.Emulators.DATABASE) && toExport.includes(types_1.Emulators.DATABASE)) {
 metadata.database = {
 version: (0, downloadableEmulators_1.getDownloadDetails)(types_1.Emulators.DATABASE).version,
 path: "database_export",
 };
 await this.exportDatabase(metadata);
 }
-if (shouldExport(types_1.Emulators.AUTH)) {
+if (shouldExport(types_1.Emulators.AUTH) && toExport.includes(types_1.Emulators.AUTH)) {
 metadata.auth = {
 version: hub_1.EmulatorHub.CLI_VERSION,
 path: "auth_export",
 };
 await this.exportAuth(metadata);
 }
-if (shouldExport(types_1.Emulators.STORAGE)) {
+if (shouldExport(types_1.Emulators.STORAGE) && toExport.includes(types_1.Emulators.STORAGE)) {
 metadata.storage = {
 version: hub_1.EmulatorHub.CLI_VERSION,
 path: "storage_export",
 };
 await this.exportStorage(metadata);
 }
-if (shouldExport(types_1.Emulators.DATACONNECT)) {
+if (shouldExport(types_1.Emulators.DATACONNECT) && toExport.includes(types_1.Emulators.DATACONNECT)) {
 metadata.dataconnect = {
 version: hub_1.EmulatorHub.CLI_VERSION,
 path: "dataconnect_export",
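The hub's export endpoint now accepts an optional `targets` list, and `HubExport` falls back to every import/export-capable emulator when it is omitted, so older clients keep today's behavior. A sketch of the request body the CLI now builds (the emulator names and `initiatedBy` value are illustrative):

```js
// Sketch of the export options passed to the emulator hub; only `path` and
// `initiatedBy` existed before this change.
const exportRequest = {
  path: "/absolute/path/to/export-dir",
  initiatedBy: "manual",
  // New: limit the export to specific emulators. When omitted, HubExport
  // defaults to [...IMPORT_EXPORT_EMULATORS], i.e. everything exportable.
  targets: ["firestore", "auth"],
};
```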
package/lib/experiments.js CHANGED

@@ -104,6 +104,11 @@ exports.ALL_EXPERIMENTS = experiments({
 default: true,
 public: false,
 },
+apphostinglocalbuilds: {
+shortDescription: "Enable App Hosting local builds",
+default: false,
+public: false,
+},
 dataconnect: {
 shortDescription: "Deprecated. Previosuly, enabled Data Connect related features.",
 fullDescription: "Deprecated. Previously, enabled Data Connect related features.",

@@ -133,8 +138,8 @@ exports.ALL_EXPERIMENTS = experiments({
 },
 fdcift: {
 shortDescription: "Enable instrumentless trial for Data Connect",
+default: true,
 public: false,
-default: false,
 },
 apptesting: {
 shortDescription: "Adds experimental App Testing feature",

@@ -145,11 +150,6 @@ exports.ALL_EXPERIMENTS = experiments({
 default: true,
 public: false,
 },
-studioexport: {
-shortDescription: "Enable the experimental studio:export command.",
-default: false,
-public: false,
-},
 });
 function isValidExperiment(name) {
 return Object.keys(exports.ALL_EXPERIMENTS).includes(name);
@@ -7,13 +7,51 @@ const fs = require("fs/promises");
 const path = require("path");
 const child_process_1 = require("child_process");
 const logger_1 = require("../logger");
-const error_1 = require("../error");
 const prompt = require("../prompt");
 const apphosting = require("../gcp/apphosting");
 const utils = require("../utils");
 const templates_1 = require("../templates");
+const track = require("../track");
 const secrets_1 = require("../apphosting/secrets");
 const env = require("../functions/env");
+const error_1 = require("../error");
+async function detectAppType(rootPath) {
+try {
+await fs.access(path.join(rootPath, "pubspec.yaml"));
+return "FLUTTER";
+}
+catch {
+}
+try {
+await fs.access(path.join(rootPath, "angular.json"));
+return "ANGULAR";
+}
+catch {
+}
+try {
+const packageJsonPath = path.join(rootPath, "package.json");
+const packageJsonContent = await fs.readFile(packageJsonPath, "utf8");
+const packageJson = JSON.parse(packageJsonContent);
+const deps = { ...packageJson.dependencies, ...packageJson.devDependencies };
+if (deps.next) {
+return "NEXT_JS";
+}
+if (deps["@angular/core"]) {
+return "ANGULAR";
+}
+}
+catch {
+}
+for (const configFile of ["next.config.js", "next.config.mjs"]) {
+try {
+await fs.access(path.join(rootPath, configFile));
+return "NEXT_JS";
+}
+catch {
+}
+}
+return "OTHER";
+}
 async function downloadGitHubDir(apiUrl, localPath) {
 const response = await fetch(apiUrl);
 if (!response.ok) {

@@ -45,6 +83,8 @@ async function extractMetadata(rootPath, overrideProjectId) {
 catch (err) {
 logger_1.logger.debug(`Could not read metadata.json at ${metadataPath}: ${err}`);
 }
+logger_1.logger.debug(`overrideProjectId ${overrideProjectId}`);
+logger_1.logger.debug(`metadata.projectId ${metadata.projectId}`);
 let projectId = overrideProjectId || metadata.projectId;
 if (!projectId) {
 try {

@@ -60,7 +100,7 @@ async function extractMetadata(rootPath, overrideProjectId) {
 logger_1.logger.info(`✅ Detected Firebase Project: ${projectId}`);
 }
 else {
-logger_1.logger.info(
+logger_1.logger.info(`❌ Failed to determine the Firebase Project ID. You can set a project later with 'firebase use <project-id>' or by setting the '--project' flag.`);
 }
 let appName = "firebase-studio-export";
 let blueprintContent = "";

@@ -146,18 +186,30 @@ async function injectAgyContext(rootPath, projectId, appName) {
 logger_1.logger.debug(`Could not read or write startup workflow: ${err}`);
 }
 }
-async function
-if (startAgy
-return;
-}
-
-
-
+async function getAgyCommand(startAgy) {
+if (!startAgy) {
+return undefined;
+}
+const commands = ["agy", "antigravity"];
+for (const cmd of commands) {
+if (utils.commandExistsSync(cmd)) {
+logger_1.logger.info(`✅ Antigravity IDE CLI (${cmd}) detected`);
+return cmd;
+}
 }
-
-const
-
+if (process.platform === "darwin") {
+const macPath = "/Applications/Antigravity.app/Contents/Resources/app/bin/agy";
+try {
+await fs.access(macPath);
+logger_1.logger.info(`✅ Antigravity IDE CLI detected at ${macPath}`);
+return macPath;
+}
+catch {
+}
 }
+const downloadLink = "https://antigravity.google/download";
+logger_1.logger.info(`⚠️ Antigravity IDE CLI (agy) not found in your PATH. To ensure a seamless migration, please download and install Antigravity: ${downloadLink}`);
+return undefined;
 }
 async function createFirebaseConfigs(rootPath, projectId) {
 if (!projectId) {

@@ -334,7 +386,8 @@ async function uploadSecrets(rootPath, projectId) {
 }
 }
 async function askToOpenAntigravity(rootPath, appName, startAgy) {
-
+const agyCommand = await getAgyCommand(startAgy);
+if (!startAgy || !agyCommand) {
 logger_1.logger.info('\n👉 Next steps: Open this folder in Antigravity and run the "Initial Project Setup" workflow.');
 return;
 }

@@ -345,7 +398,7 @@ async function askToOpenAntigravity(rootPath, appName, startAgy) {
 if (answer) {
 logger_1.logger.info(`⏳ Opening ${appName} in Antigravity...`);
 try {
-const agyProcess = (0, child_process_1.spawn)(
+const agyProcess = (0, child_process_1.spawn)(agyCommand, ["."], {
 cwd: rootPath,
 stdio: "ignore",
 detached: true,

@@ -361,8 +414,14 @@ async function askToOpenAntigravity(rootPath, appName, startAgy) {
 }
 }
 async function migrate(rootPath, options = { startAgy: true }) {
+if (process.platform === "win32") {
+throw new error_1.FirebaseError("Firebase Studio migration is currently not supported on Windows.", {
+exit: 1,
+});
+}
+const appType = await detectAppType(rootPath);
+void track.trackGA4("firebase_studio_migrate", { app_type: appType, result: "started" });
 logger_1.logger.info("🚀 Starting Firebase Studio to Antigravity migration...");
-await assertSystemState(options.startAgy);
 const { projectId, appName, blueprintContent } = await extractMetadata(rootPath, options.project);
 await updateReadme(rootPath, blueprintContent, appName);
 await createFirebaseConfigs(rootPath, projectId);

@@ -374,5 +433,6 @@ async function migrate(rootPath, options = { startAgy: true }) {
 if (currentFolderName === "download") {
 logger_1.logger.info(`\n💡 Tip: You might want to rename this folder to "${appName.toLowerCase().replace(/\s+/g, "-")}"`);
 }
+await track.trackGA4("firebase_studio_migrate", { app_type: appType, result: "success" });
 await askToOpenAntigravity(rootPath, appName, options.startAgy);
 }
@@ -10,7 +10,12 @@ exports.PRERENDER_MANIFEST = "prerender-manifest.json";
 exports.ROUTES_MANIFEST = "routes-manifest.json";
 exports.APP_PATHS_MANIFEST = "app-paths-manifest.json";
 exports.SERVER_REFERENCE_MANIFEST = "server-reference-manifest.json";
-exports.CONFIG_FILES = [
+exports.CONFIG_FILES = [
+"next.config.js",
+"next.config.mjs",
+"next.config.ts",
+"next.config.mts",
+];
 exports.ESBUILD_VERSION = "^0.19.2";
 const WEBPACK_LAYERS_NAMES = {
 shared: "shared",

@@ -424,8 +424,12 @@ async function ɵcodegenFunctionsDirectory(sourceDir, destDir, target, context)
 logLevel: "error",
 external: productionDeps,
 };
-if (configFile === "next.config.mjs") {
+if (configFile === "next.config.mjs" || configFile === "next.config.mts") {
 esbuildArgs.format = "esm";
+esbuildArgs.outfile = (0, path_1.join)(destDir, "next.config.mjs");
+}
+else {
+esbuildArgs.outfile = (0, path_1.join)(destDir, "next.config.js");
 }
 const bundle = await esbuild.build(esbuildArgs);
 if (bundle.errors && bundle.errors.length > 0) {
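With TypeScript config files (`next.config.ts`/`next.config.mts`) now recognized, the bundling step in the hunk above also picks an output name explicitly so the emitted file keeps a loadable extension: `.mjs` for ESM configs, `.js` otherwise. A hedged sketch of the esbuild call with only the relevant options (the surrounding arguments are simplified):

```js
// Sketch: bundle a Next.js config file into the functions output directory.
const { join } = require("path");
const esbuild = require("esbuild");

async function bundleNextConfig(sourceDir, destDir, configFile, productionDeps) {
  const isEsm = configFile === "next.config.mjs" || configFile === "next.config.mts";
  await esbuild.build({
    entryPoints: [join(sourceDir, configFile)],
    bundle: true,
    platform: "node",
    logLevel: "error",
    external: productionDeps,
    ...(isEsm ? { format: "esm" } : {}),
    // Keep an extension Node can load: ESM configs become .mjs, everything else .js.
    outfile: join(destDir, isEsm ? "next.config.mjs" : "next.config.js"),
  });
}
```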
@@ -187,12 +187,22 @@ function functionFromEndpoint(endpoint) {
 return String(cpu);
 });
 if (endpoint.vpc) {
-
-
+if (endpoint.vpc.connector) {
+gcfFunction.serviceConfig.vpcConnector = endpoint.vpc.connector;
+gcfFunction.serviceConfig.vpcConnectorEgressSettings = endpoint.vpc.egressSettings || null;
+}
+else if (endpoint.vpc.networkInterfaces) {
+gcfFunction.serviceConfig.directVpcNetworkInterface = endpoint.vpc.networkInterfaces;
+gcfFunction.serviceConfig.directVpcEgress = endpoint.vpc.egressSettings
+? `VPC_EGRESS_${endpoint.vpc.egressSettings}`
+: null;
+}
 }
 else if (endpoint.vpc === null) {
 gcfFunction.serviceConfig.vpcConnector = null;
 gcfFunction.serviceConfig.vpcConnectorEgressSettings = null;
+gcfFunction.serviceConfig.directVpcNetworkInterface = null;
+gcfFunction.serviceConfig.directVpcEgress = null;
 }
 if (backend.isEventTriggered(endpoint)) {
 gcfFunction.eventTrigger = {

@@ -384,6 +394,17 @@ function endpointFromFunction(gcfFunction) {
 endpoint.vpc = { connector: gcfFunction.serviceConfig.vpcConnector };
 proto.renameIfPresent(endpoint.vpc, gcfFunction.serviceConfig, "egressSettings", "vpcConnectorEgressSettings");
 }
+else if (gcfFunction.serviceConfig.directVpcNetworkInterface) {
+endpoint.vpc = { networkInterfaces: gcfFunction.serviceConfig.directVpcNetworkInterface };
+if (gcfFunction.serviceConfig.directVpcEgress) {
+if (!gcfFunction.serviceConfig.directVpcEgress.startsWith("VPC_EGRESS_")) {
+throw new error_1.FirebaseError(`Unexpected VPC egress setting: ${gcfFunction.serviceConfig.directVpcEgress}`);
+}
+if (gcfFunction.serviceConfig.directVpcEgress !== "VPC_EGRESS_UNSPECIFIED") {
+endpoint.vpc.egressSettings = gcfFunction.serviceConfig.directVpcEgress.substring("VPC_EGRESS_".length);
+}
+}
+}
 const serviceName = gcfFunction.serviceConfig.service;
 if (!serviceName) {
 logger_1.logger.debug("Got a v2 function without a service name." +
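When a deployed function is read back, the Direct VPC egress enum carries a `VPC_EGRESS_` prefix, so the endpoint's `egressSettings` is recovered by stripping that prefix (and `VPC_EGRESS_UNSPECIFIED` is dropped). A small sketch of the round trip implemented by the hunks above:

```js
// Sketch of the egress-settings mapping; mirrors functionFromEndpoint/endpointFromFunction.
const toDirectVpcEgress = (egressSettings) =>
  egressSettings ? `VPC_EGRESS_${egressSettings}` : null;

const fromDirectVpcEgress = (directVpcEgress) => {
  if (!directVpcEgress || directVpcEgress === "VPC_EGRESS_UNSPECIFIED") {
    return undefined;
  }
  return directVpcEgress.substring("VPC_EGRESS_".length);
};

console.log(toDirectVpcEgress("ALL_TRAFFIC"));              // "VPC_EGRESS_ALL_TRAFFIC"
console.log(fromDirectVpcEgress("VPC_EGRESS_ALL_TRAFFIC")); // "ALL_TRAFFIC"
```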
@@ -125,9 +125,9 @@ async function setupSQLPermissions(instanceId, databaseId, schemaInfo, options,
 }
 async function greenFieldSchemaSetup(instanceId, databaseId, schema, options) {
 const revokes = [];
-if (await checkSQLRoleIsGranted(options, instanceId, databaseId, "cloudsqlsuperuser", (0, permissions_1.firebaseowner)(databaseId))) {
+if (await checkSQLRoleIsGranted(options, instanceId, databaseId, "cloudsqlsuperuser", (0, permissions_1.firebaseowner)(databaseId, schema))) {
 logger_1.logger.warn("Detected cloudsqlsuperuser was previously given to firebase owner, revoking to improve database security.");
-revokes.push(`REVOKE "cloudsqlsuperuser" FROM "${(0, permissions_1.firebaseowner)(databaseId)}"`);
+revokes.push(`REVOKE "cloudsqlsuperuser" FROM "${(0, permissions_1.firebaseowner)(databaseId, schema)}"`);
 }
 const user = (await (0, connect_1.getIAMUser)(options)).user;
 const projectNumber = await (0, projectUtils_1.needProjectNumber)(options);

@@ -222,10 +222,10 @@ async function brownfieldSqlSetup(instanceId, databaseId, schemaInfo, options, s
 ];
 await (0, connect_1.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, brownfieldSetupCmds, silent, true);
 }
-async function grantRoleTo(options, instanceId, databaseId, role, email) {
+async function grantRoleTo(options, instanceId, databaseId, role, email, schema = permissions_1.DEFAULT_SCHEMA) {
 const projectId = (0, projectUtils_1.needProjectId)(options);
 const { user, mode } = (0, connect_2.toDatabaseUser)(email);
 await cloudSqlAdminClient.createUser(projectId, instanceId, mode, user);
-const fdcSqlRole = exports.fdcSqlRoleMap[role](databaseId);
+const fdcSqlRole = exports.fdcSqlRoleMap[role](databaseId, schema);
 await (0, connect_1.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, [`GRANT "${fdcSqlRole}" TO "${user}"`], false);
 }

@@ -58,8 +58,8 @@ exports.run_tests = (0, tool_1.tool)("apptesting", {
 }, async ({ appId, releaseBinaryFile, testDevices, testCase }) => {
 const devices = testDevices || defaultDevices;
 const client = new client_1.AppDistributionClient();
-const
-return (0, util_1.toContent)(await client.createReleaseTest(
+const release = await (0, distribution_1.upload)(client, (0, options_parser_util_1.toAppName)(appId), new distribution_1.Distribution(releaseBinaryFile));
+return (0, util_1.toContent)(await client.createReleaseTest(release.name, devices, testCase));
 });
 exports.check_status = (0, tool_1.tool)("apptesting", {
 name: "check_status",
package/package.json CHANGED

@@ -10,6 +10,10 @@
 "type": "string",
 "description": "The name of the PostgreSQL database."
 },
+"schema": {
+"type": "string",
+"description": "The PostgreSQL schema name. Defaults to 'public' if not specified."
+},
 "cloudSql": {
 "additionalProperties": false,
 "type": "object",

@@ -9,6 +9,17 @@ App Name: ${appName}

 # Migration Guidelines

--
-
-- Use
+- `docs/blueprint.md` describes what this app was supposed to do when it was exported. Read it before making changes to the app.
+
+- Use Agent Skills to guide Firebase usage. If they are not available, install them by running `npx -y skills add firebase/agent-skills --skill "*" -y`
+
+- When asked to deploy or publish the app, deploy to Firebase App Hosting using `firebase deploy`.
+If you run into issues, ensure `firebase.json` contains a valid `apphosting` block (with `backendId` and `rootDir`).
+DO NOT use `firebase apphosting:backends:create` or `firebase apphosting:rollouts:create`.
+DO NOT try to deploy to Firebase Hosting.
+
+- When asked to add AI features, use Genkit (as already configured in `src/ai/`).
+
+# Important
+
+Never display, log, or commit sensitive credentials, .env files, or service account keys.