firebase-tools 14.7.0 → 14.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api.js +2 -4
- package/lib/commands/functions-delete.js +0 -2
- package/lib/commands/login.js +2 -1
- package/lib/dataconnect/ensureApis.js +6 -3
- package/lib/deploy/functions/checkIam.js +2 -2
- package/lib/deploy/functions/runtimes/discovery/index.js +3 -2
- package/lib/deploy/functions/runtimes/supported/types.js +1 -1
- package/lib/emulator/dataconnect/pgliteServer.js +144 -60
- package/lib/emulator/downloadableEmulatorInfo.json +18 -18
- package/lib/ensureApiEnabled.js +22 -0
- package/lib/fsutils.js +16 -1
- package/lib/gcp/cloudfunctions.js +15 -1
- package/lib/gcp/cloudfunctionsv2.js +10 -5
- package/lib/gcp/serviceusage.js +2 -2
- package/lib/gemini/fdcExperience.js +12 -2
- package/lib/init/features/dataconnect/index.js +23 -2
- package/lib/management/projects.js +38 -4
- package/lib/mcp/errors.js +6 -1
- package/lib/mcp/index.js +17 -5
- package/lib/mcp/tools/core/consult_assistant.js +1 -0
- package/lib/mcp/tools/core/index.js +2 -0
- package/lib/mcp/tools/core/list_apps.js +2 -2
- package/lib/mcp/tools/dataconnect/generate_operation.js +1 -0
- package/lib/mcp/tools/dataconnect/generate_schema.js +1 -0
- package/lib/mcp/tools/dataconnect/index.js +4 -0
- package/lib/operation-poller.js +3 -1
- package/package.json +4 -2
- package/schema/firebase-config.json +1 -0
- package/templates/init/functions/javascript/index.js +14 -1
- package/templates/init/functions/python/main.py +8 -0
- package/templates/init/functions/typescript/index.ts +14 -1
package/lib/api.js
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.runtimeconfigOrigin = exports.rulesOrigin = exports.resourceManagerOrigin = exports.crashlyticsApiOrigin = exports.messagingApiOrigin = exports.remoteConfigApiOrigin = exports.rtdbMetadataOrigin = exports.rtdbManagementOrigin = exports.realtimeOrigin = exports.extensionsTOSOrigin = exports.extensionsPublisherOrigin = exports.extensionsOrigin = exports.iamOrigin = exports.identityOrigin = exports.hostingOrigin = exports.googleOrigin = exports.pubsubOrigin = exports.cloudTasksOrigin = exports.cloudschedulerOrigin = exports.
|
|
4
|
-
exports.setScopes = exports.getScopes = exports.cloudAiCompanionOrigin = exports.vertexAIOrigin = exports.cloudSQLAdminOrigin = exports.dataConnectLocalConnString = exports.dataconnectP4SADomain = exports.dataconnectOrigin = exports.githubClientSecret = exports.githubClientId = exports.computeOrigin = exports.secretManagerOrigin = exports.githubApiOrigin = exports.githubOrigin = exports.serviceUsageOrigin = exports.cloudRunApiOrigin = exports.hostingApiOrigin = exports.firebaseStorageOrigin =
|
|
3
|
+
exports.storageOrigin = exports.runtimeconfigOrigin = exports.rulesOrigin = exports.resourceManagerOrigin = exports.crashlyticsApiOrigin = exports.messagingApiOrigin = exports.remoteConfigApiOrigin = exports.rtdbMetadataOrigin = exports.rtdbManagementOrigin = exports.realtimeOrigin = exports.extensionsTOSOrigin = exports.extensionsPublisherOrigin = exports.extensionsOrigin = exports.iamOrigin = exports.identityOrigin = exports.hostingOrigin = exports.googleOrigin = exports.pubsubOrigin = exports.cloudTasksOrigin = exports.cloudschedulerOrigin = exports.cloudbuildOrigin = exports.functionsDefaultRegion = exports.runOrigin = exports.functionsV2Origin = exports.functionsOrigin = exports.firestoreOrigin = exports.firestoreOriginOrEmulator = exports.firedataOrigin = exports.firebaseExtensionsRegistryOrigin = exports.firebaseApiOrigin = exports.eventarcOrigin = exports.dynamicLinksKey = exports.dynamicLinksOrigin = exports.consoleOrigin = exports.authManagementOrigin = exports.authOrigin = exports.apphostingGitHubAppInstallationURL = exports.apphostingP4SADomain = exports.apphostingOrigin = exports.appDistributionOrigin = exports.artifactRegistryDomain = exports.developerConnectP4SADomain = exports.developerConnectOrigin = exports.containerRegistryDomain = exports.cloudMonitoringOrigin = exports.cloudloggingOrigin = exports.cloudbillingOrigin = exports.clientSecret = exports.clientId = exports.authProxyOrigin = void 0;
|
|
4
|
+
exports.setScopes = exports.getScopes = exports.cloudAiCompanionOrigin = exports.vertexAIOrigin = exports.cloudSQLAdminOrigin = exports.dataConnectLocalConnString = exports.dataconnectP4SADomain = exports.dataconnectOrigin = exports.githubClientSecret = exports.githubClientId = exports.computeOrigin = exports.secretManagerOrigin = exports.githubApiOrigin = exports.githubOrigin = exports.serviceUsageOrigin = exports.cloudRunApiOrigin = exports.hostingApiOrigin = exports.firebaseStorageOrigin = void 0;
|
|
5
5
|
const constants_1 = require("./emulator/constants");
|
|
6
6
|
const logger_1 = require("./logger");
|
|
7
7
|
const scopes = require("./scopes");
|
|
@@ -72,8 +72,6 @@ const functionsDefaultRegion = () => utils.envOverride("FIREBASE_FUNCTIONS_DEFAU
|
|
|
72
72
|
exports.functionsDefaultRegion = functionsDefaultRegion;
|
|
73
73
|
const cloudbuildOrigin = () => utils.envOverride("FIREBASE_CLOUDBUILD_URL", "https://cloudbuild.googleapis.com");
|
|
74
74
|
exports.cloudbuildOrigin = cloudbuildOrigin;
|
|
75
|
-
const cloudCompanionOrigin = () => utils.envOverride("CLOUD_COMPANION_URL", "https://cloudaicompanion.googleapis.com");
|
|
76
|
-
exports.cloudCompanionOrigin = cloudCompanionOrigin;
|
|
77
75
|
const cloudschedulerOrigin = () => utils.envOverride("FIREBASE_CLOUDSCHEDULER_URL", "https://cloudscheduler.googleapis.com");
|
|
78
76
|
exports.cloudschedulerOrigin = cloudschedulerOrigin;
|
|
79
77
|
const cloudTasksOrigin = () => utils.envOverride("FIREBASE_CLOUD_TAKS_URL", "https://cloudtasks.googleapis.com");
|
|
@@ -16,7 +16,6 @@ const planner = require("../deploy/functions/release/planner");
|
|
|
16
16
|
const fabricator = require("../deploy/functions/release/fabricator");
|
|
17
17
|
const executor = require("../deploy/functions/release/executor");
|
|
18
18
|
const reporter = require("../deploy/functions/release/reporter");
|
|
19
|
-
const containerCleaner = require("../deploy/functions/containerCleaner");
|
|
20
19
|
const getProjectNumber_1 = require("../getProjectNumber");
|
|
21
20
|
exports.command = new command_1.Command("functions:delete [filters...]")
|
|
22
21
|
.description("delete one or more Cloud Functions by name or group name.")
|
|
@@ -91,5 +90,4 @@ exports.command = new command_1.Command("functions:delete [filters...]")
|
|
|
91
90
|
exit: 1,
|
|
92
91
|
});
|
|
93
92
|
}
|
|
94
|
-
await containerCleaner.cleanupBuildImages([], allEpToDelete);
|
|
95
93
|
});
|
package/lib/commands/login.js
CHANGED
|
@@ -27,7 +27,8 @@ exports.command = new command_1.Command("login")
|
|
|
27
27
|
return user;
|
|
28
28
|
}
|
|
29
29
|
if (!options.reauth) {
|
|
30
|
-
utils.logBullet("Firebase CLI
|
|
30
|
+
utils.logBullet("The Firebase CLI’s MCP server feature can optionally make use of Gemini in Firebase. " +
|
|
31
|
+
"Learn more about Gemini in Firebase and how it uses your data: https://firebase.google.com/docs/gemini-in-firebase#how-gemini-in-firebase-uses-your-data");
|
|
31
32
|
const geminiUsage = await (0, prompt_1.confirm)("Enable Gemini in Firebase features?");
|
|
32
33
|
configstore_1.configstore.set("gemini", geminiUsage);
|
|
33
34
|
logger_1.logger.info();
|
|
@@ -1,17 +1,20 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.ensureSparkApis = exports.ensureApis = void 0;
|
|
3
|
+
exports.ensureGIFApis = exports.ensureSparkApis = exports.ensureApis = void 0;
|
|
4
4
|
const api = require("../api");
|
|
5
5
|
const ensureApiEnabled_1 = require("../ensureApiEnabled");
|
|
6
|
+
const prefix = "dataconnect";
|
|
6
7
|
async function ensureApis(projectId) {
|
|
7
|
-
const prefix = "dataconnect";
|
|
8
8
|
await (0, ensureApiEnabled_1.ensure)(projectId, api.dataconnectOrigin(), prefix);
|
|
9
9
|
await (0, ensureApiEnabled_1.ensure)(projectId, api.cloudSQLAdminOrigin(), prefix);
|
|
10
10
|
await (0, ensureApiEnabled_1.ensure)(projectId, api.computeOrigin(), prefix);
|
|
11
11
|
}
|
|
12
12
|
exports.ensureApis = ensureApis;
|
|
13
13
|
async function ensureSparkApis(projectId) {
|
|
14
|
-
const prefix = "dataconnect";
|
|
15
14
|
await (0, ensureApiEnabled_1.ensure)(projectId, api.cloudSQLAdminOrigin(), prefix);
|
|
16
15
|
}
|
|
17
16
|
exports.ensureSparkApis = ensureSparkApis;
|
|
17
|
+
async function ensureGIFApis(projectId) {
|
|
18
|
+
await (0, ensureApiEnabled_1.ensure)(projectId, api.cloudAiCompanionOrigin(), prefix);
|
|
19
|
+
}
|
|
20
|
+
exports.ensureGIFApis = ensureGIFApis;
|
|
@@ -120,10 +120,10 @@ async function ensureGenkitMonitoringRoles(projectId, projectNumber, want, have,
|
|
|
120
120
|
.map((endpoint) => endpoint.serviceAccount || "")
|
|
121
121
|
.filter((value, index, self) => self.indexOf(value) === index);
|
|
122
122
|
const defaultServiceAccountIndex = serviceAccounts.indexOf("");
|
|
123
|
-
if (defaultServiceAccountIndex) {
|
|
123
|
+
if (defaultServiceAccountIndex !== -1) {
|
|
124
124
|
serviceAccounts[defaultServiceAccountIndex] = await gce.getDefaultServiceAccount(projectNumber);
|
|
125
125
|
}
|
|
126
|
-
const members = serviceAccounts.map((sa) => `serviceAccount:${sa}`);
|
|
126
|
+
const members = serviceAccounts.filter((sa) => !!sa).map((sa) => `serviceAccount:${sa}`);
|
|
127
127
|
const requiredBindings = [];
|
|
128
128
|
for (const monitoringRole of exports.GENKIT_MONITORING_ROLES) {
|
|
129
129
|
requiredBindings.push({
|
|
@@ -52,12 +52,13 @@ async function detectFromYaml(directory, project, runtime) {
|
|
|
52
52
|
exports.detectFromYaml = detectFromYaml;
|
|
53
53
|
async function detectFromPort(port, project, runtime, initialDelay = 0, timeout = 10000) {
|
|
54
54
|
let res;
|
|
55
|
+
const discoveryTimeout = getFunctionDiscoveryTimeout() || timeout;
|
|
55
56
|
const timedOut = new Promise((resolve, reject) => {
|
|
56
57
|
setTimeout(() => {
|
|
57
58
|
const originalError = "User code failed to load. Cannot determine backend specification.";
|
|
58
|
-
const error = `${originalError} Timeout after ${
|
|
59
|
+
const error = `${originalError} Timeout after ${discoveryTimeout}. See https://firebase.google.com/docs/functions/tips#avoid_deployment_timeouts_during_initialization'`;
|
|
59
60
|
reject(new error_1.FirebaseError(error));
|
|
60
|
-
},
|
|
61
|
+
}, discoveryTimeout);
|
|
61
62
|
});
|
|
62
63
|
if (initialDelay > 0) {
|
|
63
64
|
await new Promise((resolve) => setTimeout(resolve, initialDelay));
|
|
@@ -19,15 +19,17 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
|
|
|
19
19
|
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
|
|
20
20
|
};
|
|
21
21
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
22
|
-
exports.
|
|
22
|
+
exports.PGliteExtendedQueryPatch = exports.fromNodeSocket = exports.PostgresServer = exports.TRUNCATE_TABLES_SQL = void 0;
|
|
23
23
|
const pglite_1 = require("@electric-sql/pglite");
|
|
24
24
|
const { dynamicImport } = require(true && "../../dynamicImport");
|
|
25
25
|
const net = require("node:net");
|
|
26
26
|
const node_stream_1 = require("node:stream");
|
|
27
27
|
const fs = require("fs");
|
|
28
|
+
const path = require("node:path");
|
|
28
29
|
const pg_gateway_1 = require("pg-gateway");
|
|
29
30
|
const logger_1 = require("../../logger");
|
|
30
31
|
const error_1 = require("../../error");
|
|
32
|
+
const fsutils_1 = require("../../fsutils");
|
|
31
33
|
const node_string_decoder_1 = require("node:string_decoder");
|
|
32
34
|
exports.TRUNCATE_TABLES_SQL = `
|
|
33
35
|
DO $do$
|
|
@@ -41,23 +43,47 @@ BEGIN
|
|
|
41
43
|
EXECUTE COALESCE(_clear, 'select now()');
|
|
42
44
|
END
|
|
43
45
|
$do$;`;
|
|
46
|
+
const decoder = new node_string_decoder_1.StringDecoder();
|
|
47
|
+
const pgliteDebugLog = fs.createWriteStream("pglite-debug.log");
|
|
44
48
|
class PostgresServer {
|
|
45
49
|
async createPGServer(host = "127.0.0.1", port) {
|
|
46
50
|
const getDb = this.getDb.bind(this);
|
|
47
51
|
const server = net.createServer(async (socket) => {
|
|
48
52
|
const connection = await fromNodeSocket(socket, {
|
|
49
|
-
serverVersion: "
|
|
53
|
+
serverVersion: "17.4 (PGlite 0.3.3)",
|
|
50
54
|
auth: { method: "trust" },
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
55
|
+
onMessage(data, { isAuthenticated }) {
|
|
56
|
+
return __asyncGenerator(this, arguments, function* onMessage_1() {
|
|
57
|
+
var _a, e_1, _b, _c;
|
|
58
|
+
if (!isAuthenticated) {
|
|
59
|
+
return yield __await(void 0);
|
|
60
|
+
}
|
|
61
|
+
const db = yield __await(getDb());
|
|
62
|
+
if (data[0] === pg_gateway_1.FrontendMessageCode.Terminate) {
|
|
63
|
+
yield __await(db.query("DEALLOCATE ALL"));
|
|
64
|
+
}
|
|
65
|
+
const response = yield __await(db.execProtocolRaw(data));
|
|
66
|
+
try {
|
|
67
|
+
for (var _d = true, _e = __asyncValues(extendedQueryPatch.filterResponse(data, response)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a;) {
|
|
68
|
+
_c = _f.value;
|
|
69
|
+
_d = false;
|
|
70
|
+
try {
|
|
71
|
+
const message = _c;
|
|
72
|
+
yield yield __await(message);
|
|
73
|
+
}
|
|
74
|
+
finally {
|
|
75
|
+
_d = true;
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
|
80
|
+
finally {
|
|
81
|
+
try {
|
|
82
|
+
if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e));
|
|
83
|
+
}
|
|
84
|
+
finally { if (e_1) throw e_1.error; }
|
|
85
|
+
}
|
|
86
|
+
});
|
|
61
87
|
},
|
|
62
88
|
});
|
|
63
89
|
const extendedQueryPatch = new PGliteExtendedQueryPatch(connection);
|
|
@@ -79,30 +105,15 @@ class PostgresServer {
|
|
|
79
105
|
}
|
|
80
106
|
async getDb() {
|
|
81
107
|
if (!this.db) {
|
|
82
|
-
|
|
83
|
-
fs.mkdirSync(this.dataDirectory, { recursive: true });
|
|
84
|
-
}
|
|
85
|
-
const vector = (await dynamicImport("@electric-sql/pglite/vector")).vector;
|
|
86
|
-
const uuidOssp = (await dynamicImport("@electric-sql/pglite/contrib/uuid_ossp")).uuid_ossp;
|
|
87
|
-
const pgliteArgs = {
|
|
88
|
-
debug: this.debug,
|
|
89
|
-
extensions: {
|
|
90
|
-
vector,
|
|
91
|
-
uuidOssp,
|
|
92
|
-
},
|
|
93
|
-
dataDir: this.dataDirectory,
|
|
94
|
-
};
|
|
95
|
-
if (this.importPath) {
|
|
96
|
-
logger_1.logger.debug(`Importing from ${this.importPath}`);
|
|
97
|
-
const rf = fs.readFileSync(this.importPath);
|
|
98
|
-
const file = new File([rf], this.importPath);
|
|
99
|
-
pgliteArgs.loadDataDir = file;
|
|
100
|
-
}
|
|
101
|
-
this.db = await this.forceCreateDB(pgliteArgs);
|
|
102
|
-
await this.db.waitReady;
|
|
108
|
+
this.db = await this.forceCreateDB();
|
|
103
109
|
}
|
|
104
110
|
return this.db;
|
|
105
111
|
}
|
|
112
|
+
async getExtensions() {
|
|
113
|
+
const vector = (await dynamicImport("@electric-sql/pglite/vector")).vector;
|
|
114
|
+
const uuidOssp = (await dynamicImport("@electric-sql/pglite/contrib/uuid_ossp")).uuid_ossp;
|
|
115
|
+
return { vector, uuidOssp };
|
|
116
|
+
}
|
|
106
117
|
async clearDb() {
|
|
107
118
|
const db = await this.getDb();
|
|
108
119
|
await db.query(exports.TRUNCATE_TABLES_SQL);
|
|
@@ -113,18 +124,92 @@ class PostgresServer {
|
|
|
113
124
|
const arrayBuff = await dump.arrayBuffer();
|
|
114
125
|
fs.writeFileSync(exportPath, new Uint8Array(arrayBuff));
|
|
115
126
|
}
|
|
116
|
-
async
|
|
127
|
+
async migrateDb(pgliteArgs) {
|
|
128
|
+
if (!this.baseDataDirectory) {
|
|
129
|
+
throw new error_1.FirebaseError("Cannot migrate database without a data directory.");
|
|
130
|
+
}
|
|
131
|
+
const { PGlite: PGlite02 } = await dynamicImport("pglite-2");
|
|
132
|
+
const pgDump = (await dynamicImport("@electric-sql/pglite-tools/pg_dump")).pgDump;
|
|
133
|
+
logger_1.logger.info("Opening database with Postgres 16...");
|
|
134
|
+
const extensions = await this.getExtensions();
|
|
135
|
+
const dataDir = this.baseDataDirectory;
|
|
136
|
+
const oldDb = new PGlite02(Object.assign(Object.assign({}, pgliteArgs), { dataDir }));
|
|
137
|
+
await oldDb.waitReady;
|
|
138
|
+
const oldVersion = await oldDb.query("SELECT version();");
|
|
139
|
+
logger_1.logger.debug(`Old database version: ${oldVersion.rows[0].version}`);
|
|
140
|
+
if (!oldVersion.rows[0].version.includes("PostgreSQL 16")) {
|
|
141
|
+
await oldDb.close();
|
|
142
|
+
throw new error_1.FirebaseError("Migration started, but DB version is not PostgreSQL 16.");
|
|
143
|
+
}
|
|
144
|
+
logger_1.logger.info("Dumping data from old database...");
|
|
145
|
+
const dumpDir = await oldDb.dumpDataDir("none");
|
|
146
|
+
const tempOldDb = await PGlite02.create({
|
|
147
|
+
loadDataDir: dumpDir,
|
|
148
|
+
extensions,
|
|
149
|
+
});
|
|
150
|
+
const dumpResult = await pgDump({ pg: tempOldDb, args: ["--verbose", "--verbose"] });
|
|
151
|
+
await tempOldDb.close();
|
|
152
|
+
await oldDb.close();
|
|
153
|
+
logger_1.logger.info(`Moving old database directory to ${this.baseDataDirectory}/pg16...`);
|
|
154
|
+
const pg16Dir = this.getVersionedDataDir(16);
|
|
155
|
+
(0, fsutils_1.moveAll)(this.baseDataDirectory, pg16Dir);
|
|
156
|
+
logger_1.logger.info("If you need to use an older version of the Firebase CLI, you can restore from that directory.");
|
|
157
|
+
logger_1.logger.info("Creating new database with Postgres 17...");
|
|
158
|
+
const pg17Dir = this.getVersionedDataDir(17);
|
|
159
|
+
const newDb = new pglite_1.PGlite(Object.assign(Object.assign({}, pgliteArgs), { dataDir: pg17Dir }));
|
|
160
|
+
await newDb.waitReady;
|
|
161
|
+
logger_1.logger.info("Importing data into new database...");
|
|
162
|
+
const dumpText = await dumpResult.text();
|
|
163
|
+
await newDb.exec(dumpText);
|
|
164
|
+
await newDb.exec("SET SEARCH_PATH = public;");
|
|
165
|
+
logger_1.logger.info("Postgres database migration successful.");
|
|
166
|
+
return newDb;
|
|
167
|
+
}
|
|
168
|
+
getVersionedDataDir(version) {
|
|
169
|
+
if (!this.baseDataDirectory) {
|
|
170
|
+
return;
|
|
171
|
+
}
|
|
172
|
+
return path.join(this.baseDataDirectory, `pg${version}`);
|
|
173
|
+
}
|
|
174
|
+
async forceCreateDB() {
|
|
175
|
+
const baseArgs = {
|
|
176
|
+
debug: this.debug,
|
|
177
|
+
extensions: await this.getExtensions(),
|
|
178
|
+
};
|
|
179
|
+
const pg17Dir = this.getVersionedDataDir(17);
|
|
180
|
+
if (pg17Dir && !fs.existsSync(pg17Dir)) {
|
|
181
|
+
fs.mkdirSync(pg17Dir, { recursive: true });
|
|
182
|
+
}
|
|
183
|
+
if (this.importPath) {
|
|
184
|
+
logger_1.logger.debug(`Importing from ${this.importPath}`);
|
|
185
|
+
const rf = fs.readFileSync(this.importPath);
|
|
186
|
+
const file = new File([rf], this.importPath);
|
|
187
|
+
baseArgs.loadDataDir = file;
|
|
188
|
+
}
|
|
189
|
+
if (this.baseDataDirectory && fs.existsSync(this.baseDataDirectory)) {
|
|
190
|
+
const versionFilePath = path.join(this.baseDataDirectory, "PG_VERSION");
|
|
191
|
+
if (fs.existsSync(versionFilePath)) {
|
|
192
|
+
const version = fs.readFileSync(versionFilePath, "utf-8").trim();
|
|
193
|
+
logger_1.logger.debug(`Found Postgres version file with version: ${version}`);
|
|
194
|
+
if (version === "16") {
|
|
195
|
+
logger_1.logger.info("Detected a Postgres 16 data directory from an older version of firebase-tools. Migrating to Postgres 17...");
|
|
196
|
+
return this.migrateDb(baseArgs);
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
}
|
|
117
200
|
try {
|
|
118
|
-
const db =
|
|
201
|
+
const db = new pglite_1.PGlite(Object.assign(Object.assign({}, baseArgs), { dataDir: pg17Dir }));
|
|
202
|
+
await db.waitReady;
|
|
119
203
|
return db;
|
|
120
204
|
}
|
|
121
205
|
catch (err) {
|
|
122
|
-
if (
|
|
123
|
-
fs.rmSync(
|
|
124
|
-
const db =
|
|
206
|
+
if (pg17Dir && (0, error_1.hasMessage)(err) && /Database already exists/.test(err.message)) {
|
|
207
|
+
fs.rmSync(pg17Dir, { force: true, recursive: true });
|
|
208
|
+
const db = new pglite_1.PGlite(Object.assign(Object.assign({}, baseArgs), { dataDir: pg17Dir }));
|
|
209
|
+
await db.waitReady;
|
|
125
210
|
return db;
|
|
126
211
|
}
|
|
127
|
-
logger_1.logger.
|
|
212
|
+
logger_1.logger.warn(`Error from pglite: ${err}`);
|
|
128
213
|
throw new error_1.FirebaseError("Unexpected error starting up Postgres.");
|
|
129
214
|
}
|
|
130
215
|
}
|
|
@@ -140,12 +225,20 @@ class PostgresServer {
|
|
|
140
225
|
constructor(args) {
|
|
141
226
|
this.db = undefined;
|
|
142
227
|
this.server = undefined;
|
|
143
|
-
this.
|
|
228
|
+
this.baseDataDirectory = args.dataDirectory;
|
|
144
229
|
this.importPath = args.importPath;
|
|
145
|
-
this.debug = args.debug ?
|
|
230
|
+
this.debug = args.debug ? 1 : 0;
|
|
146
231
|
}
|
|
147
232
|
}
|
|
148
233
|
exports.PostgresServer = PostgresServer;
|
|
234
|
+
async function fromNodeSocket(socket, options) {
|
|
235
|
+
const rs = node_stream_1.Readable.toWeb(socket);
|
|
236
|
+
const ws = node_stream_1.Writable.toWeb(socket);
|
|
237
|
+
const opts = options
|
|
238
|
+
? Object.assign({}, options) : undefined;
|
|
239
|
+
return new pg_gateway_1.PostgresConnection({ readable: rs, writable: ws }, opts);
|
|
240
|
+
}
|
|
241
|
+
exports.fromNodeSocket = fromNodeSocket;
|
|
149
242
|
class PGliteExtendedQueryPatch {
|
|
150
243
|
constructor(connection) {
|
|
151
244
|
this.connection = connection;
|
|
@@ -154,62 +247,53 @@ class PGliteExtendedQueryPatch {
|
|
|
154
247
|
}
|
|
155
248
|
filterResponse(message, response) {
|
|
156
249
|
return __asyncGenerator(this, arguments, function* filterResponse_1() {
|
|
157
|
-
var _a,
|
|
250
|
+
var _a, e_2, _b, _c;
|
|
158
251
|
const pipelineStartMessages = [
|
|
159
252
|
pg_gateway_1.FrontendMessageCode.Parse,
|
|
160
253
|
pg_gateway_1.FrontendMessageCode.Bind,
|
|
161
254
|
pg_gateway_1.FrontendMessageCode.Close,
|
|
162
255
|
];
|
|
163
|
-
const decoder = new node_string_decoder_1.StringDecoder();
|
|
164
256
|
const decoded = decoder.write(message);
|
|
165
|
-
|
|
257
|
+
pgliteDebugLog.write("Front: " + decoded);
|
|
166
258
|
if (pipelineStartMessages.includes(message[0])) {
|
|
167
259
|
this.isExtendedQuery = true;
|
|
168
260
|
}
|
|
169
261
|
if (message[0] === pg_gateway_1.FrontendMessageCode.Sync) {
|
|
170
262
|
this.isExtendedQuery = false;
|
|
171
263
|
this.eqpErrored = false;
|
|
172
|
-
return yield __await(this.connection.createReadyForQuery());
|
|
173
264
|
}
|
|
174
265
|
try {
|
|
175
266
|
for (var _d = true, _e = __asyncValues((0, pg_gateway_1.getMessages)(response)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a;) {
|
|
176
267
|
_c = _f.value;
|
|
177
268
|
_d = false;
|
|
178
269
|
try {
|
|
179
|
-
const
|
|
270
|
+
const bm = _c;
|
|
180
271
|
if (this.eqpErrored) {
|
|
181
272
|
continue;
|
|
182
273
|
}
|
|
183
|
-
if (this.isExtendedQuery &&
|
|
274
|
+
if (this.isExtendedQuery && bm[0] === pg_gateway_1.BackendMessageCode.ErrorMessage) {
|
|
184
275
|
this.eqpErrored = true;
|
|
185
276
|
}
|
|
186
|
-
if (this.isExtendedQuery &&
|
|
187
|
-
|
|
277
|
+
if (this.isExtendedQuery && bm[0] === pg_gateway_1.BackendMessageCode.ReadyForQuery) {
|
|
278
|
+
pgliteDebugLog.write("Filtered: " + decoder.write(bm));
|
|
188
279
|
continue;
|
|
189
280
|
}
|
|
190
|
-
|
|
281
|
+
pgliteDebugLog.write("Sent: " + decoder.write(bm));
|
|
282
|
+
yield yield __await(bm);
|
|
191
283
|
}
|
|
192
284
|
finally {
|
|
193
285
|
_d = true;
|
|
194
286
|
}
|
|
195
287
|
}
|
|
196
288
|
}
|
|
197
|
-
catch (
|
|
289
|
+
catch (e_2_1) { e_2 = { error: e_2_1 }; }
|
|
198
290
|
finally {
|
|
199
291
|
try {
|
|
200
292
|
if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e));
|
|
201
293
|
}
|
|
202
|
-
finally { if (
|
|
294
|
+
finally { if (e_2) throw e_2.error; }
|
|
203
295
|
}
|
|
204
296
|
});
|
|
205
297
|
}
|
|
206
298
|
}
|
|
207
299
|
exports.PGliteExtendedQueryPatch = PGliteExtendedQueryPatch;
|
|
208
|
-
async function fromNodeSocket(socket, options) {
|
|
209
|
-
const rs = node_stream_1.Readable.toWeb(socket);
|
|
210
|
-
const ws = node_stream_1.Writable.toWeb(socket);
|
|
211
|
-
const opts = options
|
|
212
|
-
? Object.assign({}, options) : undefined;
|
|
213
|
-
return new pg_gateway_1.PostgresConnection({ readable: rs, writable: ws }, opts);
|
|
214
|
-
}
|
|
215
|
-
exports.fromNodeSocket = fromNodeSocket;
|
|
@@ -54,28 +54,28 @@
|
|
|
54
54
|
},
|
|
55
55
|
"dataconnect": {
|
|
56
56
|
"darwin": {
|
|
57
|
-
"version": "2.
|
|
58
|
-
"expectedSize":
|
|
59
|
-
"expectedChecksum": "
|
|
60
|
-
"expectedChecksumSHA256": "
|
|
61
|
-
"remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-macos-v2.
|
|
62
|
-
"downloadPathRelativeToCacheDir": "dataconnect-emulator-2.
|
|
57
|
+
"version": "2.8.0",
|
|
58
|
+
"expectedSize": 29279072,
|
|
59
|
+
"expectedChecksum": "0381ba8dd2eb67629cbbf4a88b276850",
|
|
60
|
+
"expectedChecksumSHA256": "bc88bca96c83df21f7ed4b66c2a518ca48a21ba259fe481c0911fb5010d2fa0f",
|
|
61
|
+
"remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-macos-v2.8.0",
|
|
62
|
+
"downloadPathRelativeToCacheDir": "dataconnect-emulator-2.8.0"
|
|
63
63
|
},
|
|
64
64
|
"win32": {
|
|
65
|
-
"version": "2.
|
|
66
|
-
"expectedSize":
|
|
67
|
-
"expectedChecksum": "
|
|
68
|
-
"expectedChecksumSHA256": "
|
|
69
|
-
"remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-windows-v2.
|
|
70
|
-
"downloadPathRelativeToCacheDir": "dataconnect-emulator-2.
|
|
65
|
+
"version": "2.8.0",
|
|
66
|
+
"expectedSize": 29769216,
|
|
67
|
+
"expectedChecksum": "622e7c7e23b0bd7592ad5a0ca0934987",
|
|
68
|
+
"expectedChecksumSHA256": "c23484eaf8f1ac68653bb29bb307de9d1c47a1fc4005610b094772e7ac368512",
|
|
69
|
+
"remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-windows-v2.8.0",
|
|
70
|
+
"downloadPathRelativeToCacheDir": "dataconnect-emulator-2.8.0.exe"
|
|
71
71
|
},
|
|
72
72
|
"linux": {
|
|
73
|
-
"version": "2.
|
|
74
|
-
"expectedSize":
|
|
75
|
-
"expectedChecksum": "
|
|
76
|
-
"expectedChecksumSHA256": "
|
|
77
|
-
"remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-linux-v2.
|
|
78
|
-
"downloadPathRelativeToCacheDir": "dataconnect-emulator-2.
|
|
73
|
+
"version": "2.8.0",
|
|
74
|
+
"expectedSize": 29208760,
|
|
75
|
+
"expectedChecksum": "dd9b1b9a55761f4d763a11bb7b84cbff",
|
|
76
|
+
"expectedChecksumSHA256": "7ffe98a28e9c185ddb07cdbf1610dc86747083afca9ac8cb850214ca8f1147c4",
|
|
77
|
+
"remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-linux-v2.8.0",
|
|
78
|
+
"downloadPathRelativeToCacheDir": "dataconnect-emulator-2.8.0"
|
|
79
79
|
}
|
|
80
80
|
}
|
|
81
81
|
}
|
package/lib/ensureApiEnabled.js
CHANGED
|
@@ -8,6 +8,7 @@ const apiv2_1 = require("./apiv2");
|
|
|
8
8
|
const utils = require("./utils");
|
|
9
9
|
const error_1 = require("./error");
|
|
10
10
|
const logger_1 = require("./logger");
|
|
11
|
+
const configstore_1 = require("./configstore");
|
|
11
12
|
exports.POLL_SETTINGS = {
|
|
12
13
|
pollInterval: 10000,
|
|
13
14
|
pollsBeforeRetry: 12,
|
|
@@ -18,6 +19,9 @@ const apiClient = new apiv2_1.Client({
|
|
|
18
19
|
});
|
|
19
20
|
async function check(projectId, apiUri, prefix, silent = false) {
|
|
20
21
|
const apiName = apiUri.startsWith("http") ? new URL(apiUri).hostname : apiUri;
|
|
22
|
+
if (checkAPIEnablementCache(projectId, apiName)) {
|
|
23
|
+
return true;
|
|
24
|
+
}
|
|
21
25
|
const res = await apiClient.get(`/projects/${projectId}/services/${apiName}`, {
|
|
22
26
|
headers: { "x-goog-quota-user": `projects/${projectId}` },
|
|
23
27
|
skipLog: { resBody: true },
|
|
@@ -26,6 +30,9 @@ async function check(projectId, apiUri, prefix, silent = false) {
|
|
|
26
30
|
if (isEnabled && !silent) {
|
|
27
31
|
utils.logLabeledSuccess(prefix, `required API ${(0, colorette_1.bold)(apiName)} is enabled`);
|
|
28
32
|
}
|
|
33
|
+
if (isEnabled) {
|
|
34
|
+
cacheEnabledAPI(projectId, apiName);
|
|
35
|
+
}
|
|
29
36
|
return isEnabled;
|
|
30
37
|
}
|
|
31
38
|
exports.check = check;
|
|
@@ -39,6 +46,7 @@ async function enable(projectId, apiName) {
|
|
|
39
46
|
headers: { "x-goog-quota-user": `projects/${projectId}` },
|
|
40
47
|
skipLog: { resBody: true },
|
|
41
48
|
});
|
|
49
|
+
cacheEnabledAPI(projectId, apiName);
|
|
42
50
|
}
|
|
43
51
|
catch (err) {
|
|
44
52
|
if ((0, error_1.isBillingError)(err)) {
|
|
@@ -120,3 +128,17 @@ function enableApiURI(projectId, apiName) {
|
|
|
120
128
|
return `https://console.cloud.google.com/apis/library/${apiName}?project=${projectId}`;
|
|
121
129
|
}
|
|
122
130
|
exports.enableApiURI = enableApiURI;
|
|
131
|
+
const API_ENABLEMENT_CACHE_KEY = "apiEnablementCache";
|
|
132
|
+
function checkAPIEnablementCache(projectId, apiName) {
|
|
133
|
+
var _a;
|
|
134
|
+
const cache = configstore_1.configstore.get(API_ENABLEMENT_CACHE_KEY);
|
|
135
|
+
return !!((_a = cache === null || cache === void 0 ? void 0 : cache[projectId]) === null || _a === void 0 ? void 0 : _a[apiName]);
|
|
136
|
+
}
|
|
137
|
+
function cacheEnabledAPI(projectId, apiName) {
|
|
138
|
+
const cache = (configstore_1.configstore.get(API_ENABLEMENT_CACHE_KEY) || {});
|
|
139
|
+
if (!cache[projectId]) {
|
|
140
|
+
cache[projectId] = {};
|
|
141
|
+
}
|
|
142
|
+
cache[projectId][apiName] = true;
|
|
143
|
+
configstore_1.configstore.set(API_ENABLEMENT_CACHE_KEY, cache);
|
|
144
|
+
}
|
package/lib/fsutils.js
CHANGED
|
@@ -1,8 +1,10 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.listFiles = exports.readFile = exports.dirExistsSync = exports.fileExistsSync = void 0;
|
|
3
|
+
exports.moveAll = exports.listFiles = exports.readFile = exports.dirExistsSync = exports.fileExistsSync = void 0;
|
|
4
4
|
const fs_1 = require("fs");
|
|
5
|
+
const path = require("path");
|
|
5
6
|
const error_1 = require("./error");
|
|
7
|
+
const fs_extra_1 = require("fs-extra");
|
|
6
8
|
function fileExistsSync(path) {
|
|
7
9
|
try {
|
|
8
10
|
return (0, fs_1.statSync)(path).isFile();
|
|
@@ -45,3 +47,16 @@ function listFiles(path) {
|
|
|
45
47
|
}
|
|
46
48
|
}
|
|
47
49
|
exports.listFiles = listFiles;
|
|
50
|
+
function moveAll(srcDir, destDir) {
|
|
51
|
+
if (!(0, fs_1.existsSync)(destDir)) {
|
|
52
|
+
(0, fs_1.mkdirSync)(destDir, { recursive: true });
|
|
53
|
+
}
|
|
54
|
+
const files = listFiles(srcDir);
|
|
55
|
+
for (const f of files) {
|
|
56
|
+
const srcPath = path.join(srcDir, f);
|
|
57
|
+
if (srcPath === destDir)
|
|
58
|
+
continue;
|
|
59
|
+
(0, fs_extra_1.moveSync)(srcPath, path.join(destDir, f));
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
exports.moveAll = moveAll;
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.functionFromEndpoint = exports.endpointFromFunction = exports.listAllFunctions = exports.listFunctions = exports.deleteFunction = exports.updateFunction = exports.setInvokerUpdate = exports.setInvokerCreate = exports.getIamPolicy = exports.setIamPolicy = exports.createFunction = exports.generateUploadUrl = exports.API_VERSION = void 0;
|
|
3
|
+
exports.functionFromEndpoint = exports.endpointFromFunction = exports.listAllFunctions = exports.listFunctions = exports.deleteFunction = exports.updateFunction = exports.setInvokerUpdate = exports.setInvokerCreate = exports.getIamPolicy = exports.setIamPolicy = exports.createFunction = exports.generateUploadUrl = exports.captureRuntimeValidationError = exports.API_VERSION = void 0;
|
|
4
4
|
const clc = require("colorette");
|
|
5
5
|
const error_1 = require("../error");
|
|
6
6
|
const logger_1 = require("../logger");
|
|
@@ -14,8 +14,22 @@ const api_1 = require("../api");
|
|
|
14
14
|
const constants_1 = require("../functions/constants");
|
|
15
15
|
exports.API_VERSION = "v1";
|
|
16
16
|
const client = new apiv2_1.Client({ urlPrefix: (0, api_1.functionsOrigin)(), apiVersion: exports.API_VERSION });
|
|
17
|
+
function captureRuntimeValidationError(errMessage) {
    // Extract the human-readable text embedded in a runtime-validation error
    // payload — the quoted `message: "..."` fragment — unescaping any escaped
    // quotes. Falls back to a generic pointer at the supported-runtimes list
    // when no such fragment is present.
    const quoted = /message: "((?:\\.|[^"\\])*)"/.exec(errMessage);
    if (quoted && quoted[1]) {
        return quoted[1].replace(/\\"/g, '"');
    }
    return "invalid runtime detected, please see https://cloud.google.com/functions/docs/runtime-support for the latest supported runtimes";
}
|
|
26
|
+
exports.captureRuntimeValidationError = captureRuntimeValidationError;
|
|
17
27
|
function functionsOpLogReject(funcName, type, err) {
|
|
18
28
|
var _a, _b, _c, _d;
|
|
29
|
+
if ((err === null || err === void 0 ? void 0 : err.message).includes("Runtime validation errors")) {
|
|
30
|
+
const capturedMessage = captureRuntimeValidationError(err.message);
|
|
31
|
+
utils.logWarning(clc.bold(clc.yellow("functions:")) + " " + capturedMessage + " for function " + funcName);
|
|
32
|
+
}
|
|
19
33
|
if (((_b = (_a = err === null || err === void 0 ? void 0 : err.context) === null || _a === void 0 ? void 0 : _a.response) === null || _b === void 0 ? void 0 : _b.statusCode) === 429) {
|
|
20
34
|
utils.logWarning(`${clc.bold(clc.yellow("functions:"))} got "Quota Exceeded" error while trying to ${type} ${funcName}. Waiting to retry...`);
|
|
21
35
|
}
|
|
@@ -12,6 +12,7 @@ const proto = require("./proto");
|
|
|
12
12
|
const utils = require("../utils");
|
|
13
13
|
const projectConfig = require("../functions/projectConfig");
|
|
14
14
|
const constants_1 = require("../functions/constants");
|
|
15
|
+
const cloudfunctions_1 = require("./cloudfunctions");
|
|
15
16
|
exports.API_VERSION = "v2";
|
|
16
17
|
const DEFAULT_MAX_INSTANCE_COUNT = 100;
|
|
17
18
|
const client = new apiv2_1.Client({
|
|
@@ -49,8 +50,12 @@ function mebibytes(memory) {
|
|
|
49
50
|
}
|
|
50
51
|
exports.mebibytes = mebibytes;
|
|
51
52
|
function functionsOpLogReject(func, type, err) {
|
|
52
|
-
var _a, _b, _c, _d, _e, _f;
|
|
53
|
-
if ((_a = err === null || err === void 0 ? void 0 : err.message) === null || _a === void 0 ? void 0 : _a.includes("
|
|
53
|
+
var _a, _b, _c, _d, _e, _f, _g;
|
|
54
|
+
if ((_a = err === null || err === void 0 ? void 0 : err.message) === null || _a === void 0 ? void 0 : _a.includes("Runtime validation errors")) {
|
|
55
|
+
const capturedMessage = (0, cloudfunctions_1.captureRuntimeValidationError)(err.message);
|
|
56
|
+
utils.logLabeledWarning("functions", capturedMessage + " for function " + func.name);
|
|
57
|
+
}
|
|
58
|
+
if ((_b = err === null || err === void 0 ? void 0 : err.message) === null || _b === void 0 ? void 0 : _b.includes("maxScale may not exceed")) {
|
|
54
59
|
const maxInstances = func.serviceConfig.maxInstanceCount || DEFAULT_MAX_INSTANCE_COUNT;
|
|
55
60
|
utils.logLabeledWarning("functions", `Your current project quotas don't allow for the current max instances setting of ${maxInstances}. ` +
|
|
56
61
|
"Either reduce this function's maximum instances, or request a quota increase on the underlying Cloud Run service " +
|
|
@@ -62,17 +67,17 @@ function functionsOpLogReject(func, type, err) {
|
|
|
62
67
|
}
|
|
63
68
|
else {
|
|
64
69
|
utils.logLabeledWarning("functions", `${err === null || err === void 0 ? void 0 : err.message}`);
|
|
65
|
-
if (((
|
|
70
|
+
if (((_d = (_c = err === null || err === void 0 ? void 0 : err.context) === null || _c === void 0 ? void 0 : _c.response) === null || _d === void 0 ? void 0 : _d.statusCode) === 429) {
|
|
66
71
|
utils.logLabeledWarning("functions", `Got "Quota Exceeded" error while trying to ${type} ${func.name}. Waiting to retry...`);
|
|
67
72
|
}
|
|
68
|
-
else if ((
|
|
73
|
+
else if ((_e = err === null || err === void 0 ? void 0 : err.message) === null || _e === void 0 ? void 0 : _e.includes("If you recently started to use Eventarc, it may take a few minutes before all necessary permissions are propagated to the Service Agent")) {
|
|
69
74
|
utils.logLabeledWarning("functions", `Since this is your first time using 2nd gen functions, we need a little bit longer to finish setting everything up. Retry the deployment in a few minutes.`);
|
|
70
75
|
}
|
|
71
76
|
utils.logLabeledWarning("functions", ` failed to ${type} function ${func.name}`);
|
|
72
77
|
}
|
|
73
78
|
throw new error_1.FirebaseError(`Failed to ${type} function ${func.name}`, {
|
|
74
79
|
original: err,
|
|
75
|
-
status: (
|
|
80
|
+
status: (_g = (_f = err === null || err === void 0 ? void 0 : err.context) === null || _f === void 0 ? void 0 : _f.response) === null || _g === void 0 ? void 0 : _g.statusCode,
|
|
76
81
|
context: { function: func.name },
|
|
77
82
|
});
|
|
78
83
|
}
|
package/lib/gcp/serviceusage.js
CHANGED
|
@@ -20,7 +20,7 @@ const serviceUsagePollerOptions = {
|
|
|
20
20
|
async function generateServiceIdentity(projectNumber, service, prefix) {
|
|
21
21
|
utils.logLabeledBullet(prefix, `generating the service identity for ${(0, colorette_1.bold)(service)}...`);
|
|
22
22
|
try {
|
|
23
|
-
const res = await exports.apiClient.post(`projects/${projectNumber}/services/${service}:generateServiceIdentity`);
|
|
23
|
+
const res = await exports.apiClient.post(`projects/${projectNumber}/services/${service}:generateServiceIdentity`, {}, { headers: { "x-goog-quota-user": `projects/${projectNumber}` } });
|
|
24
24
|
return res.body;
|
|
25
25
|
}
|
|
26
26
|
catch (err) {
|
|
@@ -35,6 +35,6 @@ async function generateServiceIdentityAndPoll(projectNumber, service, prefix) {
|
|
|
35
35
|
if (op.done) {
|
|
36
36
|
return;
|
|
37
37
|
}
|
|
38
|
-
await poller.pollOperation(Object.assign(Object.assign({}, serviceUsagePollerOptions), { operationResourceName: op.name }));
|
|
38
|
+
await poller.pollOperation(Object.assign(Object.assign({}, serviceUsagePollerOptions), { operationResourceName: op.name, headers: { "x-goog-quota-user": `projects/${projectNumber}` } }));
|
|
39
39
|
}
|
|
40
40
|
exports.generateServiceIdentityAndPoll = generateServiceIdentityAndPoll;
|
|
@@ -1,9 +1,10 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.generateOperation = exports.chatWithFirebase = exports.generateSchema = void 0;
|
|
3
|
+
exports.extractCodeBlock = exports.generateOperation = exports.chatWithFirebase = exports.generateSchema = void 0;
|
|
4
4
|
const apiv2_1 = require("../apiv2");
|
|
5
5
|
const api_1 = require("../api");
|
|
6
|
-
const
|
|
6
|
+
const error_1 = require("../error");
|
|
7
|
+
const apiClient = new apiv2_1.Client({ urlPrefix: (0, api_1.cloudAiCompanionOrigin)(), auth: true });
|
|
7
8
|
const SCHEMA_GENERATOR_EXPERIENCE = "/appeco/firebase/fdc-schema-generator";
|
|
8
9
|
const GEMINI_IN_FIREBASE_EXPERIENCE = "/appeco/firebase/firebase-chat/free";
|
|
9
10
|
const OPERATION_GENERATION_EXPERIENCE = "/appeco/firebase/fdc-query-generator";
|
|
@@ -44,3 +45,12 @@ async function generateOperation(prompt, service, project, chatHistory = []) {
|
|
|
44
45
|
return res.body.output.messages[0].content;
|
|
45
46
|
}
|
|
46
47
|
exports.generateOperation = generateOperation;
|
|
48
|
+
function extractCodeBlock(text) {
    // Pull the contents of the first fenced code block (``` ... ```),
    // optionally tagged with a lowercase language hint, out of generated
    // text and strip surrounding whitespace. Throws when no block exists.
    const fence = /```(?:[a-z]+\n)?([\s\S]*?)```/m;
    const hit = fence.exec(text);
    if (hit && hit[1]) {
        return hit[1].trim();
    }
    throw new error_1.FirebaseError(`No code block found in the generated response: ${text}`);
}
|
|
56
|
+
exports.extractCodeBlock = extractCodeBlock;
|
|
@@ -18,6 +18,8 @@ const utils_1 = require("../../../utils");
|
|
|
18
18
|
const cloudbilling_1 = require("../../../gcp/cloudbilling");
|
|
19
19
|
const sdk = require("./sdk");
|
|
20
20
|
const fileUtils_1 = require("../../../dataconnect/fileUtils");
|
|
21
|
+
const fdcExperience_1 = require("../../../gemini/fdcExperience");
|
|
22
|
+
const configstore_1 = require("../../../configstore");
|
|
21
23
|
const DATACONNECT_YAML_TEMPLATE = (0, templates_1.readTemplateSync)("init/dataconnect/dataconnect.yaml");
|
|
22
24
|
const CONNECTOR_YAML_TEMPLATE = (0, templates_1.readTemplateSync)("init/dataconnect/connector.yaml");
|
|
23
25
|
const SCHEMA_TEMPLATE = (0, templates_1.readTemplateSync)("init/dataconnect/schema.gql");
|
|
@@ -71,7 +73,7 @@ async function askQuestions(setup) {
|
|
|
71
73
|
default: true,
|
|
72
74
|
}));
|
|
73
75
|
if (shouldConfigureBackend) {
|
|
74
|
-
info = await
|
|
76
|
+
info = await promptForSchema(setup, info);
|
|
75
77
|
info = await promptForCloudSQL(setup, info);
|
|
76
78
|
info.shouldProvisionCSQL = !!(setup.projectId &&
|
|
77
79
|
(info.isNewInstance || info.isNewDatabase) &&
|
|
@@ -322,12 +324,31 @@ async function promptForCloudSQL(setup, info) {
|
|
|
322
324
|
}
|
|
323
325
|
return info;
|
|
324
326
|
}
|
|
325
|
-
async function
|
|
327
|
+
async function promptForSchema(setup, info) {
    // Prompt for a service ID when one wasn't supplied, then offer to have
    // Gemini in Firebase draft an initial schema for the new service.
    // Returns `info`, possibly populated with a generated schema.
    if (info.serviceId !== "") {
        return info;
    }
    info.serviceId = await (0, prompt_1.input)({
        message: "What ID would you like to use for this service?",
        default: (0, path_1.basename)(process.cwd()),
    });
    if (!setup.projectId) {
        return info;
    }
    // One-time pointer at Gemini's data-usage policy before the opt-in prompt.
    if (!configstore_1.configstore.get("gemini")) {
        (0, utils_1.logBullet)("Learn more about Gemini in Firebase and how it uses your data: https://firebase.google.com/docs/gemini-in-firebase#how-gemini-in-firebase-uses-your-data");
    }
    const useGemini = await (0, prompt_1.confirm)({
        message: `Do you want Gemini in Firebase to help generate a schema for your service?`,
        default: false,
    });
    if (!useGemini) {
        return info;
    }
    configstore_1.configstore.set("gemini", true);
    await (0, ensureApis_1.ensureGIFApis)(setup.projectId);
    const appDescription = await (0, prompt_1.input)({
        message: "Describe the app you are building:",
        default: "movie rating app",
    });
    const generated = await (0, utils_1.promiseWithSpinner)(() => (0, fdcExperience_1.generateSchema)(appDescription, setup.projectId), "Generating the Data Connect Schema...");
    info.schemaGql = [{ path: "schema.gql", content: (0, fdcExperience_1.extractCodeBlock)(generated) }];
    info.connectors = [emptyConnector];
    return info;
}
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.checkFirebaseEnabledForCloudProject = exports.getProject = exports.getFirebaseProject = exports.listFirebaseProjects = exports.getAvailableCloudProjectPage = exports.getFirebaseProjectPage = exports.addFirebaseToCloudProject = exports.createCloudProject = exports.promptAvailableProjectId = exports.getOrPromptProject = exports.addFirebaseToCloudProjectAndLog = exports.createFirebaseProjectAndLog = exports.promptProjectCreation = exports.ProjectParentResourceType = void 0;
|
|
3
|
+
exports.checkFirebaseEnabledForCloudProject = exports.getProject = exports.getFirebaseProject = exports.checkAndRecommendProjectId = exports.listFirebaseProjects = exports.getAvailableCloudProjectPage = exports.getFirebaseProjectPage = exports.addFirebaseToCloudProject = exports.createCloudProject = exports.promptAvailableProjectId = exports.getOrPromptProject = exports.addFirebaseToCloudProjectAndLog = exports.createFirebaseProjectAndLog = exports.promptProjectCreation = exports.ProjectParentResourceType = void 0;
|
|
4
4
|
const clc = require("colorette");
|
|
5
5
|
const ora = require("ora");
|
|
6
6
|
const apiv2_1 = require("../apiv2");
|
|
@@ -15,6 +15,7 @@ const TIMEOUT_MILLIS = 30000;
|
|
|
15
15
|
const MAXIMUM_PROMPT_LIST = 100;
|
|
16
16
|
const PROJECT_LIST_PAGE_SIZE = 1000;
|
|
17
17
|
const CREATE_PROJECT_API_REQUEST_TIMEOUT_MILLIS = 15000;
|
|
18
|
+
const CHECK_PROJECT_ID_API_REQUEST_TIMEOUT_MILLIS = 15000;
|
|
18
19
|
var ProjectParentResourceType;
|
|
19
20
|
(function (ProjectParentResourceType) {
|
|
20
21
|
ProjectParentResourceType["ORGANIZATION"] = "organization";
|
|
@@ -25,16 +26,23 @@ async function promptProjectCreation(options) {
|
|
|
25
26
|
const projectId = (_a = options.projectId) !== null && _a !== void 0 ? _a : (await prompt.input({
|
|
26
27
|
message: "Please specify a unique project id " +
|
|
27
28
|
`(${clc.yellow("warning")}: cannot be modified afterward) [6-30 characters]:\n`,
|
|
28
|
-
validate: (projectId) => {
|
|
29
|
+
validate: async (projectId) => {
|
|
29
30
|
if (projectId.length < 6) {
|
|
30
31
|
return "Project ID must be at least 6 characters long";
|
|
31
32
|
}
|
|
32
33
|
else if (projectId.length > 30) {
|
|
33
34
|
return "Project ID cannot be longer than 30 characters";
|
|
34
35
|
}
|
|
35
|
-
|
|
36
|
-
|
|
36
|
+
try {
|
|
37
|
+
const { isAvailable, suggestedProjectId } = await checkAndRecommendProjectId(projectId);
|
|
38
|
+
if (!isAvailable && suggestedProjectId) {
|
|
39
|
+
return `Project ID is taken or unavailable. Try ${clc.bold(suggestedProjectId)}.`;
|
|
40
|
+
}
|
|
37
41
|
}
|
|
42
|
+
catch (error) {
|
|
43
|
+
logger_1.logger.debug(`Couldn't check if project ID ${projectId} is available. Original error: ${error}`);
|
|
44
|
+
}
|
|
45
|
+
return true;
|
|
38
46
|
},
|
|
39
47
|
}));
|
|
40
48
|
const displayName = (_b = options.displayName) !== null && _b !== void 0 ? _b : (await prompt.input({
|
|
@@ -60,6 +68,11 @@ const firebaseAPIClient = new apiv2_1.Client({
|
|
|
60
68
|
auth: true,
|
|
61
69
|
apiVersion: "v1beta1",
|
|
62
70
|
});
|
|
71
|
+
const firebaseV1APIClient = new apiv2_1.Client({
|
|
72
|
+
urlPrefix: api.firebaseApiOrigin(),
|
|
73
|
+
auth: true,
|
|
74
|
+
apiVersion: "v1",
|
|
75
|
+
});
|
|
63
76
|
const resourceManagerClient = new apiv2_1.Client({
|
|
64
77
|
urlPrefix: api.resourceManagerOrigin(),
|
|
65
78
|
apiVersion: "v1",
|
|
@@ -306,6 +319,27 @@ async function listFirebaseProjects(pageSize) {
|
|
|
306
319
|
return projects;
|
|
307
320
|
}
|
|
308
321
|
exports.listFirebaseProjects = listFirebaseProjects;
|
|
322
|
+
async function checkAndRecommendProjectId(projectId) {
    // Ask the Firebase v1 API whether `projectId` is free to use for a new
    // project; when it isn't, the API may suggest an alternative ID. Any
    // failure (network, parsing) is wrapped in a uniform FirebaseError.
    try {
        const response = await firebaseV1APIClient.request({
            method: "POST",
            path: "/projects:checkProjectId",
            body: {
                proposedId: projectId,
            },
            timeout: CHECK_PROJECT_ID_API_REQUEST_TIMEOUT_MILLIS,
        });
        const body = response.body;
        const isAvailable = body.projectIdStatus === "PROJECT_ID_AVAILABLE";
        return { isAvailable, suggestedProjectId: body.suggestedProjectId };
    }
    catch (err) {
        throw new error_1.FirebaseError("Failed to check if project ID is available. See firebase-debug.log for more info.", { exit: 2, original: err });
    }
}
exports.checkAndRecommendProjectId = checkAndRecommendProjectId;
|
|
309
343
|
async function getFirebaseProject(projectId) {
|
|
310
344
|
try {
|
|
311
345
|
const res = await firebaseAPIClient.request({
|
package/lib/mcp/errors.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.mcpAuthError = exports.NO_PROJECT_ERROR = void 0;
|
|
3
|
+
exports.mcpGeminiError = exports.mcpAuthError = exports.NO_PROJECT_ERROR = void 0;
|
|
4
4
|
const util_1 = require("./util");
|
|
5
5
|
exports.NO_PROJECT_ERROR = (0, util_1.mcpError)('No active project was found. Use the `firebase_update_environment` tool to set the project directory to an absolute folder location containing a firebase.json config file. Alternatively, change the MCP server config to add [...,"--dir","/absolute/path/to/project/directory"] in its command-line arguments.', "PRECONDITION_FAILED");
|
|
6
6
|
function mcpAuthError() {
|
|
@@ -13,3 +13,8 @@ ${cmd} login
|
|
|
13
13
|
[ADC]: https://cloud.google.com/docs/authentication/application-default-credentials`);
|
|
14
14
|
}
|
|
15
15
|
exports.mcpAuthError = mcpAuthError;
|
|
16
|
+
function mcpGeminiError(projectId) {
    // Build a PRECONDITION_FAILED MCP error telling the user to enable the
    // Gemini in Firebase API. Use the public console host —
    // firebase.corp.google.com is Google-internal and unreachable for
    // external users, so the original link was useless outside Google.
    const consoleUrl = `https://console.firebase.google.com/project/${projectId}/overview`;
    return (0, util_1.mcpError)(`This tool uses the Gemini in Firebase API. Visit Firebase Console to enable the Gemini in Firebase API ${consoleUrl} and try again.`, "PRECONDITION_FAILED");
}
exports.mcpGeminiError = mcpGeminiError;
|
package/lib/mcp/index.js
CHANGED
|
@@ -17,6 +17,8 @@ const config_js_1 = require("../config.js");
|
|
|
17
17
|
const rc_js_1 = require("../rc.js");
|
|
18
18
|
const hubClient_js_1 = require("../emulator/hubClient.js");
|
|
19
19
|
const node_fs_1 = require("node:fs");
|
|
20
|
+
const ensureApiEnabled_js_1 = require("../ensureApiEnabled.js");
|
|
21
|
+
const api = require("../api.js");
|
|
20
22
|
const SERVER_VERSION = "0.1.0";
|
|
21
23
|
const cmd = new command_js_1.Command("experimental:mcp").before(requireAuth_js_1.requireAuth);
|
|
22
24
|
class FirebaseMcpServer {
|
|
@@ -165,7 +167,7 @@ class FirebaseMcpServer {
|
|
|
165
167
|
};
|
|
166
168
|
}
|
|
167
169
|
async mcpCallTool(request) {
|
|
168
|
-
var _a, _b, _c, _d, _e, _f;
|
|
170
|
+
var _a, _b, _c, _d, _e, _f, _g;
|
|
169
171
|
await this.detectProjectRoot();
|
|
170
172
|
const toolName = request.params.name;
|
|
171
173
|
const toolArgs = request.params.arguments;
|
|
@@ -185,6 +187,16 @@ class FirebaseMcpServer {
|
|
|
185
187
|
if (((_b = tool.mcp._meta) === null || _b === void 0 ? void 0 : _b.requiresAuth) && !accountEmail) {
|
|
186
188
|
return (0, errors_js_1.mcpAuthError)();
|
|
187
189
|
}
|
|
190
|
+
if ((_c = tool.mcp._meta) === null || _c === void 0 ? void 0 : _c.requiresGemini) {
|
|
191
|
+
if (configstore_js_1.configstore.get("gemini")) {
|
|
192
|
+
await (0, ensureApiEnabled_js_1.ensure)(projectId, api.cloudAiCompanionOrigin(), "");
|
|
193
|
+
}
|
|
194
|
+
else {
|
|
195
|
+
if (!(await (0, ensureApiEnabled_js_1.check)(projectId, api.cloudAiCompanionOrigin(), ""))) {
|
|
196
|
+
return (0, errors_js_1.mcpGeminiError)(projectId);
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
}
|
|
188
200
|
const options = { projectDir: this.cachedProjectRoot, cwd: this.cachedProjectRoot };
|
|
189
201
|
const toolsCtx = {
|
|
190
202
|
projectId: projectId,
|
|
@@ -198,8 +210,8 @@ class FirebaseMcpServer {
|
|
|
198
210
|
await (0, track_js_1.trackGA4)("mcp_tool_call", {
|
|
199
211
|
tool_name: toolName,
|
|
200
212
|
error: res.isError ? 1 : 0,
|
|
201
|
-
mcp_client_name: (
|
|
202
|
-
mcp_client_version: (
|
|
213
|
+
mcp_client_name: (_d = this.clientInfo) === null || _d === void 0 ? void 0 : _d.name,
|
|
214
|
+
mcp_client_version: (_e = this.clientInfo) === null || _e === void 0 ? void 0 : _e.version,
|
|
203
215
|
});
|
|
204
216
|
return res;
|
|
205
217
|
}
|
|
@@ -207,8 +219,8 @@ class FirebaseMcpServer {
|
|
|
207
219
|
await (0, track_js_1.trackGA4)("mcp_tool_call", {
|
|
208
220
|
tool_name: toolName,
|
|
209
221
|
error: 1,
|
|
210
|
-
mcp_client_name: (
|
|
211
|
-
mcp_client_version: (
|
|
222
|
+
mcp_client_name: (_f = this.clientInfo) === null || _f === void 0 ? void 0 : _f.name,
|
|
223
|
+
mcp_client_version: (_g = this.clientInfo) === null || _g === void 0 ? void 0 : _g.version,
|
|
212
224
|
});
|
|
213
225
|
return (0, util_js_1.mcpError)(err);
|
|
214
226
|
}
|
|
@@ -20,6 +20,7 @@ exports.consult_assistant = (0, tool_js_1.tool)({
|
|
|
20
20
|
_meta: {
|
|
21
21
|
requiresProject: true,
|
|
22
22
|
requiresAuth: true,
|
|
23
|
+
requiresGemini: true,
|
|
23
24
|
},
|
|
24
25
|
}, async ({ prompt }, { projectId }) => {
|
|
25
26
|
const schema = await (0, fdcExperience_js_1.chatWithFirebase)(prompt, projectId);
|
|
@@ -12,6 +12,7 @@ const init_js_1 = require("./init.js");
|
|
|
12
12
|
const get_environment_js_1 = require("./get_environment.js");
|
|
13
13
|
const update_environment_js_1 = require("./update_environment.js");
|
|
14
14
|
const list_projects_js_1 = require("./list_projects.js");
|
|
15
|
+
const consult_assistant_js_1 = require("./consult_assistant.js");
|
|
15
16
|
exports.coreTools = [
|
|
16
17
|
get_project_js_1.get_project,
|
|
17
18
|
list_apps_js_1.list_apps,
|
|
@@ -21,6 +22,7 @@ exports.coreTools = [
|
|
|
21
22
|
create_project_js_1.create_project,
|
|
22
23
|
create_app_js_1.create_app,
|
|
23
24
|
create_android_sha_js_1.create_android_sha,
|
|
25
|
+
consult_assistant_js_1.consult_assistant,
|
|
24
26
|
get_environment_js_1.get_environment,
|
|
25
27
|
update_environment_js_1.update_environment,
|
|
26
28
|
init_js_1.init,
|
|
@@ -11,7 +11,7 @@ exports.list_apps = (0, tool_js_1.tool)({
|
|
|
11
11
|
inputSchema: zod_1.z.object({
|
|
12
12
|
platform: zod_1.z
|
|
13
13
|
.enum(["ios", "android", "web", "all"])
|
|
14
|
-
.
|
|
14
|
+
.optional()
|
|
15
15
|
.describe("the specific platform to list (omit to list all platforms)"),
|
|
16
16
|
}),
|
|
17
17
|
annotations: {
|
|
@@ -24,7 +24,7 @@ exports.list_apps = (0, tool_js_1.tool)({
|
|
|
24
24
|
},
|
|
25
25
|
}, async ({ platform }, { projectId }) => {
|
|
26
26
|
try {
|
|
27
|
-
const apps = await (0, apps_js_1.listFirebaseApps)(projectId, platform === "all" ? apps_js_1.AppPlatform.ANY : platform.toUpperCase());
|
|
27
|
+
const apps = await (0, apps_js_1.listFirebaseApps)(projectId, !platform || platform === "all" ? apps_js_1.AppPlatform.ANY : platform.toUpperCase());
|
|
28
28
|
return (0, util_js_1.toContent)(apps);
|
|
29
29
|
}
|
|
30
30
|
catch (err) {
|
|
@@ -25,6 +25,7 @@ exports.generate_operation = (0, tool_js_1.tool)({
|
|
|
25
25
|
_meta: {
|
|
26
26
|
requiresProject: true,
|
|
27
27
|
requiresAuth: true,
|
|
28
|
+
requiresGemini: true,
|
|
28
29
|
},
|
|
29
30
|
}, async ({ prompt, service_id }, { projectId, config }) => {
|
|
30
31
|
const serviceInfo = await (0, fileUtils_js_1.pickService)(projectId, config, service_id || undefined);
|
|
@@ -18,6 +18,7 @@ exports.generate_schema = (0, tool_js_1.tool)({
|
|
|
18
18
|
_meta: {
|
|
19
19
|
requiresProject: true,
|
|
20
20
|
requiresAuth: true,
|
|
21
|
+
requiresGemini: true,
|
|
21
22
|
},
|
|
22
23
|
}, async ({ prompt }, { projectId }) => {
|
|
23
24
|
const schema = await (0, fdcExperience_js_1.generateSchema)(prompt, projectId);
|
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.dataconnectTools = void 0;
|
|
4
|
+
const generate_operation_js_1 = require("./generate_operation.js");
|
|
5
|
+
const generate_schema_js_1 = require("./generate_schema.js");
|
|
4
6
|
const list_services_js_1 = require("./list_services.js");
|
|
5
7
|
const get_schema_js_1 = require("./get_schema.js");
|
|
6
8
|
const get_connector_js_1 = require("./get_connector.js");
|
|
@@ -10,6 +12,8 @@ const execute_query_js_1 = require("./execute_query.js");
|
|
|
10
12
|
const execute_mutation_js_1 = require("./execute_mutation.js");
|
|
11
13
|
exports.dataconnectTools = [
|
|
12
14
|
list_services_js_1.list_services,
|
|
15
|
+
generate_schema_js_1.generate_schema,
|
|
16
|
+
generate_operation_js_1.generate_operation,
|
|
13
17
|
get_schema_js_1.get_schema,
|
|
14
18
|
get_connector_js_1.get_connectors,
|
|
15
19
|
execute_graphql_js_1.execute_graphql,
|
package/lib/operation-poller.js
CHANGED
|
@@ -34,7 +34,9 @@ class OperationPoller {
|
|
|
34
34
|
return async () => {
|
|
35
35
|
let res;
|
|
36
36
|
try {
|
|
37
|
-
res = await apiClient.get(options.operationResourceName
|
|
37
|
+
res = await apiClient.get(options.operationResourceName, {
|
|
38
|
+
headers: options.headers,
|
|
39
|
+
});
|
|
38
40
|
}
|
|
39
41
|
catch (err) {
|
|
40
42
|
if (err.status === 500 || err.status === 503) {
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "firebase-tools",
|
|
3
|
-
"version": "14.
|
|
3
|
+
"version": "14.9.0",
|
|
4
4
|
"description": "Command-Line Interface for Firebase",
|
|
5
5
|
"main": "./lib/index.js",
|
|
6
6
|
"bin": {
|
|
@@ -60,7 +60,8 @@
|
|
|
60
60
|
]
|
|
61
61
|
},
|
|
62
62
|
"dependencies": {
|
|
63
|
-
"@electric-sql/pglite": "^0.
|
|
63
|
+
"@electric-sql/pglite": "^0.3.3",
|
|
64
|
+
"@electric-sql/pglite-tools": "^0.2.8",
|
|
64
65
|
"@google-cloud/cloud-sql-connector": "^1.3.3",
|
|
65
66
|
"@google-cloud/pubsub": "^4.5.0",
|
|
66
67
|
"@inquirer/prompts": "^7.4.0",
|
|
@@ -110,6 +111,7 @@
|
|
|
110
111
|
"p-limit": "^3.0.1",
|
|
111
112
|
"pg": "^8.11.3",
|
|
112
113
|
"pg-gateway": "^0.3.0-beta.4",
|
|
114
|
+
"pglite-2": "npm:@electric-sql/pglite@0.2.17",
|
|
113
115
|
"portfinder": "^1.0.32",
|
|
114
116
|
"progress": "^2.0.3",
|
|
115
117
|
"proxy-agent": "^6.3.0",
|
|
@@ -7,9 +7,22 @@
|
|
|
7
7
|
* See a full list of supported triggers at https://firebase.google.com/docs/functions
|
|
8
8
|
*/
|
|
9
9
|
|
|
10
|
-
const {
|
|
10
|
+
const {setGlobalOptions} = require("firebase-functions");
|
|
11
|
+
const {onRequest} = require("firebase-functions/https");
|
|
11
12
|
const logger = require("firebase-functions/logger");
|
|
12
13
|
|
|
14
|
+
// For cost control, you can set the maximum number of containers that can be
|
|
15
|
+
// running at the same time. This helps mitigate the impact of unexpected
|
|
16
|
+
// traffic spikes by instead downgrading performance. This limit is a
|
|
17
|
+
// per-function limit. You can override the limit for each function using the
|
|
18
|
+
// `maxInstances` option in the function's options, e.g.
|
|
19
|
+
// `onRequest({ maxInstances: 5 }, (req, res) => { ... })`.
|
|
20
|
+
// NOTE: setGlobalOptions does not apply to functions using the v1 API. V1
|
|
21
|
+
// functions should each use functions.runWith({ maxInstances: 10 }) instead.
|
|
22
|
+
// In the v1 API, each function can only serve one request per container, so
|
|
23
|
+
// this will be the maximum concurrent request count.
|
|
24
|
+
setGlobalOptions({ maxInstances: 10 });
|
|
25
|
+
|
|
13
26
|
// Create and deploy your first functions
|
|
14
27
|
// https://firebase.google.com/docs/functions/get-started
|
|
15
28
|
|
|
@@ -3,8 +3,16 @@
|
|
|
3
3
|
# Deploy with `firebase deploy`
|
|
4
4
|
|
|
5
5
|
from firebase_functions import https_fn
|
|
6
|
+
from firebase_functions.options import set_global_options
|
|
6
7
|
from firebase_admin import initialize_app
|
|
7
8
|
|
|
9
|
+
# For cost control, you can set the maximum number of containers that can be
|
|
10
|
+
# running at the same time. This helps mitigate the impact of unexpected
|
|
11
|
+
# traffic spikes by instead downgrading performance. This limit is a per-function
|
|
12
|
+
# limit. You can override the limit for each function using the max_instances
|
|
13
|
+
# parameter in the decorator, e.g. @https_fn.on_request(max_instances=5).
|
|
14
|
+
set_global_options(max_instances=10)
|
|
15
|
+
|
|
8
16
|
# initialize_app()
|
|
9
17
|
#
|
|
10
18
|
#
|
|
@@ -7,12 +7,25 @@
|
|
|
7
7
|
* See a full list of supported triggers at https://firebase.google.com/docs/functions
|
|
8
8
|
*/
|
|
9
9
|
|
|
10
|
-
import {
|
|
10
|
+
import {setGlobalOptions} from "firebase-functions";
|
|
11
|
+
import {onRequest} from "firebase-functions/https";
|
|
11
12
|
import * as logger from "firebase-functions/logger";
|
|
12
13
|
|
|
13
14
|
// Start writing functions
|
|
14
15
|
// https://firebase.google.com/docs/functions/typescript
|
|
15
16
|
|
|
17
|
+
// For cost control, you can set the maximum number of containers that can be
|
|
18
|
+
// running at the same time. This helps mitigate the impact of unexpected
|
|
19
|
+
// traffic spikes by instead downgrading performance. This limit is a
|
|
20
|
+
// per-function limit. You can override the limit for each function using the
|
|
21
|
+
// `maxInstances` option in the function's options, e.g.
|
|
22
|
+
// `onRequest({ maxInstances: 5 }, (req, res) => { ... })`.
|
|
23
|
+
// NOTE: setGlobalOptions does not apply to functions using the v1 API. V1
|
|
24
|
+
// functions should each use functions.runWith({ maxInstances: 10 }) instead.
|
|
25
|
+
// In the v1 API, each function can only serve one request per container, so
|
|
26
|
+
// this will be the maximum concurrent request count.
|
|
27
|
+
setGlobalOptions({ maxInstances: 10 });
|
|
28
|
+
|
|
16
29
|
// export const helloWorld = onRequest((request, response) => {
|
|
17
30
|
// logger.info("Hello logs!", {structuredData: true});
|
|
18
31
|
// response.send("Hello from Firebase!");
|