firebase-tools 14.5.1 → 14.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/lib/commands/functions-list.js +23 -32
  2. package/lib/commands/init.js +14 -1
  3. package/lib/commands/projects-create.js +1 -1
  4. package/lib/commands/use.js +4 -1
  5. package/lib/crashlytics/listTopIssues.js +2 -1
  6. package/lib/dataconnect/client.js +1 -0
  7. package/lib/deploy/functions/checkIam.js +38 -3
  8. package/lib/deploy/functions/prepare.js +1 -0
  9. package/lib/deploy/functions/services/firestore.js +23 -4
  10. package/lib/emulator/auth/cloudFunctions.js +14 -1
  11. package/lib/emulator/commandUtils.js +2 -1
  12. package/lib/emulator/controller.js +1 -1
  13. package/lib/emulator/dataconnect/pgliteServer.js +20 -12
  14. package/lib/emulator/downloadableEmulatorInfo.json +18 -18
  15. package/lib/firestore/delete.js +6 -4
  16. package/lib/frameworks/constants.js +1 -1
  17. package/lib/frameworks/utils.js +8 -2
  18. package/lib/gcp/firestore.js +26 -21
  19. package/lib/gcp/storage.js +8 -4
  20. package/lib/{gif → gemini}/fdcExperience.js +14 -13
  21. package/lib/init/features/dataconnect/index.js +1 -1
  22. package/lib/init/features/project.js +13 -6
  23. package/lib/management/projects.js +6 -5
  24. package/lib/mcp/index.js +15 -1
  25. package/lib/mcp/tools/core/consult_assistant.js +1 -1
  26. package/lib/mcp/tools/crashlytics/list_top_issues.js +7 -2
  27. package/lib/mcp/tools/dataconnect/emulator.js +3 -19
  28. package/lib/mcp/tools/dataconnect/execute_graphql.js +1 -1
  29. package/lib/mcp/tools/dataconnect/execute_graphql_read.js +1 -1
  30. package/lib/mcp/tools/dataconnect/execute_mutation.js +1 -1
  31. package/lib/mcp/tools/dataconnect/execute_query.js +1 -1
  32. package/lib/mcp/tools/dataconnect/generate_operation.js +1 -1
  33. package/lib/mcp/tools/dataconnect/generate_schema.js +1 -1
  34. package/lib/mcp/tools/firestore/delete_document.js +17 -4
  35. package/lib/mcp/tools/firestore/get_documents.js +12 -2
  36. package/lib/mcp/tools/firestore/list_collections.js +14 -3
  37. package/lib/mcp/tools/firestore/query_collection.js +12 -2
  38. package/lib/mcp/tools/storage/get_download_url.js +8 -2
  39. package/lib/track.js +4 -0
  40. package/package.json +3 -2
  41. package/lib/emulator/dataconnect/pg-gateway/auth/base-auth-flow.js +0 -11
  42. package/lib/emulator/dataconnect/pg-gateway/auth/cert.js +0 -69
  43. package/lib/emulator/dataconnect/pg-gateway/auth/index.js +0 -22
  44. package/lib/emulator/dataconnect/pg-gateway/auth/md5.js +0 -135
  45. package/lib/emulator/dataconnect/pg-gateway/auth/password.js +0 -65
  46. package/lib/emulator/dataconnect/pg-gateway/auth/sasl/sasl-mechanism.js +0 -34
  47. package/lib/emulator/dataconnect/pg-gateway/auth/sasl/scram-sha-256.js +0 -298
  48. package/lib/emulator/dataconnect/pg-gateway/backend-error.js +0 -75
  49. package/lib/emulator/dataconnect/pg-gateway/buffer-reader.js +0 -55
  50. package/lib/emulator/dataconnect/pg-gateway/buffer-writer.js +0 -79
  51. package/lib/emulator/dataconnect/pg-gateway/connection.js +0 -419
  52. package/lib/emulator/dataconnect/pg-gateway/connection.types.js +0 -8
  53. package/lib/emulator/dataconnect/pg-gateway/crypto.js +0 -40
  54. package/lib/emulator/dataconnect/pg-gateway/duplex.js +0 -53
  55. package/lib/emulator/dataconnect/pg-gateway/index.js +0 -27
  56. package/lib/emulator/dataconnect/pg-gateway/message-buffer.js +0 -96
  57. package/lib/emulator/dataconnect/pg-gateway/message-codes.js +0 -54
  58. package/lib/emulator/dataconnect/pg-gateway/platforms/node/index.js +0 -13
  59. package/lib/emulator/dataconnect/pg-gateway/polyfills/readable-stream-async-iterator.js +0 -36
  60. package/lib/emulator/dataconnect/pg-gateway/utils.js +0 -40
  61. package/lib/{emulator/dataconnect/pg-gateway/auth/trust.js → gemini/types.js} +0 -0
@@ -2,7 +2,6 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.command = void 0;
4
4
  const command_1 = require("../command");
5
- const error_1 = require("../error");
6
5
  const projectUtils_1 = require("../projectUtils");
7
6
  const requirePermissions_1 = require("../requirePermissions");
8
7
  const backend = require("../deploy/functions/backend");
@@ -12,36 +11,28 @@ exports.command = new command_1.Command("functions:list")
12
11
  .description("list all deployed functions in your Firebase project")
13
12
  .before(requirePermissions_1.requirePermissions, ["cloudfunctions.functions.list"])
14
13
  .action(async (options) => {
15
- try {
16
- const context = {
17
- projectId: (0, projectUtils_1.needProjectId)(options),
18
- };
19
- const existing = await backend.existingBackend(context);
20
- const endpointsList = backend.allEndpoints(existing).sort(backend.compareFunctions);
21
- const table = new Table({
22
- head: ["Function", "Version", "Trigger", "Location", "Memory", "Runtime"],
23
- style: { head: ["yellow"] },
24
- });
25
- for (const endpoint of endpointsList) {
26
- const trigger = backend.endpointTriggerType(endpoint);
27
- const availableMemoryMb = endpoint.availableMemoryMb || "---";
28
- const entry = [
29
- endpoint.id,
30
- endpoint.platform === "gcfv2" ? "v2" : "v1",
31
- trigger,
32
- endpoint.region,
33
- availableMemoryMb,
34
- endpoint.runtime,
35
- ];
36
- table.push(entry);
37
- }
38
- logger_1.logger.info(table.toString());
39
- return endpointsList;
40
- }
41
- catch (err) {
42
- throw new error_1.FirebaseError("Failed to list functions", {
43
- exit: 1,
44
- original: err,
45
- });
14
+ const context = {
15
+ projectId: (0, projectUtils_1.needProjectId)(options),
16
+ };
17
+ const existing = await backend.existingBackend(context);
18
+ const endpointsList = backend.allEndpoints(existing).sort(backend.compareFunctions);
19
+ const table = new Table({
20
+ head: ["Function", "Version", "Trigger", "Location", "Memory", "Runtime"],
21
+ style: { head: ["yellow"] },
22
+ });
23
+ for (const endpoint of endpointsList) {
24
+ const trigger = backend.endpointTriggerType(endpoint);
25
+ const availableMemoryMb = endpoint.availableMemoryMb || "---";
26
+ const entry = [
27
+ endpoint.id,
28
+ endpoint.platform === "gcfv2" ? "v2" : "v1",
29
+ trigger,
30
+ endpoint.region,
31
+ availableMemoryMb,
32
+ endpoint.runtime,
33
+ ];
34
+ table.push(entry);
46
35
  }
36
+ logger_1.logger.info(table.toString());
37
+ return endpointsList;
47
38
  });
@@ -16,6 +16,7 @@ const utils = require("../utils");
16
16
  const experiments_1 = require("../experiments");
17
17
  const templates_1 = require("../templates");
18
18
  const error_1 = require("../error");
19
+ const track_1 = require("../track");
19
20
  const homeDir = os.homedir();
20
21
  const BANNER_TEXT = (0, templates_1.readTemplateSync)("banner.txt");
21
22
  const GITIGNORE_TEMPLATE = (0, templates_1.readTemplateSync)("_gitignore");
@@ -114,13 +115,14 @@ exports.command = new command_1.Command("init [feature]")
114
115
  .before(requireAuth_1.requireAuth)
115
116
  .action(initAction);
116
117
  async function initAction(feature, options) {
117
- var _a;
118
+ var _a, _b;
118
119
  if (feature && !featureNames.includes(feature)) {
119
120
  return utils.reject(clc.bold(feature) +
120
121
  " is not a supported feature; must be one of " +
121
122
  featureNames.join(", ") +
122
123
  ".");
123
124
  }
125
+ const start = process.uptime();
124
126
  const cwd = options.cwd || process.cwd();
125
127
  const warnings = [];
126
128
  let warningText = "";
@@ -168,6 +170,15 @@ async function initAction(feature, options) {
168
170
  message: "Which Firebase features do you want to set up for this directory? " +
169
171
  "Press Space to select features, then Enter to confirm your choices.",
170
172
  choices: choices.filter((c) => !c.hidden),
173
+ validate: (choices) => {
174
+ if (choices.length === 0) {
175
+ return ("Must select at least one feature. Use " +
176
+ clc.bold(clc.underline("SPACEBAR")) +
177
+ " to select features, or specify a feature by running " +
178
+ clc.bold("firebase init [feature_name]"));
179
+ }
180
+ return true;
181
+ },
171
182
  });
172
183
  }
173
184
  if (!setup.features || ((_a = setup.features) === null || _a === void 0 ? void 0 : _a.length) === 0) {
@@ -191,6 +202,8 @@ async function initAction(feature, options) {
191
202
  if (!fsutils.fileExistsSync(config.path(".gitignore"))) {
192
203
  config.writeProjectFile(".gitignore", GITIGNORE_TEMPLATE);
193
204
  }
205
+ const duration = Math.floor((process.uptime() - start) * 1000);
206
+ await (0, track_1.trackGA4)("product_init", { products_initialized: (_b = setup.features) === null || _b === void 0 ? void 0 : _b.join(",") }, duration);
194
207
  logger_1.logger.info();
195
208
  utils.logSuccess("Firebase initialization complete!");
196
209
  }
@@ -17,7 +17,7 @@ exports.command = new command_1.Command("projects:create [projectId]")
17
17
  throw new error_1.FirebaseError("Invalid argument, please provide only one type of project parent (organization or folder)");
18
18
  }
19
19
  if (!options.nonInteractive) {
20
- options = Object.assign(Object.assign({}, options), (await (0, projects_1.promptProjectCreation)()));
20
+ options = Object.assign(Object.assign({}, options), (await (0, projects_1.promptProjectCreation)(options)));
21
21
  }
22
22
  if (!options.projectId) {
23
23
  throw new error_1.FirebaseError("Project ID cannot be empty");
@@ -81,7 +81,6 @@ async function addAlias(options) {
81
81
  " instead.");
82
82
  }
83
83
  const projects = await (0, projects_1.listFirebaseProjects)();
84
- const results = {};
85
84
  const project = await (0, prompt_1.select)({
86
85
  message: "Which project do you want to add?",
87
86
  choices: projects.map((p) => p.projectId).sort(),
@@ -92,6 +91,10 @@ async function addAlias(options) {
92
91
  return input && input.length > 0;
93
92
  },
94
93
  });
94
+ const results = {
95
+ project,
96
+ alias,
97
+ };
95
98
  options.rc.addProjectAlias(alias, project);
96
99
  utils.makeActiveProject(options.projectRoot, results.alias);
97
100
  logger_1.logger.info();
@@ -10,10 +10,11 @@ const apiClient = new apiv2_1.Client({
10
10
  urlPrefix: (0, api_1.crashlyticsApiOrigin)(),
11
11
  apiVersion: "v1alpha",
12
12
  });
13
- async function listTopIssues(projectId, appId, issueCount) {
13
+ async function listTopIssues(projectId, appId, issueType, issueCount) {
14
14
  try {
15
15
  const queryParams = new URLSearchParams();
16
16
  queryParams.set("page_size", `${issueCount}`);
17
+ queryParams.set("filter.issue.error_types", `${issueType}`);
17
18
  const requestProjectId = parseProjectId(appId);
18
19
  if (requestProjectId === undefined) {
19
20
  throw new error_1.FirebaseError("Unable to get the projectId from the AppId.");
@@ -103,6 +103,7 @@ async function upsertSchema(schema, validateOnly = false) {
103
103
  apiOrigin: (0, api_1.dataconnectOrigin)(),
104
104
  apiVersion: DATACONNECT_API_VERSION,
105
105
  operationResourceName: op.body.name,
106
+ masterTimeout: 120000,
106
107
  });
107
108
  }
108
109
  exports.upsertSchema = upsertSchema;
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.ensureServiceAgentRoles = exports.obtainDefaultComputeServiceAgentBindings = exports.obtainPubSubServiceAgentBindings = exports.checkHttpIam = exports.checkServiceAccountIam = exports.EVENTARC_EVENT_RECEIVER_ROLE = exports.RUN_INVOKER_ROLE = exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = void 0;
3
+ exports.ensureServiceAgentRoles = exports.ensureGenkitMonitoringRoles = exports.obtainDefaultComputeServiceAgentBindings = exports.obtainPubSubServiceAgentBindings = exports.checkHttpIam = exports.checkServiceAccountIam = exports.GENKIT_MONITORING_ROLES = exports.EVENTARC_EVENT_RECEIVER_ROLE = exports.RUN_INVOKER_ROLE = exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = void 0;
4
4
  const colorette_1 = require("colorette");
5
5
  const logger_1 = require("../../logger");
6
6
  const functionsDeployHelper_1 = require("./functionsDeployHelper");
@@ -17,6 +17,11 @@ const PERMISSION = "cloudfunctions.functions.setIamPolicy";
17
17
  exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = "roles/iam.serviceAccountTokenCreator";
18
18
  exports.RUN_INVOKER_ROLE = "roles/run.invoker";
19
19
  exports.EVENTARC_EVENT_RECEIVER_ROLE = "roles/eventarc.eventReceiver";
20
+ exports.GENKIT_MONITORING_ROLES = [
21
+ "roles/monitoring.metricWriter",
22
+ "roles/cloudtrace.agent",
23
+ "roles/logging.logWriter",
24
+ ];
20
25
  async function checkServiceAccountIam(projectId) {
21
26
  const saEmail = `${projectId}@appspot.gserviceaccount.com`;
22
27
  let passed = false;
@@ -81,6 +86,9 @@ function reduceEventsToServices(services, endpoint) {
81
86
  }
82
87
  return services;
83
88
  }
89
+ function isGenkitEndpoint(endpoint) {
90
+ return (backend.isCallableTriggered(endpoint) && endpoint.callableTrigger.genkitAction !== undefined);
91
+ }
84
92
  function obtainPubSubServiceAgentBindings(projectNumber) {
85
93
  const serviceAccountTokenCreatorBinding = {
86
94
  role: exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE,
@@ -102,6 +110,30 @@ async function obtainDefaultComputeServiceAgentBindings(projectNumber) {
102
110
  return [runInvokerBinding, eventarcEventReceiverBinding];
103
111
  }
104
112
  exports.obtainDefaultComputeServiceAgentBindings = obtainDefaultComputeServiceAgentBindings;
113
+ async function ensureGenkitMonitoringRoles(projectId, projectNumber, want, have, dryRun) {
114
+ const wantEndpoints = backend.allEndpoints(want).filter(isGenkitEndpoint);
115
+ const newEndpoints = wantEndpoints.filter(backend.missingEndpoint(have));
116
+ if (newEndpoints.length === 0) {
117
+ return;
118
+ }
119
+ const serviceAccounts = newEndpoints
120
+ .map((endpoint) => endpoint.serviceAccount || "")
121
+ .filter((value, index, self) => self.indexOf(value) === index);
122
+ const defaultServiceAccountIndex = serviceAccounts.indexOf("");
123
+ if (defaultServiceAccountIndex) {
124
+ serviceAccounts[defaultServiceAccountIndex] = await gce.getDefaultServiceAccount(projectNumber);
125
+ }
126
+ const members = serviceAccounts.map((sa) => `serviceAccount:${sa}`);
127
+ const requiredBindings = [];
128
+ for (const monitoringRole of exports.GENKIT_MONITORING_ROLES) {
129
+ requiredBindings.push({
130
+ role: monitoringRole,
131
+ members: members,
132
+ });
133
+ }
134
+ await ensureBindings(projectId, projectNumber, requiredBindings, newEndpoints.map((endpoint) => endpoint.id), dryRun);
135
+ }
136
+ exports.ensureGenkitMonitoringRoles = ensureGenkitMonitoringRoles;
105
137
  async function ensureServiceAgentRoles(projectId, projectNumber, want, have, dryRun) {
106
138
  const wantServices = backend.allEndpoints(want).reduce(reduceEventsToServices, []);
107
139
  const haveServices = backend.allEndpoints(have).reduce(reduceEventsToServices, []);
@@ -122,6 +154,10 @@ async function ensureServiceAgentRoles(projectId, projectNumber, want, have, dry
122
154
  if (requiredBindings.length === 0) {
123
155
  return;
124
156
  }
157
+ await ensureBindings(projectId, projectNumber, requiredBindings, newServices.map((service) => service.api), dryRun);
158
+ }
159
+ exports.ensureServiceAgentRoles = ensureServiceAgentRoles;
160
+ async function ensureBindings(projectId, projectNumber, requiredBindings, newServicesOrEndpoints, dryRun) {
125
161
  let policy;
126
162
  try {
127
163
  policy = await (0, resourceManager_1.getIamPolicy)(projectNumber);
@@ -129,7 +165,7 @@ async function ensureServiceAgentRoles(projectId, projectNumber, want, have, dry
129
165
  catch (err) {
130
166
  iam.printManualIamConfig(requiredBindings, projectId, "functions");
131
167
  utils.logLabeledBullet("functions", "Could not verify the necessary IAM configuration for the following newly-integrated services: " +
132
- `${newServices.map((service) => service.api).join(", ")}` +
168
+ `${newServicesOrEndpoints.join(", ")}` +
133
169
  ". Deployment may fail.", "warn");
134
170
  return;
135
171
  }
@@ -152,4 +188,3 @@ async function ensureServiceAgentRoles(projectId, projectNumber, want, have, dry
152
188
  " otherwise the deployment will fail.", { original: err });
153
189
  }
154
190
  }
155
- exports.ensureServiceAgentRoles = ensureServiceAgentRoles;
@@ -181,6 +181,7 @@ async function prepare(context, options, payload) {
181
181
  await backend.checkAvailability(context, matchingBackend);
182
182
  await validate.secretsAreValid(projectId, matchingBackend);
183
183
  await (0, checkIam_1.ensureServiceAgentRoles)(projectId, projectNumber, matchingBackend, haveBackend, options.dryRun);
184
+ await (0, checkIam_1.ensureGenkitMonitoringRoles)(projectId, projectNumber, matchingBackend, haveBackend, options.dryRun);
184
185
  await ensure.secretAccess(projectId, matchingBackend, haveBackend, options.dryRun);
185
186
  updateEndpointTargetedStatus(wantBackends, context.filters || []);
186
187
  (0, applyHash_1.applyBackendHashToBackends)(wantBackends, context);
@@ -1,17 +1,36 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.ensureFirestoreTriggerRegion = void 0;
3
+ exports.ensureFirestoreTriggerRegion = exports.clearCache = void 0;
4
4
  const firestore = require("../../../gcp/firestore");
5
5
  const error_1 = require("../../../error");
6
6
  const dbCache = new Map();
7
+ const dbPromiseCache = new Map();
8
+ function clearCache() {
9
+ dbCache.clear();
10
+ dbPromiseCache.clear();
11
+ }
12
+ exports.clearCache = clearCache;
7
13
  async function getDatabase(project, databaseId) {
8
14
  const key = `${project}/${databaseId}`;
9
15
  if (dbCache.has(key)) {
10
16
  return dbCache.get(key);
11
17
  }
12
- const db = await firestore.getDatabase(project, databaseId, false);
13
- dbCache.set(key, db);
14
- return db;
18
+ if (dbPromiseCache.has(key)) {
19
+ return dbPromiseCache.get(key);
20
+ }
21
+ const dbPromise = firestore
22
+ .getDatabase(project, databaseId)
23
+ .then((db) => {
24
+ dbCache.set(key, db);
25
+ dbPromiseCache.delete(key);
26
+ return db;
27
+ })
28
+ .catch((error) => {
29
+ dbPromiseCache.delete(key);
30
+ throw error;
31
+ });
32
+ dbPromiseCache.set(key, dbPromise);
33
+ return dbPromise;
15
34
  }
16
35
  async function ensureFirestoreTriggerRegion(endpoint) {
17
36
  var _a;
@@ -44,6 +44,7 @@ class AuthCloudFunction {
44
44
  };
45
45
  }
46
46
  createUserInfoPayload(user) {
47
+ var _a;
47
48
  return {
48
49
  uid: user.localId,
49
50
  email: user.email,
@@ -61,10 +62,22 @@ class AuthCloudFunction {
61
62
  : undefined,
62
63
  },
63
64
  customClaims: JSON.parse(user.customAttributes || "{}"),
64
- providerData: user.providerUserInfo,
65
+ providerData: (_a = user.providerUserInfo) === null || _a === void 0 ? void 0 : _a.map((info) => this.createProviderUserInfoPayload(info)),
65
66
  tenantId: user.tenantId,
66
67
  mfaInfo: user.mfaInfo,
67
68
  };
68
69
  }
70
+ createProviderUserInfoPayload(info) {
71
+ return {
72
+ rawId: info.rawId,
73
+ providerId: info.providerId,
74
+ displayName: info.displayName,
75
+ email: info.email,
76
+ federatedId: info.federatedId,
77
+ phoneNumber: info.phoneNumber,
78
+ photoURL: info.photoUrl,
79
+ screenName: info.screenName,
80
+ };
81
+ }
69
82
  }
70
83
  exports.AuthCloudFunction = AuthCloudFunction;
@@ -92,7 +92,8 @@ async function beforeEmulatorCommand(options) {
92
92
  const canStartWithoutConfig = options.only &&
93
93
  !controller.shouldStart(optionsWithConfig, types_1.Emulators.FUNCTIONS) &&
94
94
  !controller.shouldStart(optionsWithConfig, types_1.Emulators.HOSTING);
95
- if (!constants_1.Constants.isDemoProject(options.project)) {
95
+ if (!constants_1.Constants.isDemoProject(options.project) ||
96
+ controller.shouldStart(optionsWithConfig, types_1.Emulators.EXTENSIONS)) {
96
97
  try {
97
98
  await (0, requireAuth_1.requireAuth)(options);
98
99
  }
@@ -740,7 +740,7 @@ async function exportEmulatorData(exportPath, options, initiatedBy) {
740
740
  const exportAbsPath = path.resolve(exportPath);
741
741
  if (!fs.existsSync(exportAbsPath)) {
742
742
  utils.logBullet(`Creating export directory ${exportAbsPath}`);
743
- fs.mkdirSync(exportAbsPath);
743
+ fs.mkdirSync(exportAbsPath, { recursive: true });
744
744
  }
745
745
  const existingMetadata = hubExport_1.HubExport.readMetadata(exportAbsPath);
746
746
  const isExportDirEmpty = fs.readdirSync(exportAbsPath).length === 0;
@@ -19,13 +19,13 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
19
19
  function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
20
20
  };
21
21
  Object.defineProperty(exports, "__esModule", { value: true });
22
- exports.PGliteExtendedQueryPatch = exports.PostgresServer = exports.TRUNCATE_TABLES_SQL = void 0;
22
+ exports.fromNodeSocket = exports.PGliteExtendedQueryPatch = exports.PostgresServer = exports.TRUNCATE_TABLES_SQL = void 0;
23
23
  const pglite_1 = require("@electric-sql/pglite");
24
24
  const { dynamicImport } = require(true && "../../dynamicImport");
25
25
  const net = require("node:net");
26
+ const node_stream_1 = require("node:stream");
26
27
  const fs = require("fs");
27
- const index_1 = require("./pg-gateway/index");
28
- const node_1 = require("./pg-gateway/platforms/node");
28
+ const pg_gateway_1 = require("pg-gateway");
29
29
  const logger_1 = require("../../logger");
30
30
  const error_1 = require("../../error");
31
31
  const node_string_decoder_1 = require("node:string_decoder");
@@ -45,7 +45,7 @@ class PostgresServer {
45
45
  async createPGServer(host = "127.0.0.1", port) {
46
46
  const getDb = this.getDb.bind(this);
47
47
  const server = net.createServer(async (socket) => {
48
- const connection = await (0, node_1.fromNodeSocket)(socket, {
48
+ const connection = await fromNodeSocket(socket, {
49
49
  serverVersion: "16.3 (PGlite 0.2.0)",
50
50
  auth: { method: "trust" },
51
51
  async onMessage(data, { isAuthenticated }) {
@@ -53,7 +53,7 @@ class PostgresServer {
53
53
  return;
54
54
  }
55
55
  const db = await getDb();
56
- if (data[0] === index_1.FrontendMessageCode.Terminate) {
56
+ if (data[0] === pg_gateway_1.FrontendMessageCode.Terminate) {
57
57
  await db.query("DEALLOCATE ALL");
58
58
  }
59
59
  const result = await db.execProtocolRaw(data);
@@ -156,9 +156,9 @@ class PGliteExtendedQueryPatch {
156
156
  return __asyncGenerator(this, arguments, function* filterResponse_1() {
157
157
  var _a, e_1, _b, _c;
158
158
  const pipelineStartMessages = [
159
- index_1.FrontendMessageCode.Parse,
160
- index_1.FrontendMessageCode.Bind,
161
- index_1.FrontendMessageCode.Close,
159
+ pg_gateway_1.FrontendMessageCode.Parse,
160
+ pg_gateway_1.FrontendMessageCode.Bind,
161
+ pg_gateway_1.FrontendMessageCode.Close,
162
162
  ];
163
163
  const decoder = new node_string_decoder_1.StringDecoder();
164
164
  const decoded = decoder.write(message);
@@ -166,13 +166,13 @@ class PGliteExtendedQueryPatch {
166
166
  if (pipelineStartMessages.includes(message[0])) {
167
167
  this.isExtendedQuery = true;
168
168
  }
169
- if (message[0] === index_1.FrontendMessageCode.Sync) {
169
+ if (message[0] === pg_gateway_1.FrontendMessageCode.Sync) {
170
170
  this.isExtendedQuery = false;
171
171
  this.eqpErrored = false;
172
172
  return yield __await(this.connection.createReadyForQuery());
173
173
  }
174
174
  try {
175
- for (var _d = true, _e = __asyncValues((0, index_1.getMessages)(response)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a;) {
175
+ for (var _d = true, _e = __asyncValues((0, pg_gateway_1.getMessages)(response)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a;) {
176
176
  _c = _f.value;
177
177
  _d = false;
178
178
  try {
@@ -180,10 +180,10 @@ class PGliteExtendedQueryPatch {
180
180
  if (this.eqpErrored) {
181
181
  continue;
182
182
  }
183
- if (this.isExtendedQuery && message[0] === index_1.BackendMessageCode.ErrorMessage) {
183
+ if (this.isExtendedQuery && message[0] === pg_gateway_1.BackendMessageCode.ErrorMessage) {
184
184
  this.eqpErrored = true;
185
185
  }
186
- if (this.isExtendedQuery && message[0] === index_1.BackendMessageCode.ReadyForQuery) {
186
+ if (this.isExtendedQuery && message[0] === pg_gateway_1.BackendMessageCode.ReadyForQuery) {
187
187
  logger_1.logger.debug("Filtered out a ReadyForQuery.");
188
188
  continue;
189
189
  }
@@ -205,3 +205,11 @@ class PGliteExtendedQueryPatch {
205
205
  }
206
206
  }
207
207
  exports.PGliteExtendedQueryPatch = PGliteExtendedQueryPatch;
208
+ async function fromNodeSocket(socket, options) {
209
+ const rs = node_stream_1.Readable.toWeb(socket);
210
+ const ws = node_stream_1.Writable.toWeb(socket);
211
+ const opts = options
212
+ ? Object.assign({}, options) : undefined;
213
+ return new pg_gateway_1.PostgresConnection({ readable: rs, writable: ws }, opts);
214
+ }
215
+ exports.fromNodeSocket = fromNodeSocket;
@@ -54,28 +54,28 @@
54
54
  },
55
55
  "dataconnect": {
56
56
  "darwin": {
57
- "version": "2.6.2",
58
- "expectedSize": 27501312,
59
- "expectedChecksum": "f4adceec52a29bbc8eabf39dc07460a8",
60
- "expectedChecksumSHA256": "038b1a763d2afd487423b37841323554e13bb6973c69a35f993c315819506ff8",
61
- "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-macos-v2.6.2",
62
- "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.6.2"
57
+ "version": "2.7.0",
58
+ "expectedSize": 27542272,
59
+ "expectedChecksum": "8b68e45ccae2d2cf35bea368e7ce379c",
60
+ "expectedChecksumSHA256": "6dce6ea23c39e4e44dbaa6db7181c9d7761dd1098589a627fa5aa0fb4a07ccd7",
61
+ "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-macos-v2.7.0",
62
+ "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.7.0"
63
63
  },
64
64
  "win32": {
65
- "version": "2.6.2",
66
- "expectedSize": 27961856,
67
- "expectedChecksum": "36c75d09d9def62891be1ba84424cc5d",
68
- "expectedChecksumSHA256": "eb33efdf0374bfe0772f3adff7b2ab7eb3353f5e03e2a781623d10f61a92444e",
69
- "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-windows-v2.6.2",
70
- "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.6.2.exe"
65
+ "version": "2.7.0",
66
+ "expectedSize": 28001280,
67
+ "expectedChecksum": "03ef2fd3ed2f7263539f1a9d7b748ee6",
68
+ "expectedChecksumSHA256": "3462f15ddc5d11371774de8f19b54468eb75eefac5f3a5623334f42154f06bc8",
69
+ "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-windows-v2.7.0",
70
+ "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.7.0.exe"
71
71
  },
72
72
  "linux": {
73
- "version": "2.6.2",
74
- "expectedSize": 27414680,
75
- "expectedChecksum": "deb1bad4fbccf4e3bbc437f8e2b34536",
76
- "expectedChecksumSHA256": "549633c0e3ab26621da197b202e5712163c4be2d1f5d1e6c81bb33f20ccd1d7a",
77
- "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-linux-v2.6.2",
78
- "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.6.2"
73
+ "version": "2.7.0",
74
+ "expectedSize": 27451544,
75
+ "expectedChecksum": "8b73f21f1bdf168a5f2c3137c30abc7b",
76
+ "expectedChecksumSHA256": "e2d03d1d0524f8053b34376c8ad6ea1fdfb8bc48568e80a35c2e3e510754c121",
77
+ "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-linux-v2.7.0",
78
+ "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.7.0"
79
79
  }
80
80
  }
81
81
  }
@@ -12,12 +12,14 @@ const api_1 = require("../api");
12
12
  const MIN_ID = "__id-9223372036854775808__";
13
13
  class FirestoreDelete {
14
14
  constructor(project, path, options) {
15
+ var _a;
15
16
  this.project = project;
16
17
  this.path = path || "";
17
18
  this.recursive = Boolean(options.recursive);
18
19
  this.shallow = Boolean(options.shallow);
19
20
  this.allCollections = Boolean(options.allCollections);
20
21
  this.databaseId = options.databaseId;
22
+ this.urlPrefix = (_a = options.urlPrefix) !== null && _a !== void 0 ? _a : (0, api_1.firestoreOriginOrEmulator)();
21
23
  this.readBatchSize = 7500;
22
24
  this.maxPendingDeletes = 15;
23
25
  this.deleteBatchSize = 250;
@@ -41,7 +43,7 @@ class FirestoreDelete {
41
43
  this.apiClient = new apiv2.Client({
42
44
  auth: true,
43
45
  apiVersion: "v1",
44
- urlPrefix: (0, api_1.firestoreOriginOrEmulator)(),
46
+ urlPrefix: this.urlPrefix,
45
47
  });
46
48
  }
47
49
  setDeleteBatchSize(size) {
@@ -219,7 +221,7 @@ class FirestoreDelete {
219
221
  }
220
222
  numPendingDeletes++;
221
223
  firestore
222
- .deleteDocuments(this.project, toDelete, true)
224
+ .deleteDocuments(this.project, toDelete, this.databaseId, this.urlPrefix)
223
225
  .then((numDeleted) => {
224
226
  FirestoreDelete.progressBar.tick(numDeleted);
225
227
  numDocsDeleted += numDeleted;
@@ -291,7 +293,7 @@ class FirestoreDelete {
291
293
  let initialDelete;
292
294
  if (this.isDocumentPath) {
293
295
  const doc = { name: this.root + "/" + this.path };
294
- initialDelete = firestore.deleteDocument(doc, true).catch((err) => {
296
+ initialDelete = firestore.deleteDocument(doc, this.urlPrefix).catch((err) => {
295
297
  logger_1.logger.debug("deletePath:initialDelete:error", err);
296
298
  if (this.allDescendants) {
297
299
  return Promise.resolve();
@@ -308,7 +310,7 @@ class FirestoreDelete {
308
310
  }
309
311
  deleteDatabase() {
310
312
  return firestore
311
- .listCollectionIds(this.project, true)
313
+ .listCollectionIds(this.project, this.databaseId, this.urlPrefix)
312
314
  .catch((err) => {
313
315
  logger_1.logger.debug("deleteDatabase:listCollectionIds:error", err);
314
316
  return utils.reject("Unable to list collection IDs");
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.DEFAULT_SHOULD_USE_DEV_MODE_HANDLE = exports.GET_DEFAULT_BUILD_TARGETS = exports.I18N_ROOT = exports.ALLOWED_SSR_REGIONS = exports.DEFAULT_REGION = exports.VALID_LOCALE_FORMATS = exports.VALID_ENGINES = exports.NODE_VERSION = exports.SHARP_VERSION = exports.FIREBASE_ADMIN_VERSION = exports.FIREBASE_FUNCTIONS_VERSION = exports.FIREBASE_FRAMEWORKS_VERSION = exports.MAILING_LIST_URL = exports.FEATURE_REQUEST_URL = exports.FILE_BUG_URL = exports.DEFAULT_DOCS_URL = exports.SupportLevelWarnings = exports.NPM_COMMAND_TIMEOUT_MILLIES = void 0;
4
4
  const clc = require("colorette");
5
5
  const experiments = require("../experiments");
6
- exports.NPM_COMMAND_TIMEOUT_MILLIES = 10000;
6
+ exports.NPM_COMMAND_TIMEOUT_MILLIES = 60000;
7
7
  exports.SupportLevelWarnings = {
8
8
  ["experimental"]: (framework) => `Thank you for trying our ${clc.italic("experimental")} support for ${framework} on Firebase Hosting.
9
9
  ${clc.red(`While this integration is maintained by Googlers it is not a supported Firebase product.
@@ -187,8 +187,14 @@ function findDependency(name, options = {}) {
187
187
  ], { cwd, env, timeout: constants_1.NPM_COMMAND_TIMEOUT_MILLIES });
188
188
  if (!result.stdout)
189
189
  return;
190
- const json = JSON.parse(result.stdout.toString());
191
- return scanDependencyTree(name, json.dependencies);
190
+ try {
191
+ const json = JSON.parse(result.stdout.toString());
192
+ return scanDependencyTree(name, json.dependencies);
193
+ }
194
+ catch (e) {
195
+ const packageJson = (0, fs_extra_1.readJsonSync)((0, path_1.join)(cwd, name, "package.json"), { throws: false });
196
+ return (packageJson === null || packageJson === void 0 ? void 0 : packageJson.version) ? { version: packageJson.version } : undefined;
197
+ }
192
198
  }
193
199
  exports.findDependency = findDependency;
194
200
  async function relativeRequire(dir, mod) {