firebase-tools 14.5.0 → 14.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/lib/commands/login.js +8 -3
  2. package/lib/commands/projects-create.js +1 -1
  3. package/lib/commands/use.js +4 -1
  4. package/lib/dataconnect/client.js +1 -0
  5. package/lib/deploy/firestore/deploy.js +5 -2
  6. package/lib/deploy/functions/checkIam.js +38 -3
  7. package/lib/deploy/functions/prepare.js +1 -0
  8. package/lib/deploy/functions/services/firestore.js +23 -4
  9. package/lib/emulator/auth/cloudFunctions.js +14 -1
  10. package/lib/emulator/dataconnect/pgliteServer.js +20 -12
  11. package/lib/emulator/downloadableEmulatorInfo.json +18 -18
  12. package/lib/firestore/delete.js +6 -4
  13. package/lib/frameworks/constants.js +1 -1
  14. package/lib/frameworks/utils.js +8 -2
  15. package/lib/gcp/firestore.js +26 -21
  16. package/lib/{gif → gemini}/fdcExperience.js +14 -13
  17. package/lib/init/features/dataconnect/index.js +14 -11
  18. package/lib/init/features/dataconnect/sdk.js +8 -6
  19. package/lib/init/features/project.js +13 -6
  20. package/lib/management/projects.js +6 -5
  21. package/lib/mcp/tools/core/consult_assistant.js +1 -1
  22. package/lib/mcp/tools/dataconnect/generate_operation.js +1 -1
  23. package/lib/mcp/tools/dataconnect/generate_schema.js +1 -1
  24. package/lib/mcp/tools/firestore/delete_document.js +17 -4
  25. package/lib/mcp/tools/firestore/emulator.js +16 -0
  26. package/lib/mcp/tools/firestore/get_documents.js +12 -2
  27. package/lib/mcp/tools/firestore/list_collections.js +14 -3
  28. package/lib/mcp/tools/firestore/query_collection.js +12 -2
  29. package/package.json +3 -2
  30. package/lib/emulator/dataconnect/pg-gateway/auth/base-auth-flow.js +0 -11
  31. package/lib/emulator/dataconnect/pg-gateway/auth/cert.js +0 -69
  32. package/lib/emulator/dataconnect/pg-gateway/auth/index.js +0 -22
  33. package/lib/emulator/dataconnect/pg-gateway/auth/md5.js +0 -135
  34. package/lib/emulator/dataconnect/pg-gateway/auth/password.js +0 -65
  35. package/lib/emulator/dataconnect/pg-gateway/auth/sasl/sasl-mechanism.js +0 -34
  36. package/lib/emulator/dataconnect/pg-gateway/auth/sasl/scram-sha-256.js +0 -298
  37. package/lib/emulator/dataconnect/pg-gateway/backend-error.js +0 -75
  38. package/lib/emulator/dataconnect/pg-gateway/buffer-reader.js +0 -55
  39. package/lib/emulator/dataconnect/pg-gateway/buffer-writer.js +0 -79
  40. package/lib/emulator/dataconnect/pg-gateway/connection.js +0 -419
  41. package/lib/emulator/dataconnect/pg-gateway/connection.types.js +0 -8
  42. package/lib/emulator/dataconnect/pg-gateway/crypto.js +0 -40
  43. package/lib/emulator/dataconnect/pg-gateway/duplex.js +0 -53
  44. package/lib/emulator/dataconnect/pg-gateway/index.js +0 -27
  45. package/lib/emulator/dataconnect/pg-gateway/message-buffer.js +0 -96
  46. package/lib/emulator/dataconnect/pg-gateway/message-codes.js +0 -54
  47. package/lib/emulator/dataconnect/pg-gateway/platforms/node/index.js +0 -13
  48. package/lib/emulator/dataconnect/pg-gateway/polyfills/readable-stream-async-iterator.js +0 -36
  49. package/lib/emulator/dataconnect/pg-gateway/utils.js +0 -40
  50. /package/lib/{emulator/dataconnect/pg-gateway/auth/trust.js → gemini/types.js} +0 -0
@@ -27,11 +27,16 @@ exports.command = new command_1.Command("login")
27
27
  return user;
28
28
  }
29
29
  if (!options.reauth) {
30
- utils.logBullet("Firebase optionally collects CLI and Emulator Suite usage and error reporting information to help improve our products. Data is collected in accordance with Google's privacy policy (https://policies.google.com/privacy) and is not used to identify you.\n");
30
+ utils.logBullet("Firebase CLI integrates with Gemini in Firebase API to provide assistant features. Learn more about using Gemini in Firebase and how we train our models: https://firebase.google.com/docs/gemini-in-firebase/set-up-gemini#required-permissions");
31
+ const geminiUsage = await (0, prompt_1.confirm)("Enable Gemini in Firebase features?");
32
+ configstore_1.configstore.set("gemini", geminiUsage);
33
+ logger_1.logger.info();
34
+ utils.logBullet("Firebase optionally collects CLI and Emulator Suite usage and error reporting information to help improve our products. Data is collected in accordance with Google's privacy policy (https://policies.google.com/privacy) and is not used to identify you.");
31
35
  const collectUsage = await (0, prompt_1.confirm)("Allow Firebase to collect CLI and Emulator Suite usage and error reporting information?");
32
36
  configstore_1.configstore.set("usage", collectUsage);
33
- if (collectUsage) {
34
- utils.logBullet("To change your data collection preference at any time, run `firebase logout` and log in again.");
37
+ if (geminiUsage || collectUsage) {
38
+ logger_1.logger.info();
39
+ utils.logBullet("To change your the preference at any time, run `firebase logout` and `firebase login` again.");
35
40
  }
36
41
  }
37
42
  const useLocalhost = (0, utils_1.isCloudEnvironment)() ? false : options.localhost;
@@ -17,7 +17,7 @@ exports.command = new command_1.Command("projects:create [projectId]")
17
17
  throw new error_1.FirebaseError("Invalid argument, please provide only one type of project parent (organization or folder)");
18
18
  }
19
19
  if (!options.nonInteractive) {
20
- options = Object.assign(Object.assign({}, options), (await (0, projects_1.promptProjectCreation)()));
20
+ options = Object.assign(Object.assign({}, options), (await (0, projects_1.promptProjectCreation)(options)));
21
21
  }
22
22
  if (!options.projectId) {
23
23
  throw new error_1.FirebaseError("Project ID cannot be empty");
@@ -81,7 +81,6 @@ async function addAlias(options) {
81
81
  " instead.");
82
82
  }
83
83
  const projects = await (0, projects_1.listFirebaseProjects)();
84
- const results = {};
85
84
  const project = await (0, prompt_1.select)({
86
85
  message: "Which project do you want to add?",
87
86
  choices: projects.map((p) => p.projectId).sort(),
@@ -92,6 +91,10 @@ async function addAlias(options) {
92
91
  return input && input.length > 0;
93
92
  },
94
93
  });
94
+ const results = {
95
+ project,
96
+ alias,
97
+ };
95
98
  options.rc.addProjectAlias(alias, project);
96
99
  utils.makeActiveProject(options.projectRoot, results.alias);
97
100
  logger_1.logger.info();
@@ -103,6 +103,7 @@ async function upsertSchema(schema, validateOnly = false) {
103
103
  apiOrigin: (0, api_1.dataconnectOrigin)(),
104
104
  apiVersion: DATACONNECT_API_VERSION,
105
105
  operationResourceName: op.body.name,
106
+ masterTimeout: 120000,
106
107
  });
107
108
  }
108
109
  exports.upsertSchema = upsertSchema;
@@ -16,8 +16,11 @@ async function createDatabase(context, options) {
16
16
  if (!options.projectId) {
17
17
  throw new error_1.FirebaseError("Project ID is required to create a Firestore database.");
18
18
  }
19
- if (!firestoreCfg || !firestoreCfg.database) {
20
- throw new error_1.FirebaseError("Firestore database configuration is missing in firebase.json.");
19
+ if (!firestoreCfg) {
20
+ throw new error_1.FirebaseError("Firestore database configuration not found in firebase.json.");
21
+ }
22
+ if (!firestoreCfg.database) {
23
+ firestoreCfg.database = "(default)";
21
24
  }
22
25
  const api = new api_1.FirestoreApi();
23
26
  try {
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.ensureServiceAgentRoles = exports.obtainDefaultComputeServiceAgentBindings = exports.obtainPubSubServiceAgentBindings = exports.checkHttpIam = exports.checkServiceAccountIam = exports.EVENTARC_EVENT_RECEIVER_ROLE = exports.RUN_INVOKER_ROLE = exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = void 0;
3
+ exports.ensureServiceAgentRoles = exports.ensureGenkitMonitoringRoles = exports.obtainDefaultComputeServiceAgentBindings = exports.obtainPubSubServiceAgentBindings = exports.checkHttpIam = exports.checkServiceAccountIam = exports.GENKIT_MONITORING_ROLES = exports.EVENTARC_EVENT_RECEIVER_ROLE = exports.RUN_INVOKER_ROLE = exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = void 0;
4
4
  const colorette_1 = require("colorette");
5
5
  const logger_1 = require("../../logger");
6
6
  const functionsDeployHelper_1 = require("./functionsDeployHelper");
@@ -17,6 +17,11 @@ const PERMISSION = "cloudfunctions.functions.setIamPolicy";
17
17
  exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE = "roles/iam.serviceAccountTokenCreator";
18
18
  exports.RUN_INVOKER_ROLE = "roles/run.invoker";
19
19
  exports.EVENTARC_EVENT_RECEIVER_ROLE = "roles/eventarc.eventReceiver";
20
+ exports.GENKIT_MONITORING_ROLES = [
21
+ "roles/monitoring.metricWriter",
22
+ "roles/cloudtrace.agent",
23
+ "roles/logging.logWriter",
24
+ ];
20
25
  async function checkServiceAccountIam(projectId) {
21
26
  const saEmail = `${projectId}@appspot.gserviceaccount.com`;
22
27
  let passed = false;
@@ -81,6 +86,9 @@ function reduceEventsToServices(services, endpoint) {
81
86
  }
82
87
  return services;
83
88
  }
89
+ function isGenkitEndpoint(endpoint) {
90
+ return (backend.isCallableTriggered(endpoint) && endpoint.callableTrigger.genkitAction !== undefined);
91
+ }
84
92
  function obtainPubSubServiceAgentBindings(projectNumber) {
85
93
  const serviceAccountTokenCreatorBinding = {
86
94
  role: exports.SERVICE_ACCOUNT_TOKEN_CREATOR_ROLE,
@@ -102,6 +110,30 @@ async function obtainDefaultComputeServiceAgentBindings(projectNumber) {
102
110
  return [runInvokerBinding, eventarcEventReceiverBinding];
103
111
  }
104
112
  exports.obtainDefaultComputeServiceAgentBindings = obtainDefaultComputeServiceAgentBindings;
113
+ async function ensureGenkitMonitoringRoles(projectId, projectNumber, want, have, dryRun) {
114
+ const wantEndpoints = backend.allEndpoints(want).filter(isGenkitEndpoint);
115
+ const newEndpoints = wantEndpoints.filter(backend.missingEndpoint(have));
116
+ if (newEndpoints.length === 0) {
117
+ return;
118
+ }
119
+ const serviceAccounts = newEndpoints
120
+ .map((endpoint) => endpoint.serviceAccount || "")
121
+ .filter((value, index, self) => self.indexOf(value) === index);
122
+ const defaultServiceAccountIndex = serviceAccounts.indexOf("");
123
+ if (defaultServiceAccountIndex) {
124
+ serviceAccounts[defaultServiceAccountIndex] = await gce.getDefaultServiceAccount(projectNumber);
125
+ }
126
+ const members = serviceAccounts.map((sa) => `serviceAccount:${sa}`);
127
+ const requiredBindings = [];
128
+ for (const monitoringRole of exports.GENKIT_MONITORING_ROLES) {
129
+ requiredBindings.push({
130
+ role: monitoringRole,
131
+ members: members,
132
+ });
133
+ }
134
+ await ensureBindings(projectId, projectNumber, requiredBindings, newEndpoints.map((endpoint) => endpoint.id), dryRun);
135
+ }
136
+ exports.ensureGenkitMonitoringRoles = ensureGenkitMonitoringRoles;
105
137
  async function ensureServiceAgentRoles(projectId, projectNumber, want, have, dryRun) {
106
138
  const wantServices = backend.allEndpoints(want).reduce(reduceEventsToServices, []);
107
139
  const haveServices = backend.allEndpoints(have).reduce(reduceEventsToServices, []);
@@ -122,6 +154,10 @@ async function ensureServiceAgentRoles(projectId, projectNumber, want, have, dry
122
154
  if (requiredBindings.length === 0) {
123
155
  return;
124
156
  }
157
+ await ensureBindings(projectId, projectNumber, requiredBindings, newServices.map((service) => service.api), dryRun);
158
+ }
159
+ exports.ensureServiceAgentRoles = ensureServiceAgentRoles;
160
+ async function ensureBindings(projectId, projectNumber, requiredBindings, newServicesOrEndpoints, dryRun) {
125
161
  let policy;
126
162
  try {
127
163
  policy = await (0, resourceManager_1.getIamPolicy)(projectNumber);
@@ -129,7 +165,7 @@ async function ensureServiceAgentRoles(projectId, projectNumber, want, have, dry
129
165
  catch (err) {
130
166
  iam.printManualIamConfig(requiredBindings, projectId, "functions");
131
167
  utils.logLabeledBullet("functions", "Could not verify the necessary IAM configuration for the following newly-integrated services: " +
132
- `${newServices.map((service) => service.api).join(", ")}` +
168
+ `${newServicesOrEndpoints.join(", ")}` +
133
169
  ". Deployment may fail.", "warn");
134
170
  return;
135
171
  }
@@ -152,4 +188,3 @@ async function ensureServiceAgentRoles(projectId, projectNumber, want, have, dry
152
188
  " otherwise the deployment will fail.", { original: err });
153
189
  }
154
190
  }
155
- exports.ensureServiceAgentRoles = ensureServiceAgentRoles;
@@ -181,6 +181,7 @@ async function prepare(context, options, payload) {
181
181
  await backend.checkAvailability(context, matchingBackend);
182
182
  await validate.secretsAreValid(projectId, matchingBackend);
183
183
  await (0, checkIam_1.ensureServiceAgentRoles)(projectId, projectNumber, matchingBackend, haveBackend, options.dryRun);
184
+ await (0, checkIam_1.ensureGenkitMonitoringRoles)(projectId, projectNumber, matchingBackend, haveBackend, options.dryRun);
184
185
  await ensure.secretAccess(projectId, matchingBackend, haveBackend, options.dryRun);
185
186
  updateEndpointTargetedStatus(wantBackends, context.filters || []);
186
187
  (0, applyHash_1.applyBackendHashToBackends)(wantBackends, context);
@@ -1,17 +1,36 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.ensureFirestoreTriggerRegion = void 0;
3
+ exports.ensureFirestoreTriggerRegion = exports.clearCache = void 0;
4
4
  const firestore = require("../../../gcp/firestore");
5
5
  const error_1 = require("../../../error");
6
6
  const dbCache = new Map();
7
+ const dbPromiseCache = new Map();
8
+ function clearCache() {
9
+ dbCache.clear();
10
+ dbPromiseCache.clear();
11
+ }
12
+ exports.clearCache = clearCache;
7
13
  async function getDatabase(project, databaseId) {
8
14
  const key = `${project}/${databaseId}`;
9
15
  if (dbCache.has(key)) {
10
16
  return dbCache.get(key);
11
17
  }
12
- const db = await firestore.getDatabase(project, databaseId, false);
13
- dbCache.set(key, db);
14
- return db;
18
+ if (dbPromiseCache.has(key)) {
19
+ return dbPromiseCache.get(key);
20
+ }
21
+ const dbPromise = firestore
22
+ .getDatabase(project, databaseId)
23
+ .then((db) => {
24
+ dbCache.set(key, db);
25
+ dbPromiseCache.delete(key);
26
+ return db;
27
+ })
28
+ .catch((error) => {
29
+ dbPromiseCache.delete(key);
30
+ throw error;
31
+ });
32
+ dbPromiseCache.set(key, dbPromise);
33
+ return dbPromise;
15
34
  }
16
35
  async function ensureFirestoreTriggerRegion(endpoint) {
17
36
  var _a;
@@ -44,6 +44,7 @@ class AuthCloudFunction {
44
44
  };
45
45
  }
46
46
  createUserInfoPayload(user) {
47
+ var _a;
47
48
  return {
48
49
  uid: user.localId,
49
50
  email: user.email,
@@ -61,10 +62,22 @@ class AuthCloudFunction {
61
62
  : undefined,
62
63
  },
63
64
  customClaims: JSON.parse(user.customAttributes || "{}"),
64
- providerData: user.providerUserInfo,
65
+ providerData: (_a = user.providerUserInfo) === null || _a === void 0 ? void 0 : _a.map((info) => this.createProviderUserInfoPayload(info)),
65
66
  tenantId: user.tenantId,
66
67
  mfaInfo: user.mfaInfo,
67
68
  };
68
69
  }
70
+ createProviderUserInfoPayload(info) {
71
+ return {
72
+ rawId: info.rawId,
73
+ providerId: info.providerId,
74
+ displayName: info.displayName,
75
+ email: info.email,
76
+ federatedId: info.federatedId,
77
+ phoneNumber: info.phoneNumber,
78
+ photoURL: info.photoUrl,
79
+ screenName: info.screenName,
80
+ };
81
+ }
69
82
  }
70
83
  exports.AuthCloudFunction = AuthCloudFunction;
@@ -19,13 +19,13 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
19
19
  function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
20
20
  };
21
21
  Object.defineProperty(exports, "__esModule", { value: true });
22
- exports.PGliteExtendedQueryPatch = exports.PostgresServer = exports.TRUNCATE_TABLES_SQL = void 0;
22
+ exports.fromNodeSocket = exports.PGliteExtendedQueryPatch = exports.PostgresServer = exports.TRUNCATE_TABLES_SQL = void 0;
23
23
  const pglite_1 = require("@electric-sql/pglite");
24
24
  const { dynamicImport } = require(true && "../../dynamicImport");
25
25
  const net = require("node:net");
26
+ const node_stream_1 = require("node:stream");
26
27
  const fs = require("fs");
27
- const index_1 = require("./pg-gateway/index");
28
- const node_1 = require("./pg-gateway/platforms/node");
28
+ const pg_gateway_1 = require("pg-gateway");
29
29
  const logger_1 = require("../../logger");
30
30
  const error_1 = require("../../error");
31
31
  const node_string_decoder_1 = require("node:string_decoder");
@@ -45,7 +45,7 @@ class PostgresServer {
45
45
  async createPGServer(host = "127.0.0.1", port) {
46
46
  const getDb = this.getDb.bind(this);
47
47
  const server = net.createServer(async (socket) => {
48
- const connection = await (0, node_1.fromNodeSocket)(socket, {
48
+ const connection = await fromNodeSocket(socket, {
49
49
  serverVersion: "16.3 (PGlite 0.2.0)",
50
50
  auth: { method: "trust" },
51
51
  async onMessage(data, { isAuthenticated }) {
@@ -53,7 +53,7 @@ class PostgresServer {
53
53
  return;
54
54
  }
55
55
  const db = await getDb();
56
- if (data[0] === index_1.FrontendMessageCode.Terminate) {
56
+ if (data[0] === pg_gateway_1.FrontendMessageCode.Terminate) {
57
57
  await db.query("DEALLOCATE ALL");
58
58
  }
59
59
  const result = await db.execProtocolRaw(data);
@@ -156,9 +156,9 @@ class PGliteExtendedQueryPatch {
156
156
  return __asyncGenerator(this, arguments, function* filterResponse_1() {
157
157
  var _a, e_1, _b, _c;
158
158
  const pipelineStartMessages = [
159
- index_1.FrontendMessageCode.Parse,
160
- index_1.FrontendMessageCode.Bind,
161
- index_1.FrontendMessageCode.Close,
159
+ pg_gateway_1.FrontendMessageCode.Parse,
160
+ pg_gateway_1.FrontendMessageCode.Bind,
161
+ pg_gateway_1.FrontendMessageCode.Close,
162
162
  ];
163
163
  const decoder = new node_string_decoder_1.StringDecoder();
164
164
  const decoded = decoder.write(message);
@@ -166,13 +166,13 @@ class PGliteExtendedQueryPatch {
166
166
  if (pipelineStartMessages.includes(message[0])) {
167
167
  this.isExtendedQuery = true;
168
168
  }
169
- if (message[0] === index_1.FrontendMessageCode.Sync) {
169
+ if (message[0] === pg_gateway_1.FrontendMessageCode.Sync) {
170
170
  this.isExtendedQuery = false;
171
171
  this.eqpErrored = false;
172
172
  return yield __await(this.connection.createReadyForQuery());
173
173
  }
174
174
  try {
175
- for (var _d = true, _e = __asyncValues((0, index_1.getMessages)(response)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a;) {
175
+ for (var _d = true, _e = __asyncValues((0, pg_gateway_1.getMessages)(response)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a;) {
176
176
  _c = _f.value;
177
177
  _d = false;
178
178
  try {
@@ -180,10 +180,10 @@ class PGliteExtendedQueryPatch {
180
180
  if (this.eqpErrored) {
181
181
  continue;
182
182
  }
183
- if (this.isExtendedQuery && message[0] === index_1.BackendMessageCode.ErrorMessage) {
183
+ if (this.isExtendedQuery && message[0] === pg_gateway_1.BackendMessageCode.ErrorMessage) {
184
184
  this.eqpErrored = true;
185
185
  }
186
- if (this.isExtendedQuery && message[0] === index_1.BackendMessageCode.ReadyForQuery) {
186
+ if (this.isExtendedQuery && message[0] === pg_gateway_1.BackendMessageCode.ReadyForQuery) {
187
187
  logger_1.logger.debug("Filtered out a ReadyForQuery.");
188
188
  continue;
189
189
  }
@@ -205,3 +205,11 @@ class PGliteExtendedQueryPatch {
205
205
  }
206
206
  }
207
207
  exports.PGliteExtendedQueryPatch = PGliteExtendedQueryPatch;
208
+ async function fromNodeSocket(socket, options) {
209
+ const rs = node_stream_1.Readable.toWeb(socket);
210
+ const ws = node_stream_1.Writable.toWeb(socket);
211
+ const opts = options
212
+ ? Object.assign({}, options) : undefined;
213
+ return new pg_gateway_1.PostgresConnection({ readable: rs, writable: ws }, opts);
214
+ }
215
+ exports.fromNodeSocket = fromNodeSocket;
@@ -54,28 +54,28 @@
54
54
  },
55
55
  "dataconnect": {
56
56
  "darwin": {
57
- "version": "2.6.2",
58
- "expectedSize": 27501312,
59
- "expectedChecksum": "f4adceec52a29bbc8eabf39dc07460a8",
60
- "expectedChecksumSHA256": "038b1a763d2afd487423b37841323554e13bb6973c69a35f993c315819506ff8",
61
- "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-macos-v2.6.2",
62
- "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.6.2"
57
+ "version": "2.6.3",
58
+ "expectedSize": 27517696,
59
+ "expectedChecksum": "01e0c04374c29e5c5e9d75613362f8e5",
60
+ "expectedChecksumSHA256": "54182c7545c99cb524d65959a165f1d727a1407edae91648dffc9c79850fad17",
61
+ "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-macos-v2.6.3",
62
+ "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.6.3"
63
63
  },
64
64
  "win32": {
65
- "version": "2.6.2",
66
- "expectedSize": 27961856,
67
- "expectedChecksum": "36c75d09d9def62891be1ba84424cc5d",
68
- "expectedChecksumSHA256": "eb33efdf0374bfe0772f3adff7b2ab7eb3353f5e03e2a781623d10f61a92444e",
69
- "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-windows-v2.6.2",
70
- "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.6.2.exe"
65
+ "version": "2.6.3",
66
+ "expectedSize": 27975168,
67
+ "expectedChecksum": "d58c93123ac5fdddfdc3e92301ba335b",
68
+ "expectedChecksumSHA256": "ab228eb606b522aa8bb5916994282215f74c33eb3db0c59c836b3bf8ccaab289",
69
+ "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-windows-v2.6.3",
70
+ "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.6.3.exe"
71
71
  },
72
72
  "linux": {
73
- "version": "2.6.2",
74
- "expectedSize": 27414680,
75
- "expectedChecksum": "deb1bad4fbccf4e3bbc437f8e2b34536",
76
- "expectedChecksumSHA256": "549633c0e3ab26621da197b202e5712163c4be2d1f5d1e6c81bb33f20ccd1d7a",
77
- "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-linux-v2.6.2",
78
- "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.6.2"
73
+ "version": "2.6.3",
74
+ "expectedSize": 27431064,
75
+ "expectedChecksum": "67a7457a5a77c2f8d4edc9898e88291b",
76
+ "expectedChecksumSHA256": "efa12b568cdda0e08f6273c371f3485a5fb512f45c7ba76abbfb55f7a25ccbb6",
77
+ "remoteUrl": "https://storage.googleapis.com/firemat-preview-drop/emulator/dataconnect-emulator-linux-v2.6.3",
78
+ "downloadPathRelativeToCacheDir": "dataconnect-emulator-2.6.3"
79
79
  }
80
80
  }
81
81
  }
@@ -12,12 +12,14 @@ const api_1 = require("../api");
12
12
  const MIN_ID = "__id-9223372036854775808__";
13
13
  class FirestoreDelete {
14
14
  constructor(project, path, options) {
15
+ var _a;
15
16
  this.project = project;
16
17
  this.path = path || "";
17
18
  this.recursive = Boolean(options.recursive);
18
19
  this.shallow = Boolean(options.shallow);
19
20
  this.allCollections = Boolean(options.allCollections);
20
21
  this.databaseId = options.databaseId;
22
+ this.urlPrefix = (_a = options.urlPrefix) !== null && _a !== void 0 ? _a : (0, api_1.firestoreOriginOrEmulator)();
21
23
  this.readBatchSize = 7500;
22
24
  this.maxPendingDeletes = 15;
23
25
  this.deleteBatchSize = 250;
@@ -41,7 +43,7 @@ class FirestoreDelete {
41
43
  this.apiClient = new apiv2.Client({
42
44
  auth: true,
43
45
  apiVersion: "v1",
44
- urlPrefix: (0, api_1.firestoreOriginOrEmulator)(),
46
+ urlPrefix: this.urlPrefix,
45
47
  });
46
48
  }
47
49
  setDeleteBatchSize(size) {
@@ -219,7 +221,7 @@ class FirestoreDelete {
219
221
  }
220
222
  numPendingDeletes++;
221
223
  firestore
222
- .deleteDocuments(this.project, toDelete, true)
224
+ .deleteDocuments(this.project, toDelete, this.databaseId, this.urlPrefix)
223
225
  .then((numDeleted) => {
224
226
  FirestoreDelete.progressBar.tick(numDeleted);
225
227
  numDocsDeleted += numDeleted;
@@ -291,7 +293,7 @@ class FirestoreDelete {
291
293
  let initialDelete;
292
294
  if (this.isDocumentPath) {
293
295
  const doc = { name: this.root + "/" + this.path };
294
- initialDelete = firestore.deleteDocument(doc, true).catch((err) => {
296
+ initialDelete = firestore.deleteDocument(doc, this.urlPrefix).catch((err) => {
295
297
  logger_1.logger.debug("deletePath:initialDelete:error", err);
296
298
  if (this.allDescendants) {
297
299
  return Promise.resolve();
@@ -308,7 +310,7 @@ class FirestoreDelete {
308
310
  }
309
311
  deleteDatabase() {
310
312
  return firestore
311
- .listCollectionIds(this.project, true)
313
+ .listCollectionIds(this.project, this.databaseId, this.urlPrefix)
312
314
  .catch((err) => {
313
315
  logger_1.logger.debug("deleteDatabase:listCollectionIds:error", err);
314
316
  return utils.reject("Unable to list collection IDs");
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.DEFAULT_SHOULD_USE_DEV_MODE_HANDLE = exports.GET_DEFAULT_BUILD_TARGETS = exports.I18N_ROOT = exports.ALLOWED_SSR_REGIONS = exports.DEFAULT_REGION = exports.VALID_LOCALE_FORMATS = exports.VALID_ENGINES = exports.NODE_VERSION = exports.SHARP_VERSION = exports.FIREBASE_ADMIN_VERSION = exports.FIREBASE_FUNCTIONS_VERSION = exports.FIREBASE_FRAMEWORKS_VERSION = exports.MAILING_LIST_URL = exports.FEATURE_REQUEST_URL = exports.FILE_BUG_URL = exports.DEFAULT_DOCS_URL = exports.SupportLevelWarnings = exports.NPM_COMMAND_TIMEOUT_MILLIES = void 0;
4
4
  const clc = require("colorette");
5
5
  const experiments = require("../experiments");
6
- exports.NPM_COMMAND_TIMEOUT_MILLIES = 10000;
6
+ exports.NPM_COMMAND_TIMEOUT_MILLIES = 60000;
7
7
  exports.SupportLevelWarnings = {
8
8
  ["experimental"]: (framework) => `Thank you for trying our ${clc.italic("experimental")} support for ${framework} on Firebase Hosting.
9
9
  ${clc.red(`While this integration is maintained by Googlers it is not a supported Firebase product.
@@ -187,8 +187,14 @@ function findDependency(name, options = {}) {
187
187
  ], { cwd, env, timeout: constants_1.NPM_COMMAND_TIMEOUT_MILLIES });
188
188
  if (!result.stdout)
189
189
  return;
190
- const json = JSON.parse(result.stdout.toString());
191
- return scanDependencyTree(name, json.dependencies);
190
+ try {
191
+ const json = JSON.parse(result.stdout.toString());
192
+ return scanDependencyTree(name, json.dependencies);
193
+ }
194
+ catch (e) {
195
+ const packageJson = (0, fs_extra_1.readJsonSync)((0, path_1.join)(cwd, name, "package.json"), { throws: false });
196
+ return (packageJson === null || packageJson === void 0 ? void 0 : packageJson.version) ? { version: packageJson.version } : undefined;
197
+ }
192
198
  }
193
199
  exports.findDependency = findDependency;
194
200
  async function relativeRequire(dir, mod) {
@@ -11,11 +11,16 @@ const prodOnlyClient = new apiv2_1.Client({
11
11
  apiVersion: "v1",
12
12
  urlPrefix: (0, api_1.firestoreOrigin)(),
13
13
  });
14
- const emuOrProdClient = new apiv2_1.Client({
15
- auth: true,
16
- apiVersion: "v1",
17
- urlPrefix: (0, api_1.firestoreOriginOrEmulator)(),
18
- });
14
+ function getClient(emulatorUrl) {
15
+ if (emulatorUrl) {
16
+ return new apiv2_1.Client({
17
+ auth: true,
18
+ apiVersion: "v1",
19
+ urlPrefix: emulatorUrl,
20
+ });
21
+ }
22
+ return prodOnlyClient;
23
+ }
19
24
  var DayOfWeek;
20
25
  (function (DayOfWeek) {
21
26
  DayOfWeek["MONDAY"] = "MONDAY";
@@ -26,8 +31,8 @@ var DayOfWeek;
26
31
  DayOfWeek["SATURDAY"] = "SATURDAY";
27
32
  DayOfWeek["SUNDAY"] = "SUNDAY";
28
33
  })(DayOfWeek = exports.DayOfWeek || (exports.DayOfWeek = {}));
29
- async function getDatabase(project, database, allowEmulator = false) {
30
- const apiClient = allowEmulator ? emuOrProdClient : prodOnlyClient;
34
+ async function getDatabase(project, database, emulatorUrl) {
35
+ const apiClient = getClient(emulatorUrl);
31
36
  const url = `projects/${project}/databases/${database}`;
32
37
  try {
33
38
  const resp = await apiClient.get(url);
@@ -39,9 +44,9 @@ async function getDatabase(project, database, allowEmulator = false) {
39
44
  }
40
45
  }
41
46
  exports.getDatabase = getDatabase;
42
- function listCollectionIds(project, allowEmulator = false) {
43
- const apiClient = allowEmulator ? emuOrProdClient : prodOnlyClient;
44
- const url = "projects/" + project + "/databases/(default)/documents:listCollectionIds";
47
+ function listCollectionIds(project, databaseId = "(default)", emulatorUrl) {
48
+ const apiClient = getClient(emulatorUrl);
49
+ const url = `projects/${project}/databases/${databaseId}/documents:listCollectionIds`;
45
50
  const data = {
46
51
  pageSize: 2147483647,
47
52
  };
@@ -50,9 +55,9 @@ function listCollectionIds(project, allowEmulator = false) {
50
55
  });
51
56
  }
52
57
  exports.listCollectionIds = listCollectionIds;
53
- async function getDocuments(project, paths, allowEmulator) {
54
- const apiClient = allowEmulator ? emuOrProdClient : prodOnlyClient;
55
- const basePath = `projects/${project}/databases/(default)/documents`;
58
+ async function getDocuments(project, paths, databaseId = "(default)", emulatorUrl) {
59
+ const apiClient = getClient(emulatorUrl);
60
+ const basePath = `projects/${project}/databases/${databaseId}/documents`;
56
61
  const url = `${basePath}:batchGet`;
57
62
  const fullPaths = paths.map((p) => `${basePath}/${p}`);
58
63
  const res = await apiClient.post(url, { documents: fullPaths });
@@ -61,9 +66,9 @@ async function getDocuments(project, paths, allowEmulator) {
61
66
  return out;
62
67
  }
63
68
  exports.getDocuments = getDocuments;
64
- async function queryCollection(project, structuredQuery, allowEmulator) {
65
- const apiClient = allowEmulator ? emuOrProdClient : prodOnlyClient;
66
- const basePath = `projects/${project}/databases/(default)/documents`;
69
+ async function queryCollection(project, structuredQuery, databaseId = "(default)", emulatorUrl) {
70
+ const apiClient = getClient(emulatorUrl);
71
+ const basePath = `projects/${project}/databases/${databaseId}/documents`;
67
72
  const url = `${basePath}:runQuery`;
68
73
  try {
69
74
  const res = await apiClient.post(url, {
@@ -84,14 +89,14 @@ async function queryCollection(project, structuredQuery, allowEmulator) {
84
89
  }
85
90
  }
86
91
  exports.queryCollection = queryCollection;
87
- async function deleteDocument(doc, allowEmulator = false) {
88
- const apiClient = allowEmulator ? emuOrProdClient : prodOnlyClient;
92
+ async function deleteDocument(doc, emulatorUrl) {
93
+ const apiClient = getClient(emulatorUrl);
89
94
  return apiClient.delete(doc.name);
90
95
  }
91
96
  exports.deleteDocument = deleteDocument;
92
- async function deleteDocuments(project, docs, allowEmulator = false) {
93
- const apiClient = allowEmulator ? emuOrProdClient : prodOnlyClient;
94
- const url = "projects/" + project + "/databases/(default)/documents:commit";
97
+ async function deleteDocuments(project, docs, databaseId = "(default)", emulatorUrl) {
98
+ const apiClient = getClient(emulatorUrl);
99
+ const url = `projects/${project}/databases/${databaseId}/documents:commit`;
95
100
  const writes = docs.map((doc) => {
96
101
  return { delete: doc.name };
97
102
  });
@@ -4,38 +4,39 @@ exports.generateOperation = exports.chatWithFirebase = exports.generateSchema =
4
4
  const apiv2_1 = require("../apiv2");
5
5
  const api_1 = require("../api");
6
6
  const apiClient = new apiv2_1.Client({ urlPrefix: (0, api_1.cloudCompanionOrigin)(), auth: true });
7
- const schemaGeneratorExperience = "/appeco/firebase/fdc-schema-generator";
8
- const geminiInFirebaseChatExperience = "/appeco/firebase/firebase-chat/free";
9
- const operationGeneratorExperience = "/appeco/firebase/fdc-query-generator";
10
- async function generateSchema(prompt, project) {
7
+ const SCHEMA_GENERATOR_EXPERIENCE = "/appeco/firebase/fdc-schema-generator";
8
+ const GEMINI_IN_FIREBASE_EXPERIENCE = "/appeco/firebase/firebase-chat/free";
9
+ const OPERATION_GENERATION_EXPERIENCE = "/appeco/firebase/fdc-query-generator";
10
+ const FIREBASE_CHAT_REQUEST_CONTEXT_TYPE_NAME = "type.googleapis.com/google.cloud.cloudaicompanion.v1main.FirebaseChatRequestContext";
11
+ async function generateSchema(prompt, project, chatHistory = []) {
11
12
  const res = await apiClient.post(`/v1beta/projects/${project}/locations/global/instances/default:completeTask`, {
12
- input: { messages: [{ content: prompt, author: "USER" }] },
13
+ input: { messages: [...chatHistory, { content: prompt, author: "USER" }] },
13
14
  experienceContext: {
14
- experience: schemaGeneratorExperience,
15
+ experience: SCHEMA_GENERATOR_EXPERIENCE,
15
16
  },
16
17
  });
17
18
  return res.body.output.messages[0].content;
18
19
  }
19
20
  exports.generateSchema = generateSchema;
20
- async function chatWithFirebase(prompt, project) {
21
+ async function chatWithFirebase(prompt, project, chatHistory = []) {
21
22
  const res = await apiClient.post(`/v1beta/projects/${project}/locations/global/instances/default:completeTask`, {
22
- input: { messages: [{ content: prompt, author: "USER" }] },
23
+ input: { messages: [...chatHistory, { content: prompt, author: "USER" }] },
23
24
  experienceContext: {
24
- experience: geminiInFirebaseChatExperience,
25
+ experience: GEMINI_IN_FIREBASE_EXPERIENCE,
25
26
  },
26
27
  });
27
28
  return res.body;
28
29
  }
29
30
  exports.chatWithFirebase = chatWithFirebase;
30
- async function generateOperation(prompt, service, project) {
31
+ async function generateOperation(prompt, service, project, chatHistory = []) {
31
32
  const res = await apiClient.post(`/v1beta/projects/${project}/locations/global/instances/default:completeTask`, {
32
- input: { messages: [{ content: prompt, author: "USER" }] },
33
+ input: { messages: [...chatHistory, { content: prompt, author: "USER" }] },
33
34
  experienceContext: {
34
- experience: operationGeneratorExperience,
35
+ experience: OPERATION_GENERATION_EXPERIENCE,
35
36
  },
36
37
  clientContext: {
37
38
  additionalContext: {
38
- "@type": "type.googleapis.com/google.cloud.cloudaicompanion.v1main.FirebaseChatRequestContext",
39
+ "@type": FIREBASE_CHAT_REQUEST_CONTEXT_TYPE_NAME,
39
40
  fdcInfo: { fdcServiceName: service, requiresQuery: true },
40
41
  },
41
42
  },