firebase-tools 14.12.1 → 14.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/README.md +1 -1
  2. package/lib/commands/dataconnect-services-list.js +5 -5
  3. package/lib/commands/dataconnect-sql-grant.js +5 -0
  4. package/lib/commands/dataconnect-sql-setup.js +1 -3
  5. package/lib/crashlytics/getIssueDetails.js +41 -0
  6. package/lib/crashlytics/getSampleCrash.js +48 -0
  7. package/lib/dataconnect/client.js +23 -15
  8. package/lib/dataconnect/ensureApis.js +5 -9
  9. package/lib/dataconnect/errors.js +7 -1
  10. package/lib/dataconnect/fileUtils.js +5 -6
  11. package/lib/dataconnect/freeTrial.js +16 -39
  12. package/lib/dataconnect/provisionCloudSql.js +67 -70
  13. package/lib/dataconnect/schemaMigration.js +222 -170
  14. package/lib/deploy/dataconnect/deploy.js +9 -11
  15. package/lib/deploy/dataconnect/prepare.js +7 -10
  16. package/lib/deploy/dataconnect/release.js +42 -30
  17. package/lib/deploy/functions/backend.js +8 -2
  18. package/lib/deploy/functions/build.js +23 -1
  19. package/lib/deploy/functions/ensure.js +1 -1
  20. package/lib/deploy/functions/functionsDeployHelper.js +8 -1
  21. package/lib/deploy/functions/prepare.js +8 -4
  22. package/lib/deploy/functions/pricing.js +12 -5
  23. package/lib/deploy/functions/release/fabricator.js +25 -3
  24. package/lib/emulator/controller.js +7 -3
  25. package/lib/emulator/downloadableEmulatorInfo.json +18 -18
  26. package/lib/emulator/functionsEmulator.js +11 -1
  27. package/lib/experiments.js +4 -0
  28. package/lib/extensions/extensionsHelper.js +4 -15
  29. package/lib/extensions/utils.js +1 -12
  30. package/lib/firestore/api.js +25 -11
  31. package/lib/firestore/pretty-print.js +7 -0
  32. package/lib/functional.js +7 -1
  33. package/lib/functions/env.js +19 -15
  34. package/lib/functions/projectConfig.js +25 -2
  35. package/lib/functions/secrets.js +3 -0
  36. package/lib/gcp/cloudfunctionsv2.js +3 -31
  37. package/lib/gcp/cloudscheduler.js +1 -1
  38. package/lib/gcp/cloudsql/cloudsqladmin.js +2 -14
  39. package/lib/gcp/cloudsql/connect.js +3 -2
  40. package/lib/gcp/cloudsql/permissionsSetup.js +23 -16
  41. package/lib/gcp/k8s.js +32 -0
  42. package/lib/gcp/runv2.js +178 -0
  43. package/lib/gemini/fdcExperience.js +5 -3
  44. package/lib/init/features/dataconnect/index.js +266 -162
  45. package/lib/init/features/dataconnect/sdk.js +36 -20
  46. package/lib/init/features/project.js +4 -0
  47. package/lib/management/studio.js +1 -1
  48. package/lib/mcp/tools/core/init.js +7 -6
  49. package/lib/mcp/tools/crashlytics/get_issue_details.js +33 -0
  50. package/lib/mcp/tools/crashlytics/get_sample_crash.js +43 -0
  51. package/lib/mcp/tools/crashlytics/index.js +7 -1
  52. package/lib/mcp/tools/crashlytics/list_top_issues.js +2 -1
  53. package/lib/rtdb.js +1 -1
  54. package/package.json +1 -1
  55. package/schema/firebase-config.json +6 -0
  56. package/lib/extensions/resolveSource.js +0 -24
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.getResourceRuntime = exports.formatTimestamp = exports.getRandomString = exports.convertOfficialExtensionsToList = exports.convertExtensionOptionToLabeledList = void 0;
3
+ exports.getResourceRuntime = exports.formatTimestamp = exports.getRandomString = exports.convertExtensionOptionToLabeledList = void 0;
4
4
  const types_1 = require("./types");
5
5
  function convertExtensionOptionToLabeledList(options) {
6
6
  return options.map((option) => {
@@ -12,17 +12,6 @@ function convertExtensionOptionToLabeledList(options) {
12
12
  });
13
13
  }
14
14
  exports.convertExtensionOptionToLabeledList = convertExtensionOptionToLabeledList;
15
- function convertOfficialExtensionsToList(officialExts) {
16
- const l = Object.entries(officialExts).map(([key, entry]) => {
17
- return {
18
- checked: false,
19
- value: `${entry.publisher}/${key}`,
20
- };
21
- });
22
- l.sort((a, b) => a.value.localeCompare(b.value));
23
- return l;
24
- }
25
- exports.convertOfficialExtensionsToList = convertOfficialExtensionsToList;
26
15
  function getRandomString(length) {
27
16
  const SUFFIX_CHAR_SET = "abcdefghijklmnopqrstuvwxyz0123456789";
28
17
  let result = "";
@@ -6,6 +6,7 @@ const logger_1 = require("../logger");
6
6
  const utils = require("../utils");
7
7
  const validator = require("./validator");
8
8
  const types = require("./api-types");
9
+ const api_types_1 = require("./api-types");
9
10
  const sort = require("./api-sort");
10
11
  const util = require("./util");
11
12
  const prompt_1 = require("../prompt");
@@ -13,6 +14,7 @@ const api_1 = require("../api");
13
14
  const error_1 = require("../error");
14
15
  const apiv2_1 = require("../apiv2");
15
16
  const pretty_print_1 = require("./pretty-print");
17
+ const functional_1 = require("../functional");
16
18
  class FirestoreApi {
17
19
  constructor() {
18
20
  this.apiClient = new apiv2_1.Client({ urlPrefix: (0, api_1.firestoreOrigin)(), apiVersion: "v1" });
@@ -33,6 +35,7 @@ class FirestoreApi {
33
35
  });
34
36
  }
35
37
  async deploy(options, indexes, fieldOverrides, databaseId = "(default)") {
38
+ var _a;
36
39
  const spec = this.upgradeOldSpec({
37
40
  indexes,
38
41
  fieldOverrides,
@@ -42,8 +45,10 @@ class FirestoreApi {
42
45
  const fieldOverridesToDeploy = spec.fieldOverrides;
43
46
  const existingIndexes = await this.listIndexes(options.project, databaseId);
44
47
  const existingFieldOverrides = await this.listFieldOverrides(options.project, databaseId);
48
+ const database = await this.getDatabase(options.project, databaseId);
49
+ const edition = (_a = database.databaseEdition) !== null && _a !== void 0 ? _a : api_types_1.DatabaseEdition.STANDARD;
45
50
  const indexesToDelete = existingIndexes.filter((index) => {
46
- return !indexesToDeploy.some((spec) => this.indexMatchesSpec(index, spec));
51
+ return !indexesToDeploy.some((spec) => this.indexMatchesSpec(index, spec, edition));
47
52
  });
48
53
  const fieldOverridesToDelete = existingFieldOverrides.filter((field) => {
49
54
  return !fieldOverridesToDeploy.some((spec) => {
@@ -79,7 +84,7 @@ class FirestoreApi {
79
84
  }
80
85
  }
81
86
  for (const index of indexesToDeploy) {
82
- const exists = existingIndexes.some((x) => this.indexMatchesSpec(x, index));
87
+ const exists = existingIndexes.some((x) => this.indexMatchesSpec(x, index, edition));
83
88
  if (exists) {
84
89
  logger_1.logger.debug(`Skipping existing index: ${JSON.stringify(index)}`);
85
90
  }
@@ -222,8 +227,9 @@ class FirestoreApi {
222
227
  if (index.multikey) {
223
228
  validator.assertType("multikey", index.multikey, "boolean");
224
229
  }
225
- if (index.unique) {
230
+ if (index.unique !== undefined) {
226
231
  validator.assertType("unique", index.unique, "boolean");
232
+ throw new error_1.FirebaseError("The `unique` index configuration is not supported yet.");
227
233
  }
228
234
  validator.assertHas(index, "fields");
229
235
  index.fields.forEach((field) => {
@@ -328,7 +334,18 @@ class FirestoreApi {
328
334
  const url = index.name;
329
335
  return this.apiClient.delete(`/${url}`);
330
336
  }
331
- indexMatchesSpec(index, spec) {
337
+ optionalApiScopeMatches(lhs, rhs) {
338
+ return (0, functional_1.optionalValueMatches)(lhs, rhs, types.ApiScope.ANY_API);
339
+ }
340
+ optionalDensityMatches(lhs, rhs, edition) {
341
+ const defaultValue = edition === api_types_1.DatabaseEdition.STANDARD ? types.Density.SPARSE_ALL : types.Density.DENSE;
342
+ return (0, functional_1.optionalValueMatches)(lhs, rhs, defaultValue);
343
+ }
344
+ optionalMultikeyMatches(lhs, rhs) {
345
+ const defaultValue = false;
346
+ return (0, functional_1.optionalValueMatches)(lhs, rhs, defaultValue);
347
+ }
348
+ indexMatchesSpec(index, spec, edition) {
332
349
  const collection = util.parseIndexName(index.name).collectionGroupId;
333
350
  if (collection !== spec.collectionGroup) {
334
351
  return false;
@@ -336,16 +353,13 @@ class FirestoreApi {
336
353
  if (index.queryScope !== spec.queryScope) {
337
354
  return false;
338
355
  }
339
- if (index.apiScope !== spec.apiScope) {
340
- return false;
341
- }
342
- if (index.density !== spec.density) {
356
+ if (!this.optionalApiScopeMatches(index.apiScope, spec.apiScope)) {
343
357
  return false;
344
358
  }
345
- if (index.multikey !== spec.multikey) {
359
+ if (!this.optionalDensityMatches(index.density, spec.density, edition)) {
346
360
  return false;
347
361
  }
348
- if (index.unique !== spec.unique) {
362
+ if (!this.optionalMultikeyMatches(index.multikey, spec.multikey)) {
349
363
  return false;
350
364
  }
351
365
  if (index.fields.length !== spec.fields.length) {
@@ -364,7 +378,7 @@ class FirestoreApi {
364
378
  if (iField.arrayConfig !== sField.arrayConfig) {
365
379
  return false;
366
380
  }
367
- if (iField.vectorConfig !== sField.vectorConfig) {
381
+ if (!utils.deepEqual(iField.vectorConfig, sField.vectorConfig)) {
368
382
  return false;
369
383
  }
370
384
  i++;
@@ -177,6 +177,13 @@ class PrettyPrint {
177
177
  }
178
178
  result += `(${field.fieldPath},${configString}) `;
179
179
  });
180
+ result += " -- ";
181
+ if (index.density !== undefined) {
182
+ result += clc.cyan(`Density:${index.density} `);
183
+ }
184
+ if (index.multikey !== undefined) {
185
+ result += clc.cyan(`Multikey:${index.multikey ? "YES" : "NO"}`);
186
+ }
180
187
  return result;
181
188
  }
182
189
  prettyBackupString(backup) {
package/lib/functional.js CHANGED
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.nullsafeVisitor = exports.mapObject = exports.partitionRecord = exports.partition = exports.assertExhaustive = exports.zipIn = exports.zip = exports.reduceFlat = exports.flatten = exports.flattenArray = exports.flattenObject = void 0;
3
+ exports.optionalValueMatches = exports.nullsafeVisitor = exports.mapObject = exports.partitionRecord = exports.partition = exports.assertExhaustive = exports.zipIn = exports.zip = exports.reduceFlat = exports.flatten = exports.flattenArray = exports.flattenObject = void 0;
4
4
  function* flattenObject(obj) {
5
5
  function* helper(path, obj) {
6
6
  for (const [k, v] of Object.entries(obj)) {
@@ -85,3 +85,9 @@ const nullsafeVisitor = (func, ...rest) => (first) => {
85
85
  return func(first, ...rest);
86
86
  };
87
87
  exports.nullsafeVisitor = nullsafeVisitor;
88
/**
 * Strictly compares two optional values, substituting `defaultValue` for
 * either side that is `undefined` (but not `null`) before comparing.
 * @param lhs first value, possibly undefined.
 * @param rhs second value, possibly undefined.
 * @param defaultValue value assumed for an undefined side.
 * @returns true when the resolved values are strictly equal.
 */
function optionalValueMatches(lhs, rhs, defaultValue) {
    const left = lhs === undefined ? defaultValue : lhs;
    const right = rhs === undefined ? defaultValue : rhs;
    return left === right;
}
93
+ exports.optionalValueMatches = optionalValueMatches;
@@ -140,7 +140,7 @@ function parseStrict(data) {
140
140
  return envs;
141
141
  }
142
142
  exports.parseStrict = parseStrict;
143
- function findEnvfiles(functionsSource, projectId, projectAlias, isEmulator) {
143
+ function findEnvfiles(configDir, projectId, projectAlias, isEmulator) {
144
144
  const files = [".env"];
145
145
  files.push(`.env.${projectId}`);
146
146
  if (projectAlias) {
@@ -150,26 +150,28 @@ function findEnvfiles(functionsSource, projectId, projectAlias, isEmulator) {
150
150
  files.push(FUNCTIONS_EMULATOR_DOTENV);
151
151
  }
152
152
  return files
153
- .map((f) => path.join(functionsSource, f))
153
+ .map((f) => path.join(configDir, f))
154
154
  .filter(fs.existsSync)
155
155
  .map((p) => path.basename(p));
156
156
  }
157
- function hasUserEnvs({ functionsSource, projectId, projectAlias, isEmulator, }) {
158
- return findEnvfiles(functionsSource, projectId, projectAlias, isEmulator).length > 0;
157
+ function hasUserEnvs(opts) {
158
+ const configDir = opts.configDir || opts.functionsSource;
159
+ return findEnvfiles(configDir, opts.projectId, opts.projectAlias, opts.isEmulator).length > 0;
159
160
  }
160
161
  exports.hasUserEnvs = hasUserEnvs;
161
162
  function writeUserEnvs(toWrite, envOpts) {
162
163
  if (Object.keys(toWrite).length === 0) {
163
164
  return;
164
165
  }
165
- const { functionsSource, projectId, projectAlias, isEmulator } = envOpts;
166
- const allEnvFiles = findEnvfiles(functionsSource, projectId, projectAlias, isEmulator);
166
+ const { projectId, projectAlias, isEmulator } = envOpts;
167
+ const configDir = envOpts.configDir || envOpts.functionsSource;
168
+ const allEnvFiles = findEnvfiles(configDir, projectId, projectAlias, isEmulator);
167
169
  const targetEnvFile = envOpts.isEmulator
168
170
  ? FUNCTIONS_EMULATOR_DOTENV
169
171
  : `.env.${envOpts.projectId}`;
170
172
  const targetEnvFileExists = allEnvFiles.includes(targetEnvFile);
171
173
  if (!targetEnvFileExists) {
172
- fs.writeFileSync(path.join(envOpts.functionsSource, targetEnvFile), "", { flag: "wx" });
174
+ fs.writeFileSync(path.join(configDir, targetEnvFile), "", { flag: "wx" });
173
175
  (0, utils_1.logBullet)(clc.yellow(clc.bold("functions: ")) +
174
176
  `Created new local file ${targetEnvFile} to store param values. We suggest explicitly adding or excluding this file from version control.`);
175
177
  }
@@ -186,7 +188,7 @@ function writeUserEnvs(toWrite, envOpts) {
186
188
  for (const k of Object.keys(toWrite)) {
187
189
  lines += formatUserEnvForWrite(k, toWrite[k]);
188
190
  }
189
- fs.appendFileSync(path.join(functionsSource, targetEnvFile), lines);
191
+ fs.appendFileSync(path.join(configDir, targetEnvFile), lines);
190
192
  }
191
193
  exports.writeUserEnvs = writeUserEnvs;
192
194
  function checkForDuplicateKeys(isEmulator, keys, fullEnv, envsWithoutLocal) {
@@ -210,22 +212,24 @@ function formatUserEnvForWrite(key, value) {
210
212
  }
211
213
  return `${key}=${escapedValue}\n`;
212
214
  }
213
- function loadUserEnvs({ functionsSource, projectId, projectAlias, isEmulator, }) {
215
+ function loadUserEnvs(opts) {
214
216
  var _a;
215
- const envFiles = findEnvfiles(functionsSource, projectId, projectAlias, isEmulator);
217
+ const configDir = opts.configDir || opts.functionsSource;
218
+ const envFiles = findEnvfiles(configDir, opts.projectId, opts.projectAlias, opts.isEmulator);
216
219
  if (envFiles.length === 0) {
217
220
  return {};
218
221
  }
219
- if (projectAlias) {
220
- if (envFiles.includes(`.env.${projectId}`) && envFiles.includes(`.env.${projectAlias}`)) {
221
- throw new error_1.FirebaseError(`Can't have both dotenv files with projectId (env.${projectId}) ` +
222
- `and projectAlias (.env.${projectAlias}) as extensions.`);
222
+ if (opts.projectAlias) {
223
+ if (envFiles.includes(`.env.${opts.projectId}`) &&
224
+ envFiles.includes(`.env.${opts.projectAlias}`)) {
225
+ throw new error_1.FirebaseError(`Can't have both dotenv files with projectId (env.${opts.projectId}) ` +
226
+ `and projectAlias (.env.${opts.projectAlias}) as extensions.`);
223
227
  }
224
228
  }
225
229
  let envs = {};
226
230
  for (const f of envFiles) {
227
231
  try {
228
- const data = fs.readFileSync(path.join(functionsSource, f), "utf8");
232
+ const data = fs.readFileSync(path.join(configDir, f), "utf8");
229
233
  envs = Object.assign(Object.assign({}, envs), parseStrict(data));
230
234
  }
231
235
  catch (err) {
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.configForCodebase = exports.normalizeAndValidate = exports.validate = exports.assertUnique = exports.validateCodebase = exports.normalize = exports.DEFAULT_CODEBASE = void 0;
3
+ exports.configForCodebase = exports.normalizeAndValidate = exports.validate = exports.assertUnique = exports.validatePrefix = exports.validateCodebase = exports.normalize = exports.DEFAULT_CODEBASE = void 0;
4
4
  const error_1 = require("../error");
5
5
  exports.DEFAULT_CODEBASE = "default";
6
6
  function normalize(config) {
@@ -23,6 +23,15 @@ function validateCodebase(codebase) {
23
23
  }
24
24
  }
25
25
  exports.validateCodebase = validateCodebase;
26
/**
 * Validates a functions codebase prefix.
 * A valid prefix is at most 30 characters, starts with a lowercase letter,
 * contains only lowercase letters, digits, and dashes, and does not start
 * or end with a dash.
 * @param prefix the candidate prefix string.
 * @throws {FirebaseError} when the prefix violates any of these rules.
 */
function validatePrefix(prefix) {
    const MAX_PREFIX_LENGTH = 30;
    if (prefix.length > MAX_PREFIX_LENGTH) {
        throw new error_1.FirebaseError("Invalid prefix. Prefix must be 30 characters or less.");
    }
    const VALID_PREFIX = /^[a-z](?:[a-z0-9-]*[a-z0-9])?$/;
    if (!VALID_PREFIX.test(prefix)) {
        throw new error_1.FirebaseError("Invalid prefix. Prefix must start with a lowercase letter, can contain only lowercase letters, numeric characters, and dashes, and cannot start or end with a dash.");
    }
}
34
+ exports.validatePrefix = validatePrefix;
26
35
  function validateSingle(config) {
27
36
  if (!config.source) {
28
37
  throw new error_1.FirebaseError("codebase source must be specified");
@@ -31,6 +40,9 @@ function validateSingle(config) {
31
40
  config.codebase = exports.DEFAULT_CODEBASE;
32
41
  }
33
42
  validateCodebase(config.codebase);
43
+ if (config.prefix) {
44
+ validatePrefix(config.prefix);
45
+ }
34
46
  return Object.assign(Object.assign({}, config), { source: config.source, codebase: config.codebase });
35
47
  }
36
48
  function assertUnique(config, property, propval) {
@@ -47,10 +59,21 @@ function assertUnique(config, property, propval) {
47
59
  }
48
60
  }
49
61
  exports.assertUnique = assertUnique;
62
/**
 * Asserts that no two function configs share the same (source, prefix)
 * combination; an absent prefix is treated as the empty string.
 * @param config array of validated function config objects.
 * @throws {FirebaseError} when a duplicate source/prefix pair is found.
 */
function assertUniqueSourcePrefixPair(config) {
    const seen = new Set();
    for (const cfg of config) {
        // Serialize the pair so it can be used as a Set member.
        const pairKey = JSON.stringify({ source: cfg.source, prefix: cfg.prefix || "" });
        if (seen.has(pairKey)) {
            const prefixLabel = cfg.prefix !== null && cfg.prefix !== undefined ? cfg.prefix : "";
            throw new error_1.FirebaseError(`More than one functions config specifies the same source directory ('${cfg.source}') and prefix ('${prefixLabel}'). Please add a unique 'prefix' to each function configuration that shares this source to resolve the conflict.`);
        }
        seen.add(pairKey);
    }
}
50
73
  function validate(config) {
51
74
  const validated = config.map((cfg) => validateSingle(cfg));
52
- assertUnique(validated, "source");
53
75
  assertUnique(validated, "codebase");
76
+ assertUniqueSourcePrefixPair(validated);
54
77
  return validated;
55
78
  }
56
79
  exports.validate = validate;
@@ -235,6 +235,9 @@ async function updateEndpointSecret(projectInfo, secretVersion, endpoint) {
235
235
  const cfn = await poller.pollOperation(Object.assign(Object.assign({}, gcfV2PollerOptions), { operationResourceName: op.name }));
236
236
  return gcfV2.endpointFromFunction(cfn);
237
237
  }
238
+ else if (endpoint.platform === "run") {
239
+ throw new error_1.FirebaseError("Updating Cloud Run functions is not yet implemented.");
240
+ }
238
241
  else {
239
242
  (0, functional_1.assertExhaustive)(endpoint.platform);
240
243
  }
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.endpointFromFunction = exports.functionFromEndpoint = exports.deleteFunction = exports.updateFunction = exports.listAllFunctions = exports.listFunctions = exports.getFunction = exports.createFunction = exports.generateUploadUrl = exports.mebibytes = exports.API_VERSION = void 0;
3
+ exports.endpointFromFunction = exports.functionFromEndpoint = exports.deleteFunction = exports.updateFunction = exports.listAllFunctions = exports.listFunctions = exports.getFunction = exports.createFunction = exports.generateUploadUrl = exports.API_VERSION = void 0;
4
4
  const apiv2_1 = require("../apiv2");
5
5
  const error_1 = require("../error");
6
6
  const api_1 = require("../api");
@@ -13,6 +13,7 @@ const utils = require("../utils");
13
13
  const projectConfig = require("../functions/projectConfig");
14
14
  const constants_1 = require("../functions/constants");
15
15
  const cloudfunctions_1 = require("./cloudfunctions");
16
+ const k8s_1 = require("./k8s");
16
17
  exports.API_VERSION = "v2";
17
18
  const DEFAULT_MAX_INSTANCE_COUNT = 100;
18
19
  const client = new apiv2_1.Client({
@@ -20,35 +21,6 @@ const client = new apiv2_1.Client({
20
21
  auth: true,
21
22
  apiVersion: exports.API_VERSION,
22
23
  });
23
- const BYTES_PER_UNIT = {
24
- "": 1,
25
- k: 1e3,
26
- M: 1e6,
27
- G: 1e9,
28
- T: 1e12,
29
- Ki: 1 << 10,
30
- Mi: 1 << 20,
31
- Gi: 1 << 30,
32
- Ti: 1 << 40,
33
- };
34
- function mebibytes(memory) {
35
- const re = /^([0-9]+(\.[0-9]*)?)(Ki|Mi|Gi|Ti|k|M|G|T|([eE]([0-9]+)))?$/;
36
- const matches = re.exec(memory);
37
- if (!matches) {
38
- throw new Error(`Invalid memory quantity "${memory}""`);
39
- }
40
- const quantity = Number.parseFloat(matches[1]);
41
- let bytes;
42
- if (matches[5]) {
43
- bytes = quantity * Math.pow(10, Number.parseFloat(matches[5]));
44
- }
45
- else {
46
- const suffix = matches[3] || "";
47
- bytes = quantity * BYTES_PER_UNIT[suffix];
48
- }
49
- return bytes / (1 << 20);
50
- }
51
- exports.mebibytes = mebibytes;
52
24
  function functionsOpLogReject(func, type, err) {
53
25
  var _a, _b, _c, _d, _e, _f, _g;
54
26
  if ((_a = err === null || err === void 0 ? void 0 : err.message) === null || _a === void 0 ? void 0 : _a.includes("Runtime validation errors")) {
@@ -360,7 +332,7 @@ function endpointFromFunction(gcfFunction) {
360
332
  logger_1.logger.debug("Prod should always return a valid memory amount");
361
333
  return prod;
362
334
  }
363
- const mem = mebibytes(prod);
335
+ const mem = (0, k8s_1.mebibytes)(prod);
364
336
  if (!backend.isValidMemoryOption(mem)) {
365
337
  logger_1.logger.debug("Converting a function to an endpoint with an invalid memory option", mem);
366
338
  }
@@ -124,7 +124,7 @@ async function jobFromEndpoint(endpoint, location, projectNumber) {
124
124
  },
125
125
  };
126
126
  }
127
- else if (endpoint.platform === "gcfv2") {
127
+ else if (endpoint.platform === "gcfv2" || endpoint.platform === "run") {
128
128
  job.timeZone = endpoint.scheduleTrigger.timeZone || DEFAULT_TIME_ZONE_V2;
129
129
  job.httpTarget = {
130
130
  uri: endpoint.uri,
@@ -55,9 +55,8 @@ async function createInstance(args) {
55
55
  if (args.enableGoogleMlIntegration) {
56
56
  databaseFlags.push({ name: "cloudsql.enable_google_ml_integration", value: "on" });
57
57
  }
58
- let op;
59
58
  try {
60
- op = await client.post(`projects/${args.projectId}/instances`, {
59
+ await client.post(`projects/${args.projectId}/instances`, {
61
60
  name: args.instanceId,
62
61
  region: args.location,
63
62
  databaseVersion: "POSTGRES_15",
@@ -78,23 +77,12 @@ async function createInstance(args) {
78
77
  },
79
78
  },
80
79
  });
80
+ return;
81
81
  }
82
82
  catch (err) {
83
83
  handleAllowlistError(err, args.location);
84
84
  throw err;
85
85
  }
86
- if (!args.waitForCreation) {
87
- return;
88
- }
89
- const opName = `projects/${args.projectId}/operations/${op.body.name}`;
90
- const pollRes = await operationPoller.pollOperation({
91
- apiOrigin: (0, api_1.cloudSQLAdminOrigin)(),
92
- apiVersion: API_VERSION,
93
- operationResourceName: opName,
94
- doneFn: (op) => op.status === "DONE",
95
- masterTimeout: 1200000,
96
- });
97
- return pollRes;
98
86
  }
99
87
  exports.createInstance = createInstance;
100
88
  async function updateInstanceForDataConnect(instance, enableGoogleMlIntegration) {
@@ -72,7 +72,7 @@ async function execute(sqlStatements, opts) {
72
72
  sqlStatements.push("COMMIT;");
73
73
  }
74
74
  for (const s of sqlStatements) {
75
- logFn(`Executing: '${s}'`);
75
+ logFn(`> ${s}`);
76
76
  try {
77
77
  results.push(await conn.query(s));
78
78
  }
@@ -84,6 +84,7 @@ async function execute(sqlStatements, opts) {
84
84
  }
85
85
  }
86
86
  await cleanUpFn();
87
+ logFn(``);
87
88
  return results;
88
89
  }
89
90
  exports.execute = execute;
@@ -128,7 +129,7 @@ async function getIAMUser(options) {
128
129
  return toDatabaseUser(account);
129
130
  }
130
131
  exports.getIAMUser = getIAMUser;
131
- async function setupIAMUsers(instanceId, databaseId, options) {
132
+ async function setupIAMUsers(instanceId, options) {
132
133
  const projectId = (0, projectUtils_1.needProjectId)(options);
133
134
  const { user, mode } = await getIAMUser(options);
134
135
  await cloudSqlAdminClient.createUser(projectId, instanceId, mode, user);
@@ -1,18 +1,18 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.brownfieldSqlSetup = exports.setupBrownfieldAsGreenfield = exports.getSchemaMetadata = exports.greenFieldSchemaSetup = exports.setupSQLPermissions = exports.checkSQLRoleIsGranted = exports.fdcSqlRoleMap = exports.SchemaSetupStatus = void 0;
3
+ exports.grantRoleTo = exports.brownfieldSqlSetup = exports.setupBrownfieldAsGreenfield = exports.getSchemaMetadata = exports.greenFieldSchemaSetup = exports.setupSQLPermissions = exports.checkSQLRoleIsGranted = exports.fdcSqlRoleMap = exports.SchemaSetupStatus = void 0;
4
4
  const clc = require("colorette");
5
5
  const permissions_1 = require("./permissions");
6
6
  const cloudsqladmin_1 = require("./cloudsqladmin");
7
- const connect_1 = require("./connect");
8
7
  const logger_1 = require("../../logger");
9
8
  const prompt_1 = require("../../prompt");
10
9
  const error_1 = require("../../error");
11
10
  const projectUtils_1 = require("../../projectUtils");
12
- const connect_2 = require("./connect");
11
+ const connect_1 = require("./connect");
13
12
  const lodash_1 = require("lodash");
14
- const connect_3 = require("./connect");
13
+ const connect_2 = require("./connect");
15
14
  const utils = require("../../utils");
15
+ const cloudSqlAdminClient = require("./cloudsqladmin");
16
16
  var SchemaSetupStatus;
17
17
  (function (SchemaSetupStatus) {
18
18
  SchemaSetupStatus["NotSetup"] = "not-setup";
@@ -53,7 +53,7 @@ async function checkSQLRoleIsGranted(options, instanceId, databaseId, grantedRol
53
53
  END $$;
54
54
  `;
55
55
  try {
56
- await (0, connect_2.executeSqlCmdsAsIamUser)(options, instanceId, databaseId, [checkCmd], true);
56
+ await (0, connect_1.executeSqlCmdsAsIamUser)(options, instanceId, databaseId, [checkCmd], true);
57
57
  return true;
58
58
  }
59
59
  catch (e) {
@@ -77,7 +77,6 @@ async function setupSQLPermissions(instanceId, databaseId, schemaInfo, options,
77
77
  if (!userIsCSQLAdmin) {
78
78
  throw new error_1.FirebaseError(`Missing required IAM permission to setup SQL schemas. SQL schema setup requires 'roles/cloudsql.admin' or an equivalent role.`);
79
79
  }
80
- await (0, connect_1.setupIAMUsers)(instanceId, databaseId, options);
81
80
  let runGreenfieldSetup = false;
82
81
  if (schemaInfo.setupStatus === SchemaSetupStatus.GreenField) {
83
82
  runGreenfieldSetup = true;
@@ -89,7 +88,7 @@ async function setupSQLPermissions(instanceId, databaseId, schemaInfo, options,
89
88
  }
90
89
  if (runGreenfieldSetup) {
91
90
  const greenfieldSetupCmds = await greenFieldSchemaSetup(instanceId, databaseId, schema, options);
92
- await (0, connect_2.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, greenfieldSetupCmds, silent, true);
91
+ await (0, connect_1.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, greenfieldSetupCmds, silent, true);
93
92
  logFn(clc.green("Database setup complete."));
94
93
  return SchemaSetupStatus.GreenField;
95
94
  }
@@ -125,15 +124,15 @@ async function greenFieldSchemaSetup(instanceId, databaseId, schema, options) {
125
124
  logger_1.logger.warn("Detected cloudsqlsuperuser was previously given to firebase owner, revoking to improve database security.");
126
125
  revokes.push(`REVOKE "cloudsqlsuperuser" FROM "${(0, permissions_1.firebaseowner)(databaseId)}"`);
127
126
  }
128
- const user = (await (0, connect_2.getIAMUser)(options)).user;
127
+ const user = (await (0, connect_1.getIAMUser)(options)).user;
129
128
  const projectNumber = await (0, projectUtils_1.needProjectNumber)(options);
130
- const { user: fdcP4SAUser } = (0, connect_3.toDatabaseUser)((0, connect_3.getDataConnectP4SA)(projectNumber));
129
+ const { user: fdcP4SAUser } = (0, connect_2.toDatabaseUser)((0, connect_2.getDataConnectP4SA)(projectNumber));
131
130
  const sqlRoleSetupCmds = (0, lodash_1.concat)(revokes, [`CREATE SCHEMA IF NOT EXISTS "${schema}"`], (0, permissions_1.ownerRolePermissions)(databaseId, permissions_1.FIREBASE_SUPER_USER, schema), (0, permissions_1.writerRolePermissions)(databaseId, permissions_1.FIREBASE_SUPER_USER, schema), (0, permissions_1.readerRolePermissions)(databaseId, permissions_1.FIREBASE_SUPER_USER, schema), `GRANT "${(0, permissions_1.firebaseowner)(databaseId, schema)}" TO "${user}"`, `GRANT "${(0, permissions_1.firebasewriter)(databaseId, schema)}" TO "${fdcP4SAUser}"`, (0, permissions_1.defaultPermissions)(databaseId, schema, (0, permissions_1.firebaseowner)(databaseId, schema)));
132
131
  return sqlRoleSetupCmds;
133
132
  }
134
133
  exports.greenFieldSchemaSetup = greenFieldSchemaSetup;
135
134
  async function getSchemaMetadata(instanceId, databaseId, schema, options) {
136
- const checkSchemaExists = await (0, connect_2.executeSqlCmdsAsIamUser)(options, instanceId, databaseId, [
135
+ const checkSchemaExists = await (0, connect_1.executeSqlCmdsAsIamUser)(options, instanceId, databaseId, [
137
136
  `SELECT pg_get_userbyid(nspowner)
138
137
  FROM pg_namespace
139
138
  WHERE nspname = '${schema}';`,
@@ -148,7 +147,7 @@ async function getSchemaMetadata(instanceId, databaseId, schema, options) {
148
147
  }
149
148
  const schemaOwner = checkSchemaExists[0].rows[0].pg_get_userbyid;
150
149
  const cmd = `SELECT tablename, tableowner FROM pg_tables WHERE schemaname='${schema}'`;
151
- const res = await (0, connect_2.executeSqlCmdsAsIamUser)(options, instanceId, databaseId, [cmd], true);
150
+ const res = await (0, connect_1.executeSqlCmdsAsIamUser)(options, instanceId, databaseId, [cmd], true);
152
151
  const tables = res[0].rows.map((row) => {
153
152
  return {
154
153
  name: row.tablename,
@@ -157,7 +156,7 @@ async function getSchemaMetadata(instanceId, databaseId, schema, options) {
157
156
  });
158
157
  const checkRoleExists = async (role) => {
159
158
  const cmd = [`SELECT to_regrole('"${role}"') IS NOT NULL AS exists;`];
160
- const result = await (0, connect_2.executeSqlCmdsAsIamUser)(options, instanceId, databaseId, cmd, true);
159
+ const result = await (0, connect_1.executeSqlCmdsAsIamUser)(options, instanceId, databaseId, cmd, true);
161
160
  return result[0].rows[0].exists;
162
161
  };
163
162
  let setupStatus;
@@ -198,7 +197,7 @@ async function setupBrownfieldAsGreenfield(instanceId, databaseId, schemaInfo, o
198
197
  ...alterTableCmds,
199
198
  ...revokeOwnersFromSuperuserCmds,
200
199
  ];
201
- await (0, connect_2.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, setupCmds, silent, true);
200
+ await (0, connect_1.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, setupCmds, silent, true);
202
201
  }
203
202
  exports.setupBrownfieldAsGreenfield = setupBrownfieldAsGreenfield;
204
203
  async function brownfieldSqlSetup(instanceId, databaseId, schemaInfo, options, silent = false) {
@@ -206,9 +205,9 @@ async function brownfieldSqlSetup(instanceId, databaseId, schemaInfo, options, s
206
205
  const uniqueTablesOwners = filterTableOwners(schemaInfo, databaseId);
207
206
  const grantOwnersToFirebasesuperuser = uniqueTablesOwners.map((owner) => `GRANT "${owner}" TO "${permissions_1.FIREBASE_SUPER_USER}"`);
208
207
  const revokeOwnersFromFirebasesuperuser = uniqueTablesOwners.map((owner) => `REVOKE "${owner}" FROM "${permissions_1.FIREBASE_SUPER_USER}"`);
209
- const iamUser = (await (0, connect_2.getIAMUser)(options)).user;
208
+ const iamUser = (await (0, connect_1.getIAMUser)(options)).user;
210
209
  const projectNumber = await (0, projectUtils_1.needProjectNumber)(options);
211
- const { user: fdcP4SAUser } = (0, connect_3.toDatabaseUser)((0, connect_3.getDataConnectP4SA)(projectNumber));
210
+ const { user: fdcP4SAUser } = (0, connect_2.toDatabaseUser)((0, connect_2.getDataConnectP4SA)(projectNumber));
212
211
  const firebaseDefaultPermissions = uniqueTablesOwners.flatMap((owner) => (0, permissions_1.defaultPermissions)(databaseId, schema, owner));
213
212
  const brownfieldSetupCmds = [
214
213
  ...grantOwnersToFirebasesuperuser,
@@ -219,6 +218,14 @@ async function brownfieldSqlSetup(instanceId, databaseId, schemaInfo, options, s
219
218
  ...firebaseDefaultPermissions,
220
219
  ...revokeOwnersFromFirebasesuperuser,
221
220
  ];
222
- await (0, connect_2.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, brownfieldSetupCmds, silent, true);
221
+ await (0, connect_1.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, brownfieldSetupCmds, silent, true);
223
222
  }
224
223
  exports.brownfieldSqlSetup = brownfieldSqlSetup;
224
/**
 * Grants the Data Connect SQL role mapped from `role` to the IAM principal
 * `email` on the given Cloud SQL database: first creates the matching
 * database user, then issues the GRANT as the superuser.
 * @param options command options (used to resolve the project id).
 * @param instanceId Cloud SQL instance id.
 * @param databaseId target database id.
 * @param role key into `fdcSqlRoleMap` selecting the SQL role to grant.
 * @param email IAM principal to receive the role.
 */
async function grantRoleTo(options, instanceId, databaseId, role, email) {
    const projectId = (0, projectUtils_1.needProjectId)(options);
    const dbUser = (0, connect_2.toDatabaseUser)(email);
    await cloudSqlAdminClient.createUser(projectId, instanceId, dbUser.mode, dbUser.user);
    const sqlRole = exports.fdcSqlRoleMap[role](databaseId);
    const grantCmd = `GRANT "${sqlRole}" TO "${dbUser.user}"`;
    await (0, connect_1.executeSqlCmdsAsSuperUser)(options, instanceId, databaseId, [grantCmd], false);
}
231
+ exports.grantRoleTo = grantRoleTo;
package/lib/gcp/k8s.js ADDED
@@ -0,0 +1,32 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.mebibytes = void 0;
4
// Byte multipliers for Kubernetes-style memory quantity suffixes:
// decimal (k/M/G/T) and binary (Ki/Mi/Gi/Ti).
const BYTES_PER_UNIT = {
    "": 1,
    k: 1e3,
    M: 1e6,
    G: 1e9,
    T: 1e12,
    Ki: Math.pow(2, 10),
    Mi: Math.pow(2, 20),
    Gi: Math.pow(2, 30),
    // BUGFIX: was `1 << 40`. JavaScript shift counts wrap modulo 32, so
    // `1 << 40` evaluates to 256 rather than 2^40; Math.pow is exact here.
    Ti: Math.pow(2, 40),
};
/**
 * Converts a Kubernetes-style memory quantity string into mebibytes.
 * Accepts an optional decimal (k/M/G/T), binary (Ki/Mi/Gi/Ti), or
 * exponent ([eE]N) suffix, e.g. "256Mi", "1G", "2e6".
 * @param {string} memory the quantity string to parse.
 * @returns {number} the equivalent amount in MiB (may be fractional).
 * @throws {Error} when the string is not a valid memory quantity.
 */
function mebibytes(memory) {
    const re = /^([0-9]+(\.[0-9]*)?)(Ki|Mi|Gi|Ti|k|M|G|T|([eE]([0-9]+)))?$/;
    const matches = re.exec(memory);
    if (!matches) {
        // BUGFIX: message previously ended with a stray extra double quote.
        throw new Error(`Invalid memory quantity "${memory}"`);
    }
    const quantity = Number.parseFloat(matches[1]);
    let bytes;
    if (matches[5]) {
        // Exponent form: quantity * 10^N.
        bytes = quantity * Math.pow(10, Number.parseFloat(matches[5]));
    }
    else {
        const suffix = matches[3] || "";
        bytes = quantity * BYTES_PER_UNIT[suffix];
    }
    return bytes / (1 << 20);
}
32
+ exports.mebibytes = mebibytes;