@salesforce/core 3.13.0 → 3.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. package/CHANGELOG.md +18 -0
  2. package/lib/config/config.d.ts +23 -1
  3. package/lib/config/config.js +28 -18
  4. package/lib/config/configAggregator.d.ts +37 -24
  5. package/lib/config/configAggregator.js +83 -41
  6. package/lib/config/envVars.js +3 -3
  7. package/lib/config/sandboxProcessCache.d.ts +15 -0
  8. package/lib/config/sandboxProcessCache.js +38 -0
  9. package/lib/exported.d.ts +5 -3
  10. package/lib/exported.js +8 -2
  11. package/lib/globalInfo/accessors/sandboxAccessor.d.ts +36 -0
  12. package/lib/globalInfo/accessors/sandboxAccessor.js +63 -0
  13. package/lib/globalInfo/globalInfoConfig.d.ts +2 -0
  14. package/lib/globalInfo/globalInfoConfig.js +5 -0
  15. package/lib/globalInfo/sfdxDataHandler.d.ts +12 -2
  16. package/lib/globalInfo/sfdxDataHandler.js +116 -25
  17. package/lib/globalInfo/types.d.ts +19 -1
  18. package/lib/globalInfo/types.js +1 -0
  19. package/lib/org/authInfo.d.ts +2 -1
  20. package/lib/org/authInfo.js +2 -1
  21. package/lib/org/connection.js +4 -4
  22. package/lib/org/org.d.ts +61 -39
  23. package/lib/org/org.js +261 -159
  24. package/lib/org/scratchOrgCache.d.ts +19 -0
  25. package/lib/org/scratchOrgCache.js +33 -0
  26. package/lib/org/scratchOrgCreate.d.ts +25 -16
  27. package/lib/org/scratchOrgCreate.js +110 -41
  28. package/lib/org/scratchOrgErrorCodes.d.ts +8 -2
  29. package/lib/org/scratchOrgErrorCodes.js +26 -3
  30. package/lib/org/scratchOrgInfoApi.d.ts +19 -8
  31. package/lib/org/scratchOrgInfoApi.js +91 -42
  32. package/lib/org/scratchOrgLifecycleEvents.d.ts +2 -0
  33. package/lib/org/scratchOrgLifecycleEvents.js +20 -1
  34. package/lib/org/scratchOrgSettingsGenerator.d.ts +7 -2
  35. package/lib/org/scratchOrgSettingsGenerator.js +1 -0
  36. package/lib/sfProject.js +1 -1
  37. package/lib/status/pollingClient.js +1 -0
  38. package/lib/testSetup.js +0 -2
  39. package/messages/org.md +9 -1
  40. package/messages/scratchOrgCreate.md +20 -0
  41. package/package.json +2 -2
@@ -0,0 +1,36 @@
1
+ import { Nullable } from '@salesforce/ts-types';
2
+ import { GlobalInfo } from '../globalInfoConfig';
3
+ import { SfSandbox, SfSandboxes } from '../types';
4
+ export declare class SandboxAccessor {
5
+ private globalInfo;
6
+ constructor(globalInfo: GlobalInfo);
7
+ /**
8
+ * Returns all the sandboxes (or all the sandboxes for a given prod org)
9
+ *
10
+ * @param entity entity as a string should be a production org username
11
+ * and when entity is a SfSandbox, the prod org entity.prodOrgUsername will
12
+ * be used in the filter.
13
+ */
14
+ getAll(entity?: string | SfSandbox): SfSandboxes;
15
+ /**
16
+ * Returns the SfSandbox config entry that corresponds to the given
17
+ * sandbox org id if it exists
18
+ *
19
+ * @param sandboxOrgId the sandboxOrgId that corresponds to a sandbox
20
+ */
21
+ get(sandboxOrgId?: string): Nullable<SfSandbox>;
22
+ /**
23
+ * Returns true if the given sandbox org id exists
24
+ *
25
+ * @param sandboxOrgId the sandboxOrgId that corresponds to a sandbox
26
+ */
27
+ has(sandboxOrgId?: string): boolean;
28
+ /**
29
+ * Set a sandboxOrgId for the given sandbox entity
30
+ *
31
+ * @param sandboxOrgId the sandboxOrgId you want to set
32
+ * @param entity the sandbox entity
33
+ */
34
+ set(sandboxOrgId: string, entity: SfSandbox): void;
35
+ unset(sandboxOrgId: string): void;
36
+ }
@@ -0,0 +1,63 @@
1
+ "use strict";
2
+ /*
3
+ * Copyright (c) 2021, salesforce.com, inc.
4
+ * All rights reserved.
5
+ * Licensed under the BSD 3-Clause license.
6
+ * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
7
+ */
8
+ Object.defineProperty(exports, "__esModule", { value: true });
9
+ exports.SandboxAccessor = void 0;
10
+ const types_1 = require("../types");
11
+ class SandboxAccessor {
12
+ constructor(globalInfo) {
13
+ this.globalInfo = globalInfo;
14
+ }
15
+ /**
16
+ * Returns all the sandboxes (or all the sandboxes for a given prod org)
17
+ *
18
+ * @param entity entity as a string should be a production org username
19
+ * and when entity is a SfSandbox, the prod org entity.prodOrgUsername will
20
+ * be used in the filter.
21
+ */
22
+ getAll(entity) {
23
+ const all = this.globalInfo.get(types_1.SfInfoKeys.SANDBOXES) || {};
24
+ if (!entity) {
25
+ return all;
26
+ }
27
+ const prodOrgUsername = typeof entity === 'string' ? entity : entity.prodOrgUsername;
28
+ return Object.fromEntries(Object.entries(all).filter(([, value]) => value.prodOrgUsername === prodOrgUsername));
29
+ }
30
+ /**
31
+ * Returns the SfSandbox config entry that corresponds to the given
32
+ * sandbox org id if it exists
33
+ *
34
+ * @param sandboxOrgId the sandboxOrgId that corresponds to a sandbox
35
+ */
36
+ get(sandboxOrgId) {
37
+ var _a;
38
+ return sandboxOrgId ? (_a = this.getAll()[sandboxOrgId]) !== null && _a !== void 0 ? _a : null : null;
39
+ }
40
+ /**
41
+ * Returns true if the given sandbox org id exists
42
+ *
43
+ * @param sandboxOrgId the sandboxOrgId that corresponds to a sandbox
44
+ */
45
+ has(sandboxOrgId) {
46
+ var _a;
47
+ return !!(sandboxOrgId ? (_a = this.getAll()[sandboxOrgId]) !== null && _a !== void 0 ? _a : null : null);
48
+ }
49
+ /**
50
+ * Set a sandboxOrgId for the given sandbox entity
51
+ *
52
+ * @param sandboxOrgId the sandboxOrgId you want to set
53
+ * @param entity the sandbox entity
54
+ */
55
+ set(sandboxOrgId, entity) {
56
+ this.globalInfo.set(`${types_1.SfInfoKeys.SANDBOXES}["${sandboxOrgId}"]`, entity);
57
+ }
58
+ unset(sandboxOrgId) {
59
+ delete this.globalInfo.get(types_1.SfInfoKeys.SANDBOXES)[sandboxOrgId];
60
+ }
61
+ }
62
+ exports.SandboxAccessor = SandboxAccessor;
63
+ //# sourceMappingURL=sandboxAccessor.js.map
@@ -5,6 +5,7 @@ import { OrgAccessor } from './accessors/orgAccessor';
5
5
  import { TokenAccessor } from './accessors/tokenAccessor';
6
6
  import { AliasAccessor } from './accessors/aliasAccessor';
7
7
  import { SfInfo } from './types';
8
+ import { SandboxAccessor } from './accessors/sandboxAccessor';
8
9
  export declare function deepCopy<T extends AnyJson>(data: T): T;
9
10
  export declare class GlobalInfo extends ConfigFile<ConfigFile.Options, SfInfo> {
10
11
  protected static encryptedKeys: RegExp[];
@@ -27,6 +28,7 @@ export declare class GlobalInfo extends ConfigFile<ConfigFile.Options, SfInfo> {
27
28
  get orgs(): OrgAccessor;
28
29
  get tokens(): TokenAccessor;
29
30
  get aliases(): AliasAccessor;
31
+ get sandboxes(): SandboxAccessor;
30
32
  set(key: string, value: ConfigValue): void;
31
33
  write(newContents?: SfInfo): Promise<SfInfo>;
32
34
  protected init(): Promise<void>;
@@ -15,6 +15,7 @@ const orgAccessor_1 = require("./accessors/orgAccessor");
15
15
  const tokenAccessor_1 = require("./accessors/tokenAccessor");
16
16
  const aliasAccessor_1 = require("./accessors/aliasAccessor");
17
17
  const types_1 = require("./types");
18
+ const sandboxAccessor_1 = require("./accessors/sandboxAccessor");
18
19
  function deepCopy(data) {
19
20
  return JSON.parse(JSON.stringify(data));
20
21
  }
@@ -64,6 +65,9 @@ class GlobalInfo extends configFile_1.ConfigFile {
64
65
  get aliases() {
65
66
  return new aliasAccessor_1.AliasAccessor(this);
66
67
  }
68
+ get sandboxes() {
69
+ return new sandboxAccessor_1.SandboxAccessor(this);
70
+ }
67
71
  set(key, value) {
68
72
  if ((0, ts_types_1.isPlainObject)(value)) {
69
73
  value = this.timestamp(value);
@@ -101,5 +105,6 @@ GlobalInfo.EMPTY_DATA_MODEL = {
101
105
  [types_1.SfInfoKeys.ORGS]: {},
102
106
  [types_1.SfInfoKeys.TOKENS]: {},
103
107
  [types_1.SfInfoKeys.ALIASES]: {},
108
+ [types_1.SfInfoKeys.SANDBOXES]: {},
104
109
  };
105
110
  //# sourceMappingURL=globalInfoConfig.js.map
@@ -1,5 +1,5 @@
1
1
  import { ConfigFile } from '../config/configFile';
2
- import { SfInfo, SfInfoKeys, SfOrg, SfOrgs } from './types';
2
+ import { SfInfo, SfInfoKeys, SfOrg, SfOrgs, SfSandbox } from './types';
3
3
  interface Handler<T extends SfInfoKeys> {
4
4
  sfKey: T;
5
5
  merge: (sfData: SfInfo) => Promise<Partial<SfInfo>>;
@@ -11,7 +11,7 @@ interface Changes<T> {
11
11
  deleted: string[];
12
12
  }
13
13
  export declare class SfdxDataHandler {
14
- handlers: (AuthHandler | AliasesHandler)[];
14
+ handlers: (AuthHandler | AliasesHandler | SandboxesHandler)[];
15
15
  private original;
16
16
  write(latest?: SfInfo): Promise<void>;
17
17
  merge(sfData?: SfInfo): Promise<SfInfo>;
@@ -40,4 +40,14 @@ export declare class AliasesHandler extends BaseHandler<SfInfoKeys.ALIASES> {
40
40
  merge(sfData?: SfInfo): Promise<Partial<SfInfo>>;
41
41
  write(latest: SfInfo): Promise<void>;
42
42
  }
43
+ export declare class SandboxesHandler extends BaseHandler<SfInfoKeys.SANDBOXES> {
44
+ private static sandboxFilenameFilterRegEx;
45
+ sfKey: typeof SfInfoKeys.SANDBOXES;
46
+ merge(sfData?: SfInfo): Promise<Partial<SfInfo>>;
47
+ migrate(): Promise<Pick<SfInfo, SfInfoKeys.SANDBOXES>>;
48
+ write(latest: SfInfo, original: SfInfo): Promise<void>;
49
+ listAllSandboxFiles(): Promise<string[]>;
50
+ listAllSandboxes(): Promise<SfSandbox[]>;
51
+ private findChanges;
52
+ }
43
53
  export {};
@@ -6,13 +6,14 @@
6
6
  * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
7
7
  */
8
8
  Object.defineProperty(exports, "__esModule", { value: true });
9
- exports.AliasesHandler = exports.AuthHandler = exports.SfdxDataHandler = void 0;
9
+ exports.SandboxesHandler = exports.AliasesHandler = exports.AuthHandler = exports.SfdxDataHandler = void 0;
10
10
  const path_1 = require("path");
11
11
  const fs = require("fs");
12
12
  const kit_1 = require("@salesforce/kit");
13
13
  const ts_types_1 = require("@salesforce/ts-types");
14
14
  const global_1 = require("../global");
15
15
  const configFile_1 = require("../config/configFile");
16
+ const sandboxOrgConfig_1 = require("../config/sandboxOrgConfig");
16
17
  const globalInfoConfig_1 = require("./globalInfoConfig");
17
18
  const types_1 = require("./types");
18
19
  function isEqual(object1, object2) {
@@ -28,13 +29,11 @@ function isEqual(object1, object2) {
28
29
  }
29
30
  class SfdxDataHandler {
30
31
  constructor() {
31
- this.handlers = [new AuthHandler(), new AliasesHandler()];
32
+ this.handlers = [new AuthHandler(), new AliasesHandler(), new SandboxesHandler()];
32
33
  }
33
34
  async write(latest = globalInfoConfig_1.GlobalInfo.emptyDataModel) {
34
- for (const handler of this.handlers) {
35
- await handler.write(latest, this.original);
36
- this.setOriginal(latest);
37
- }
35
+ await Promise.all(this.handlers.map((handler) => handler.write(latest, this.original)));
36
+ this.setOriginal(latest);
38
37
  }
39
38
  async merge(sfData = globalInfoConfig_1.GlobalInfo.emptyDataModel) {
40
39
  let merged = (0, globalInfoConfig_1.deepCopy)(sfData);
@@ -67,13 +66,13 @@ class BaseHandler {
67
66
  });
68
67
  (0, kit_1.set)(merged, `${key}["${k}"]`, Object.assign({}, older, newer));
69
68
  }
70
- // Keys that exist in .sfdx but not .sf are added becase we assume
69
+ // Keys that exist in .sfdx but not .sf are added because we assume
71
70
  // that this means the key was created using sfdx.
72
71
  // However, this is not always a valid assumption because it could
73
72
  // also mean that the key was deleted using sf, in which case we
74
73
  // do not want to migrate the sfdx key to sf.
75
74
  // Programmatically differentiating between a new key and a deleted key
76
- // would be nearly impossible. Instead we should ensure that whenever
75
+ // would be nearly impossible. Instead, we should ensure that whenever
77
76
  // sf deletes a key it also deletes it in sfdx. This way, we can safely
78
77
  assume that we should migrate any keys that exist in .sfdx
79
78
  const unhandledSfdxKeys = sfdxKeys.filter((k) => !sfKeys.includes(k));
@@ -103,17 +102,17 @@ class AuthHandler extends BaseHandler {
103
102
  }
104
103
  async write(latest, original) {
105
104
  const { changed, deleted } = await this.findChanges(latest, original);
106
- for (const [username, authData] of Object.entries(changed)) {
107
- if (authData) {
108
- const config = await this.createAuthFileConfig(username);
109
- config.setContentsFromObject(authData);
110
- await config.write();
111
- }
112
- }
113
- for (const username of deleted) {
105
+ await Promise.all(Object.entries(changed)
106
+ .filter(([, authData]) => authData)
107
+ .map(async ([username, authData]) => {
114
108
  const config = await this.createAuthFileConfig(username);
115
- await config.unlink();
116
- }
109
+ config.setContentsFromObject(authData);
110
+ return config.write();
111
+ }));
112
+ await Promise.all(deleted.map(async (username) => {
113
+ const config = await this.createAuthFileConfig(username);
114
+ return config.unlink();
115
+ }));
117
116
  }
118
117
  async findChanges(latest, original) {
119
118
  var _a;
@@ -146,16 +145,14 @@ class AuthHandler extends BaseHandler {
146
145
  }
147
146
  async listAllAuthorizations() {
148
147
  const filenames = await this.listAllAuthFiles();
149
- const auths = [];
150
- for (const filename of filenames) {
151
- const username = (0, path_1.basename)(filename, (0, path_1.extname)(filename));
148
+ return Promise.all(filenames
149
+ .map((f) => (0, path_1.basename)(f, (0, path_1.extname)(f)))
150
+ .map(async (username) => {
152
151
  const configFile = await this.createAuthFileConfig(username);
153
152
  const contents = configFile.getContents();
154
153
  const stat = await configFile.stat();
155
- const auth = Object.assign(contents, { timestamp: stat.mtime.toISOString() });
156
- auths.push(auth);
157
- }
158
- return auths;
154
+ return { ...contents, timestamp: stat.mtime.toISOString() };
155
+ }));
159
156
  }
160
157
  }
161
158
  exports.AuthHandler = AuthHandler;
@@ -215,4 +212,98 @@ class AliasesHandler extends BaseHandler {
215
212
  }
216
213
  exports.AliasesHandler = AliasesHandler;
217
214
  AliasesHandler.SFDX_ALIASES_FILENAME = 'alias.json';
215
+ class SandboxesHandler extends BaseHandler {
216
+ constructor() {
217
+ super(...arguments);
218
+ this.sfKey = types_1.SfInfoKeys.SANDBOXES;
219
+ }
220
+ async merge(sfData = globalInfoConfig_1.GlobalInfo.emptyDataModel) {
221
+ var _a, _b;
222
+ const sfdxData = await this.migrate();
223
+ const merged = (0, globalInfoConfig_1.deepCopy)(sfData);
224
+ // Only merge the key this handler is responsible for.
225
+ const key = this.sfKey;
226
+ const sfKeys = Object.keys((_a = sfData[key]) !== null && _a !== void 0 ? _a : {});
227
+ const sfdxKeys = Object.keys((_b = sfdxData[key]) !== null && _b !== void 0 ? _b : {});
228
+ // sandbox entries for .sf and .sfdx contain static data. Given there
229
+ // can be no mutation during the life of the sandbox, having to merge common keys
230
+ // is unnecessary.
231
+ // Keys that exist in .sfdx but not .sf are added because we assume
232
+ // that this means the key was created using sfdx.
233
+ // However, this is not always a valid assumption because it could
234
+ // also mean that the key was deleted using sf, in which case we
235
+ // do not want to migrate the sfdx key to sf.
236
+ // Programmatically differentiating between a new key and a deleted key
237
+ // would be nearly impossible. Instead, we should ensure that whenever
238
+ // sf deletes a key it also deletes it in sfdx. This way, we can safely
239
+ // assume that we should migrate any keys that exist in .sfdx
240
+ const unhandledSfdxKeys = sfdxKeys.filter((k) => !sfKeys.includes(k));
241
+ for (const k of unhandledSfdxKeys) {
242
+ (0, kit_1.set)(merged, `${key}["${k}"]`, sfdxData[key][k]);
243
+ }
244
+ // Keys that exist in .sf but not .sfdx are deleted because we assume
245
+ // that this means the key was deleted while using sfdx.
246
+ // We can make this assumption because keys that are created by sf will
247
+ // always be migrated back to sfdx
248
+ const unhandledSfKeys = sfKeys.filter((k) => !sfdxKeys.includes(k));
249
+ for (const k of unhandledSfKeys) {
250
+ delete merged[key][k];
251
+ }
252
+ return merged;
253
+ }
254
+ async migrate() {
255
+ const oldSandboxes = await this.listAllSandboxes();
256
+ const newSandboxes = Object.fromEntries(oldSandboxes.map((old) => [old.sandboxOrgId, old]));
257
+ return { [this.sfKey]: newSandboxes };
258
+ }
259
+ async write(latest, original) {
260
+ const { changed, deleted } = await this.findChanges(latest, original);
261
+ for (const sandboxData of Object.values(changed)) {
262
+ if (sandboxData) {
263
+ const orgId = sandboxData.sandboxOrgId;
264
+ const sandboxConfig = new sandboxOrgConfig_1.SandboxOrgConfig(sandboxOrgConfig_1.SandboxOrgConfig.getOptions(orgId));
265
+ sandboxConfig.set(sandboxOrgConfig_1.SandboxOrgConfig.Fields.PROD_ORG_USERNAME, sandboxData.prodOrgUsername);
266
+ await sandboxConfig.write();
267
+ }
268
+ }
269
+ for (const username of deleted) {
270
+ const originalSandbox = original.sandboxes[username];
271
+ const orgId = originalSandbox.sandboxOrgId;
272
+ const sandboxConfig = new sandboxOrgConfig_1.SandboxOrgConfig(sandboxOrgConfig_1.SandboxOrgConfig.getOptions(orgId));
273
+ await sandboxConfig.unlink();
274
+ }
275
+ }
276
+ async listAllSandboxFiles() {
277
+ const globalFiles = await fs.promises.readdir(global_1.Global.SFDX_DIR);
278
+ return globalFiles.filter((file) => file.match(SandboxesHandler.sandboxFilenameFilterRegEx));
279
+ }
280
+ async listAllSandboxes() {
281
+ return Promise.all((await this.listAllSandboxFiles()).map(async (filename) => {
282
+ const matches = filename.match(SandboxesHandler.sandboxFilenameFilterRegEx);
283
+ const orgId = matches ? matches[1] : '';
284
+ const sandboxConfig = new sandboxOrgConfig_1.SandboxOrgConfig(sandboxOrgConfig_1.SandboxOrgConfig.getOptions(orgId));
285
+ const stat = await sandboxConfig.stat();
286
+ const contents = { ...(await sandboxConfig.read(true)), sandboxOrgId: orgId };
287
+ const sandbox = Object.assign(contents, { timestamp: stat.mtime.toISOString() });
288
+ return sandbox;
289
+ }));
290
+ }
291
+ async findChanges(latest, original) {
292
+ var _a;
293
+ const latestSandboxes = latest.sandboxes;
294
+ const originalSandboxes = original.sandboxes;
295
+ const changed = {};
296
+ for (const [sandboxOrgId, sandbox] of Object.entries(latestSandboxes)) {
297
+ const originalSandbox = (_a = originalSandboxes[sandboxOrgId]) !== null && _a !== void 0 ? _a : {};
298
+ if (!isEqual(sandbox, originalSandbox)) {
299
+ changed[sandboxOrgId] = sandbox;
300
+ }
301
+ }
302
+ const deleted = Object.keys(originalSandboxes).filter((sandboxOrgId) => !latestSandboxes[sandboxOrgId]);
303
+ return { changed, deleted };
304
+ }
305
+ }
306
+ exports.SandboxesHandler = SandboxesHandler;
307
+ // The regular expression that filters files stored in $HOME/.sfdx
308
+ SandboxesHandler.sandboxFilenameFilterRegEx = /^(00D.*?)\.sandbox\.json$/;
218
309
  //# sourceMappingURL=sfdxDataHandler.js.map
@@ -2,7 +2,8 @@ import { JsonMap } from '@salesforce/ts-types';
2
2
  export declare enum SfInfoKeys {
3
3
  ORGS = "orgs",
4
4
  TOKENS = "tokens",
5
- ALIASES = "aliases"
5
+ ALIASES = "aliases",
6
+ SANDBOXES = "sandboxes"
6
7
  }
7
8
  export declare type Timestamp = {
8
9
  timestamp: string;
@@ -32,8 +33,25 @@ export interface SfTokens {
32
33
  export interface SfAliases {
33
34
  [alias: string]: string;
34
35
  }
36
+ export declare type SfSandbox = {
37
+ sandboxOrgId: string;
38
+ prodOrgUsername: string;
39
+ sandboxName?: string;
40
+ sandboxUsername?: string;
41
+ sandboxProcessId?: string;
42
+ sandboxInfoId?: string;
43
+ } & Timestamp & SfEntry;
44
+ /**
45
+ * The key will always be the sandbox org id and the value will always be the
46
+ * corresponding SfSandbox entry, e.g.
47
+ * { "00Dxx0000000001": { sandboxOrgId: "00Dxx0000000001", prodOrgUsername: "user@salesforce.com" } }
48
+ */
49
+ export interface SfSandboxes {
50
+ [sandboxOrgId: string]: SfSandbox;
51
+ }
35
52
  export declare type SfInfo = {
36
53
  [SfInfoKeys.ORGS]: SfOrgs;
37
54
  [SfInfoKeys.TOKENS]: SfTokens;
38
55
  [SfInfoKeys.ALIASES]: SfAliases;
56
+ [SfInfoKeys.SANDBOXES]: SfSandboxes;
39
57
  };
@@ -6,5 +6,6 @@ var SfInfoKeys;
6
6
  SfInfoKeys["ORGS"] = "orgs";
7
7
  SfInfoKeys["TOKENS"] = "tokens";
8
8
  SfInfoKeys["ALIASES"] = "aliases";
9
+ SfInfoKeys["SANDBOXES"] = "sandboxes";
9
10
  })(SfInfoKeys = exports.SfInfoKeys || (exports.SfInfoKeys = {}));
10
11
  //# sourceMappingURL=types.js.map
@@ -46,6 +46,7 @@ export declare type OrgAuthorization = {
46
46
  configs: Nullable<string[]>;
47
47
  isScratchOrg?: boolean;
48
48
  isDevHub?: boolean;
49
+ isSandbox?: boolean;
49
50
  instanceUrl?: string;
50
51
  accessToken?: string;
51
52
  error?: string;
@@ -60,7 +61,7 @@ export interface AccessTokenOptions {
60
61
  instanceUrl?: string;
61
62
  }
62
63
  export declare type AuthSideEffects = {
63
- alias: string;
64
+ alias?: string;
64
65
  setDefault: boolean;
65
66
  setDefaultDevHub: boolean;
66
67
  };
@@ -130,7 +130,7 @@ class AuthInfo extends kit_1.AsyncOptionalCreatable {
130
130
  * @returns {string}
131
131
  */
132
132
  static getDefaultInstanceUrl() {
133
- const configuredInstanceUrl = configAggregator_1.ConfigAggregator.getValue('instanceUrl').value;
133
+ const configuredInstanceUrl = configAggregator_1.ConfigAggregator.getValue(orgConfigProperties_1.OrgConfigProperties.ORG_INSTANCE_URL).value;
134
134
  return configuredInstanceUrl || sfdcUrl_1.SfdcUrl.PRODUCTION;
135
135
  }
136
136
  /**
@@ -166,6 +166,7 @@ class AuthInfo extends kit_1.AsyncOptionalCreatable {
166
166
  instanceUrl,
167
167
  isScratchOrg: Boolean(devHubUsername),
168
168
  isDevHub: isDevHub || false,
169
+ isSandbox: globalInfo.sandboxes.has(orgId),
169
170
  orgId: orgId,
170
171
  accessToken: authInfo.getConnectionOptions().accessToken,
171
172
  oauthMethod: authInfo.isJwt() ? 'jwt' : authInfo.isOauth() ? 'web' : 'token',
@@ -86,7 +86,7 @@ class Connection extends jsforce_1.Connection {
86
86
  if (!baseOptions.version) {
87
87
  // Set the API version obtained from the config aggregator.
88
88
  const configAggregator = options.configAggregator || (await configAggregator_1.ConfigAggregator.create());
89
- baseOptions.version = (0, ts_types_1.asString)(configAggregator.getInfo('apiVersion').value);
89
+ baseOptions.version = (0, ts_types_1.asString)(configAggregator.getInfo('org-api-version').value);
90
90
  }
91
91
  const providedOptions = options.authInfo.getConnectionOptions();
92
92
  // Get connection options from auth info and create a new jsForce connection
@@ -102,7 +102,7 @@ class Connection extends jsforce_1.Connection {
102
102
  }
103
103
  }
104
104
  else {
105
- conn.logger.debug(`The apiVersion ${baseOptions.version} was found from ${((_b = options.connectionOptions) === null || _b === void 0 ? void 0 : _b.version) ? 'passed in options' : 'config'}`);
105
+ conn.logger.debug(`The org-api-version ${baseOptions.version} was found from ${((_b = options.connectionOptions) === null || _b === void 0 ? void 0 : _b.version) ? 'passed in options' : 'config'}`);
106
106
  }
107
107
  }
108
108
  catch (err) {
@@ -334,7 +334,7 @@ class Connection extends jsforce_1.Connection {
334
334
  async autoFetchQuery(soql, queryOptions = {}) {
335
335
  const config = await configAggregator_1.ConfigAggregator.create();
336
336
  // take the limit from the calling function, then the config, then default 10,000
337
- const maxFetch = config.getInfo('maxQueryLimit').value || queryOptions.maxFetch || 10000;
337
+ const maxFetch = config.getInfo('org-max-query-limit').value || queryOptions.maxFetch || 10000;
338
338
  const options = Object.assign(queryOptions, {
339
339
  autoFetch: true,
340
340
  maxFetch,
@@ -418,7 +418,7 @@ class Connection extends jsforce_1.Connection {
418
418
  // so get the latest.
419
419
  await useLatest();
420
420
  }
421
- this.logger.debug(`Loaded latest apiVersion ${version}`);
421
+ this.logger.debug(`Loaded latest org-api-version ${version}`);
422
422
  return version;
423
423
  }
424
424
  }