@lightdash/cli 0.1462.1 → 0.1463.0

@@ -22,7 +22,7 @@ const getDbtContext = async ({ projectDir, initialProjectDir, }) => {
  initialProjectDir: initialProjectDir || projectDir,
  });
  }
- const msg = e instanceof Error ? e.message : '-';
+ const msg = (0, common_1.getErrorMessage)(e);
  throw new common_1.ParseError(`Is ${initialProjectDir} a valid dbt project directory? Couldn't find a valid dbt_project.yml on ${initialProjectDir} or any of its parents:\n ${msg}`);
  }
  const config = yaml.load(file);
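
The change above is repeated across most of the hunks below: the inline `e instanceof Error ? e.message : '-'` fallback is replaced by the `getErrorMessage` helper imported from `@lightdash/common`, so thrown values that are not `Error` instances still produce a useful message instead of `'-'`. The helper's implementation is not part of this diff; the snippet below is only a rough sketch of what such a helper typically does, not the actual `@lightdash/common` code:

// Hypothetical sketch of a getErrorMessage-style helper (assumed shape,
// not the real @lightdash/common implementation).
export const getErrorMessage = (e: unknown): string => {
    if (e instanceof Error) return e.message;
    if (typeof e === 'string') return e;
    try {
        return JSON.stringify(e); // best-effort for thrown plain objects
    } catch {
        return String(e); // e.g. circular structures
    }
};
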
@@ -2,6 +2,7 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.loadManifest = exports.getManifestPath = void 0;
  const tslib_1 = require("tslib");
+ const common_1 = require("@lightdash/common");
  const fs_1 = require("fs");
  const path = tslib_1.__importStar(require("path"));
  const globalState_1 = tslib_1.__importDefault(require("../globalState"));
@@ -15,7 +16,7 @@ const loadManifest = async ({ targetDir, }) => {
  return manifest;
  }
  catch (err) {
- const msg = err instanceof Error ? err.message : '-';
+ const msg = (0, common_1.getErrorMessage)(err);
  throw new Error(`Could not load manifest from ${filename}:\n ${msg}`);
  }
  };
@@ -23,7 +23,7 @@ const loadDbtTarget = async ({ profilesDir, profileName, targetName, }) => {
  allProfiles = yaml.load(rendered);
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
+ const msg = (0, common_1.getErrorMessage)(e);
  throw new common_1.ParseError(`Could not find a valid profiles.yml file at ${profilePath}:\n ${msg}`);
  }
  const profile = allProfiles[profileName];
@@ -65,7 +65,7 @@ const getBigqueryCredentialsFromServiceAccount = async (target) => {
  return JSON.parse(await fs_1.promises.readFile(keyfilePath, 'utf8'));
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
+ const msg = (0, common_1.getErrorMessage)(e);
  throw new common_1.ParseError(`Cannot read keyfile for bigquery target expect at: ${keyfilePath}:\n ${msg}`);
  }
  }
@@ -85,7 +85,7 @@ const convertSnowflakeSchema = async (target) => {
  privateKey = await fs_1.promises.readFile(keyfilePath, 'utf8');
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
+ const msg = (0, common_1.getErrorMessage)(e);
  throw new common_1.ParseError(`Cannot read keyfile for snowflake target at: ${keyfilePath}:\n ${msg}`);
  }
  }
@@ -4,6 +4,7 @@ exports.compileHandler = exports.compile = void 0;
  const tslib_1 = require("tslib");
  const common_1 = require("@lightdash/common");
  const path_1 = tslib_1.__importDefault(require("path"));
+ const fs_1 = require("fs");
  const uuid_1 = require("uuid");
  const analytics_1 = require("../analytics/analytics");
  const context_1 = require("../dbt/context");
@@ -14,6 +15,24 @@ const styles = tslib_1.__importStar(require("../styles"));
  const compile_1 = require("./dbt/compile");
  const getDbtVersion_1 = require("./dbt/getDbtVersion");
  const getWarehouseClient_1 = tslib_1.__importDefault(require("./dbt/getWarehouseClient"));
+ const readAndLoadLightdashProjectConfig = async (projectDir) => {
+     const configPath = path_1.default.join(projectDir, 'lightdash.config.yml');
+     try {
+         const fileContents = await fs_1.promises.readFile(configPath, 'utf8');
+         const config = await (0, common_1.loadLightdashProjectConfig)(fileContents);
+         return config;
+     }
+     catch (e) {
+         globalState_1.default.debug(`No lightdash.config.yml found in ${configPath}`);
+         if (e instanceof Error && 'code' in e && e.code === 'ENOENT') {
+             // Return default config if file doesn't exist
+             return {
+                 spotlight: common_1.DEFAULT_SPOTLIGHT_CONFIG,
+             };
+         }
+         throw e;
+     }
+ };
  const compile = async (options) => {
  const dbtVersion = await (0, getDbtVersion_1.getDbtVersion)();
  globalState_1.default.debug(`> dbt version ${dbtVersion}`);
@@ -72,13 +91,16 @@ ${errors.join('')}`));
  throw new common_1.ParseError(`Dbt adapter ${manifest.metadata.adapter_type} is not supported`);
  }
  globalState_1.default.debug(`> Converting explores with adapter: ${manifest.metadata.adapter_type}`);
+ globalState_1.default.debug(`> Loading lightdash project config from ${absoluteProjectPath}`);
+ const lightdashProjectConfig = await readAndLoadLightdashProjectConfig(absoluteProjectPath);
+ globalState_1.default.debug(`> Loaded lightdash project config`);
  const validExplores = await (0, common_1.convertExplores)(validModelsWithTypes, false, manifest.metadata.adapter_type, [
  common_1.DbtManifestVersion.V10,
  common_1.DbtManifestVersion.V11,
  common_1.DbtManifestVersion.V12,
  ].includes(manifestVersion)
  ? []
- : Object.values(manifest.metrics), warehouseClient);
+ : Object.values(manifest.metrics), warehouseClient, lightdashProjectConfig);
  console.error('');
  const explores = [...validExplores, ...failedExplores];
  explores.forEach((e) => {
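
The two compile hunks above introduce lightdash.config.yml support: the new readAndLoadLightdashProjectConfig helper reads the file from the dbt project directory, parses it with loadLightdashProjectConfig, and falls back to DEFAULT_SPOTLIGHT_CONFIG only when the file is missing (ENOENT), re-throwing any other failure such as a parse error; the resulting config is then passed as a new last argument to convertExplores. The notable detail is the narrowing of the unknown caught value before reading its `code`. A minimal TypeScript sketch of that pattern, using a hypothetical loadOptionalFile helper rather than the Lightdash code:

import { promises as fs } from 'fs';

// Hypothetical helper showing the ENOENT-only fallback pattern used above.
const loadOptionalFile = async (
    filePath: string,
    fallback: string,
): Promise<string> => {
    try {
        return await fs.readFile(filePath, 'utf8');
    } catch (e) {
        // Narrow `unknown` to something with a `code` before comparing it.
        if (e instanceof Error && 'code' in e && e.code === 'ENOENT') {
            return fallback; // file genuinely absent: use the default
        }
        throw e; // permission errors, bad encodings, etc. still surface
    }
};
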
@@ -49,6 +49,7 @@ const lightdashApi = async ({ method, url, body, }) => {
  }
  })
  .catch((err) => {
+ // ApiErrorResponse
  throw err;
  });
  };
@@ -47,7 +47,7 @@ const dbtCompile = async (options) => {
  console.error(stderr);
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
+ const msg = (0, common_1.getErrorMessage)(e);
  throw new common_1.ParseError(`Failed to run dbt compile:\n ${msg}`);
  }
  };
@@ -105,7 +105,7 @@ async function dbtList(options) {
  return models;
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
+ const msg = (0, common_1.getErrorMessage)(e);
  throw new common_1.ParseError(`Error executing 'dbt ls':\n ${msg}\nEnsure you're on the latest patch version. '--use-dbt-list' is true by default; if you encounter issues, try using '--use-dbt-list=false`);
  }
  }
@@ -21,8 +21,7 @@ const getDbtCloudTargetName = async () => {
  return targetName[1];
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
- throw new common_1.ParseError(`Failed to get profile target name:\n ${msg}`);
+ throw new common_1.ParseError(`Failed to get profile target name:\n ${(0, common_1.getErrorMessage)(e)}`);
  }
  };
  async function getDbtProfileTargetName(options) {
@@ -26,7 +26,7 @@ const getDbtCLIVersion = async () => {
  return version[0].split(':')[1].trim();
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
+ const msg = (0, common_1.getErrorMessage)(e);
  throw new common_1.ParseError(`Failed to get dbt --version:\n ${msg}`);
  }
  };
@@ -34,8 +34,7 @@ const getDbtCloudConnectionType = async () => {
  return connectionType[1];
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
- throw new common_1.ParseError(`Failed to get connection type:\n ${msg}`);
+ throw new common_1.ParseError(`Failed to get connection type:\n ${(0, common_1.getErrorMessage)(e)}`);
  }
  };
  function getMockCredentials(dbtAdaptorType) {
@@ -31,7 +31,7 @@ const dbtRunHandler = async (options, command) => {
  await subprocess;
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
+ const msg = (0, common_1.getErrorMessage)(e);
  await analytics_1.LightdashAnalytics.track({
  event: 'dbt_command.error',
  properties: {
@@ -301,7 +301,7 @@ const upsertResources = async (type, projectId, changes, force, slugs, customPat
  catch (error) {
  changes[`${type} with errors`] =
  (changes[`${type} with errors`] ?? 0) + 1;
- console.error(styles.error(`Error upserting ${type}: ${error}`));
+ console.error(styles.error(`Error upserting ${type}: ${(0, common_1.getErrorMessage)(error)}`));
  await analytics_1.LightdashAnalytics.track({
  event: 'download.error',
  properties: {
@@ -309,7 +309,7 @@ const upsertResources = async (type, projectId, changes, force, slugs, customPat
  organizationId: config.user?.organizationUuid,
  projectId,
  type,
- error: `${error}`,
+ error: (0, common_1.getErrorMessage)(error),
  },
  });
  }
@@ -375,14 +375,14 @@ const uploadHandler = async (options) => {
  logUploadChanges(changes);
  }
  catch (error) {
- console.error(styles.error(`\nError downloading ${error}`));
+ console.error(styles.error(`\nError downloading: ${(0, common_1.getErrorMessage)(error)}`));
  await analytics_1.LightdashAnalytics.track({
  event: 'download.error',
  properties: {
  userId: config.user?.userUuid,
  organizationId: config.user?.organizationUuid,
  projectId,
- error: `${error}`,
+ error: (0, common_1.getErrorMessage)(error),
  },
  });
  }
@@ -118,13 +118,13 @@ const generateHandler = async (options) => {
  : ymlString);
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
+ const msg = (0, common_1.getErrorMessage)(e);
  throw new common_1.ParseError(`Failed to write file ${outputFilePath}\n ${msg}`);
  }
  spinner.succeed(` ${styles.bold(compiledModel.name)}${styles.info(` ➡️ ${path.relative(process.cwd(), outputFilePath)}`)}`);
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
+ const msg = (0, common_1.getErrorMessage)(e);
  await analytics_1.LightdashAnalytics.track({
  event: 'generate.error',
  properties: {
@@ -59,13 +59,12 @@ const generateExposuresHandler = async (options) => {
  });
  }
  catch (e) {
- const msg = e instanceof Error ? e.message : '-';
  await analytics_1.LightdashAnalytics.track({
  event: 'generate_exposures.error',
  properties: {
  executionId,
  trigger: 'generate',
- error: `${msg}`,
+ error: `${(0, common_1.getErrorMessage)(e)}`,
  },
  });
  spinner.fail(` Failed to generate exposures file'`);
package/dist/index.js CHANGED
@@ -318,7 +318,7 @@ ${styles.bold('Examples:')}
  .option('--output <path>', 'The path where the output exposures YAML file will be written', undefined)
  .action(generateExposures_1.generateExposuresHandler);
  const errorHandler = (err) => {
- console.error(styles.error(err.message || 'Error had no message'));
+ console.error(styles.error((0, common_1.getErrorMessage)(err)));
  if (err.name === 'AuthorizationError') {
  console.error(`Looks like you did not authenticate or the personal access token expired.\n\n👀 See https://docs.lightdash.com/guides/cli/cli-authentication for help and examples`);
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@lightdash/cli",
- "version": "0.1462.1",
+ "version": "0.1463.0",
  "license": "MIT",
  "bin": {
  "lightdash": "dist/index.js"
@@ -30,8 +30,8 @@
  "parse-node-version": "^2.0.0",
  "unique-names-generator": "^4.7.1",
  "uuid": "^11.0.3",
- "@lightdash/common": "0.1462.1",
- "@lightdash/warehouses": "0.1462.1"
+ "@lightdash/common": "0.1463.0",
+ "@lightdash/warehouses": "0.1463.0"
  },
  "description": "Lightdash CLI tool",
  "devDependencies": {