@lightdash/cli 0.1448.0 → 0.1449.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dbt/models.d.ts +2 -2
- package/dist/dbt/models.js +8 -6
- package/dist/globalState.d.ts +1 -0
- package/dist/handlers/compile.js +8 -19
- package/dist/handlers/createProject.js +16 -14
- package/dist/handlers/dbt/compile.d.ts +3 -3
- package/dist/handlers/dbt/compile.js +13 -2
- package/dist/handlers/dbt/getDbtProfileTargetName.d.ts +8 -0
- package/dist/handlers/dbt/getDbtProfileTargetName.js +45 -0
- package/dist/handlers/dbt/getDbtVersion.d.ts +2 -0
- package/dist/handlers/dbt/getDbtVersion.js +36 -2
- package/dist/handlers/dbt/getDbtVersion.mocks.d.ts +1 -0
- package/dist/handlers/dbt/getDbtVersion.mocks.js +3 -0
- package/dist/handlers/dbt/getDbtVersion.test.js +11 -2
- package/dist/handlers/dbt/getWarehouseClient.d.ts +21 -0
- package/dist/handlers/dbt/getWarehouseClient.js +198 -0
- package/dist/handlers/dbt/run.d.ts +2 -0
- package/dist/handlers/deploy.js +10 -2
- package/dist/handlers/generate.js +14 -14
- package/package.json +3 -3
package/dist/dbt/models.d.ts
CHANGED
@@ -33,8 +33,8 @@ export declare const getModelsFromManifest: (manifest: DbtManifest) => DbtModelN
 export declare const getCompiledModels: (models: DbtModelNode[], args: {
     select: string[] | undefined;
     exclude: string[] | undefined;
-    projectDir: string;
-    profilesDir: string;
+    projectDir: string | undefined;
+    profilesDir: string | undefined;
     target: string | undefined;
     profile: string | undefined;
     vars: string | undefined;
package/dist/dbt/models.js
CHANGED
@@ -211,10 +211,10 @@ const getCompiledModels = async (models, args) => {
     try {
         const { stdout } = await (0, execa_1.default)('dbt', [
             'ls',
-            '--profiles-dir',
-            args.profilesDir,
-            '--project-dir',
-            args.projectDir,
+            ...(args.projectDir ? ['--project-dir', args.projectDir] : []),
+            ...(args.profilesDir
+                ? ['--profiles-dir', args.profilesDir]
+                : []),
             ...(args.target ? ['--target', args.target] : []),
             ...(args.profile ? ['--profile', args.profile] : []),
             ...(args.select ? ['--select', args.select.join(' ')] : []),
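Note: the conditional spreads above only add a flag when its value is defined, which is what lets `dbt ls` run under the dbt Cloud CLI without --project-dir/--profiles-dir. A minimal TypeScript sketch of the pattern (the buildDbtLsArgs helper is illustrative, not part of the package):

    type LsArgs = { projectDir?: string; profilesDir?: string; target?: string };

    // Hypothetical helper mirroring the conditional-spread pattern used above.
    const buildDbtLsArgs = (args: LsArgs): string[] => [
        'ls',
        ...(args.projectDir ? ['--project-dir', args.projectDir] : []),
        ...(args.profilesDir ? ['--profiles-dir', args.profilesDir] : []),
        ...(args.target ? ['--target', args.target] : []),
    ];

    // With the dbt Cloud CLI both dirs stay undefined, so the flags are simply omitted:
    console.log(buildDbtLsArgs({ target: 'prod' })); // ['ls', '--target', 'prod']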
@@ -229,9 +229,11 @@ const getCompiledModels = async (models, args) => {
             .filter((l) => l.length > 0)
             .map((l) => {
             try {
-                return JSON.parse(l);
+                // remove prefixed time in dbt cloud cli output
+                const lineWithoutPrefixedTime = l.replace(/^\d{2}:\d{2}:\d{2}\s*/, '');
+                return JSON.parse(lineWithoutPrefixedTime);
             }
-            catch
+            catch {
                 return null;
             }
         })
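The dbt Cloud CLI prefixes each JSON line it prints with an HH:MM:SS timestamp, so lines are cleaned before JSON.parse. A standalone sketch, with an illustrative sample line (not taken from real output):

    // Strip a leading "HH:MM:SS " prefix, then parse the remaining JSON.
    const line = '12:34:56 {"unique_id": "model.jaffle_shop.orders"}';
    const withoutPrefixedTime = line.replace(/^\d{2}:\d{2}:\d{2}\s*/, '');
    console.log(JSON.parse(withoutPrefixedTime).unique_id); // model.jaffle_shop.orders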
package/dist/globalState.d.ts
CHANGED
package/dist/handlers/compile.js
CHANGED
@@ -3,19 +3,18 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.compileHandler = exports.compile = void 0;
 const tslib_1 = require("tslib");
 const common_1 = require("@lightdash/common");
-const warehouses_1 = require("@lightdash/warehouses");
 const path_1 = tslib_1.__importDefault(require("path"));
 const uuid_1 = require("uuid");
 const analytics_1 = require("../analytics/analytics");
 const context_1 = require("../dbt/context");
 const manifest_1 = require("../dbt/manifest");
 const models_1 = require("../dbt/models");
-const profile_1 = require("../dbt/profile");
 const validation_1 = require("../dbt/validation");
 const globalState_1 = tslib_1.__importDefault(require("../globalState"));
 const styles = tslib_1.__importStar(require("../styles"));
 const compile_1 = require("./dbt/compile");
 const getDbtVersion_1 = require("./dbt/getDbtVersion");
+const getWarehouseClient_1 = tslib_1.__importDefault(require("./dbt/getWarehouseClient"));
 const compile = async (options) => {
     const dbtVersion = await (0, getDbtVersion_1.getDbtVersion)();
     globalState_1.default.debug(`> dbt version ${dbtVersion}`);
@@ -31,26 +30,16 @@ const compile = async (options) => {
         },
     });
     const absoluteProjectPath = path_1.default.resolve(options.projectDir);
-    const absoluteProfilesPath = path_1.default.resolve(options.profilesDir);
     globalState_1.default.debug(`> Compiling with project dir ${absoluteProjectPath}`);
-    globalState_1.default.debug(`> Compiling with profiles dir ${absoluteProfilesPath}`);
     const context = await (0, context_1.getDbtContext)({ projectDir: absoluteProjectPath });
-    const
-
-
-
-
-
-    });
-    globalState_1.default.debug(`> Compiling with profile ${profileName}`);
-    globalState_1.default.debug(`> Compiling with target ${target}`);
-    const credentials = await (0, profile_1.warehouseCredentialsFromDbtTarget)(target);
-    const warehouseClient = (0, warehouses_1.warehouseClientFromCredentials)({
-        ...credentials,
-        startOfWeek: (0, common_1.isWeekDay)(options.startOfWeek)
-            ? options.startOfWeek
-            : undefined,
+    const { warehouseClient } = await (0, getWarehouseClient_1.default)({
+        isDbtCloudCLI: dbtVersion.isDbtCloudCLI,
+        profilesDir: options.profilesDir,
+        profile: options.profile || context.profileName,
+        target: options.target,
+        startOfWeek: options.startOfWeek,
     });
+    const compiledModelIds = await (0, compile_1.maybeCompileModelsAndJoins)({ targetDir: context.targetDir }, options);
     const manifest = await (0, manifest_1.loadManifest)({ targetDir: context.targetDir });
     const manifestVersion = (0, common_1.getDbtManifestVersion)(manifest);
     const manifestModels = (0, models_1.getModelsFromManifest)(manifest);
package/dist/handlers/createProject.js
CHANGED
@@ -7,11 +7,12 @@ const inquirer_1 = tslib_1.__importDefault(require("inquirer"));
 const path_1 = tslib_1.__importDefault(require("path"));
 const config_1 = require("../config");
 const context_1 = require("../dbt/context");
-const profile_1 = require("../dbt/profile");
 const globalState_1 = tslib_1.__importDefault(require("../globalState"));
 const styles = tslib_1.__importStar(require("../styles"));
 const apiClient_1 = require("./dbt/apiClient");
+const getDbtProfileTargetName_1 = tslib_1.__importDefault(require("./dbt/getDbtProfileTargetName"));
 const getDbtVersion_1 = require("./dbt/getDbtVersion");
+const getWarehouseClient_1 = tslib_1.__importDefault(require("./dbt/getWarehouseClient"));
 const askToRememberAnswer = async () => {
     const answers = await inquirer_1.default.prompt([
         {
@@ -57,19 +58,24 @@ const askPermissionToStoreWarehouseCredentials = async () => {
 const createProject = async (options) => {
     const dbtVersion = await (0, getDbtVersion_1.getDbtVersion)();
     const absoluteProjectPath = path_1.default.resolve(options.projectDir);
-    const absoluteProfilesPath = path_1.default.resolve(options.profilesDir);
     const context = await (0, context_1.getDbtContext)({ projectDir: absoluteProjectPath });
-    const
-
-        profilesDir:
-        profileName,
-
+    const targetName = await (0, getDbtProfileTargetName_1.default)({
+        isDbtCloudCLI: dbtVersion.isDbtCloudCLI,
+        profilesDir: options.profilesDir,
+        profile: options.profile || context.profileName,
+        target: options.target,
     });
     const canStoreWarehouseCredentials = await askPermissionToStoreWarehouseCredentials();
     if (!canStoreWarehouseCredentials) {
         return undefined;
     }
-    const credentials = await (0,
+    const { credentials } = await (0, getWarehouseClient_1.default)({
+        isDbtCloudCLI: dbtVersion.isDbtCloudCLI,
+        profilesDir: options.profilesDir,
+        profile: options.profile || context.profileName,
+        target: options.target,
+        startOfWeek: options.startOfWeek,
+    });
     if (credentials.type === common_1.WarehouseTypes.BIGQUERY &&
         'project_id' in credentials.keyfileContents &&
         credentials.keyfileContents.project_id &&
@@ -91,12 +97,8 @@ const createProject = async (options) => {
     const project = {
         name: options.name,
         type: options.type,
-        warehouseConnection: {
-            ...credentials,
-            startOfWeek: (0, common_1.isWeekDay)(options.startOfWeek)
-                ? options.startOfWeek
-                : undefined,
-        },
+        warehouseConnection: credentials,
+        copyWarehouseConnectionFromUpstreamProject: dbtVersion.isDbtCloudCLI,
         dbtConnection: {
             type: common_1.DbtProjectType.NONE,
             target: targetName,
package/dist/handlers/dbt/compile.d.ts
CHANGED
@@ -1,8 +1,8 @@
 import { DbtModelNode } from '@lightdash/common';
 import { LoadManifestArgs } from '../../dbt/manifest';
 export type DbtCompileOptions = {
-    profilesDir: string;
-    projectDir: string;
+    profilesDir: string | undefined;
+    projectDir: string | undefined;
     target: string | undefined;
     profile: string | undefined;
     select: string[] | undefined;
@@ -20,4 +20,4 @@ export type DbtCompileOptions = {
 };
 export declare const dbtCompile: (options: DbtCompileOptions) => Promise<void>;
 export declare function getCompiledModels(manifestModels: DbtModelNode[], compiledModelIds?: string[]): DbtModelNode[];
-export declare function maybeCompileModelsAndJoins(loadManifestOpts: LoadManifestArgs,
+export declare function maybeCompileModelsAndJoins(loadManifestOpts: LoadManifestArgs, initialOptions: DbtCompileOptions): Promise<string[] | undefined>;
package/dist/handlers/dbt/compile.js
CHANGED
@@ -100,7 +100,9 @@ async function dbtList(options) {
         .split('\n')
         .map((line) => {
         try {
-            return JSON.parse(line).unique_id;
+            // remove prefixed time in dbt cloud cli output
+            const lineWithoutPrefixedTime = line.replace(/^\d{2}:\d{2}:\d{2}\s*/, '');
+            return JSON.parse(lineWithoutPrefixedTime).unique_id;
         }
         catch {
             // ignore non-json lines
@@ -117,7 +119,16 @@ async function dbtList(options) {
         throw new common_1.ParseError(`Error executing 'dbt ls':\n ${msg}\nEnsure you're on the latest patch version. '--use-dbt-list' is true by default; if you encounter issues, try using '--use-dbt-list=false`);
     }
 }
-async function maybeCompileModelsAndJoins(loadManifestOpts,
+async function maybeCompileModelsAndJoins(loadManifestOpts, initialOptions) {
+    const dbtVersion = await (0, getDbtVersion_1.getDbtVersion)();
+    let options = initialOptions;
+    if (dbtVersion.isDbtCloudCLI) {
+        options = {
+            ...initialOptions,
+            projectDir: undefined,
+            profilesDir: undefined,
+        };
+    }
     // Skipping assumes manifest.json already exists.
     if (options.skipDbtCompile) {
         globalState_1.default.debug('> Skipping dbt compile');
package/dist/handlers/dbt/getDbtProfileTargetName.js
ADDED
@@ -0,0 +1,45 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const tslib_1 = require("tslib");
+const common_1 = require("@lightdash/common");
+const execa_1 = tslib_1.__importDefault(require("execa"));
+const path_1 = tslib_1.__importDefault(require("path"));
+const profile_1 = require("../../dbt/profile");
+const globalState_1 = tslib_1.__importDefault(require("../../globalState"));
+const DBT_CLOUD_TARGET_NAME_REGEX = /Target name\s+(\w+)/;
+const getDbtCloudTargetName = async () => {
+    try {
+        const { all } = await (0, execa_1.default)('dbt', ['environment', 'show'], {
+            all: true,
+            stdio: ['pipe', 'pipe', 'pipe'],
+        });
+        const logs = all || '';
+        const targetName = logs.match(DBT_CLOUD_TARGET_NAME_REGEX);
+        if (targetName === null || targetName.length === 0) {
+            throw new common_1.ParseError(`Can't locate profile target name in 'dbt environment show' response`);
+        }
+        return targetName[1];
+    }
+    catch (e) {
+        const msg = e instanceof Error ? e.message : '-';
+        throw new common_1.ParseError(`Failed to get profile target name:\n ${msg}`);
+    }
+};
+async function getDbtProfileTargetName(options) {
+    let targetName;
+    if (options.isDbtCloudCLI) {
+        targetName = await getDbtCloudTargetName();
+    }
+    else {
+        const absoluteProfilesPath = path_1.default.resolve(options.profilesDir);
+        globalState_1.default.debug(`> Using profiles dir ${absoluteProfilesPath} and profile ${options.profile}`);
+        const { name } = await (0, profile_1.loadDbtTarget)({
+            profilesDir: absoluteProfilesPath,
+            profileName: options.profile,
+            targetName: options.target,
+        });
+        targetName = name;
+    }
+    return targetName;
+}
+exports.default = getDbtProfileTargetName;
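The new helper reads the target name out of `dbt environment show` output using the regex above. A standalone TypeScript sketch; the sample output text is an assumption for illustration only:

    const DBT_CLOUD_TARGET_NAME_REGEX = /Target name\s+(\w+)/;
    // Illustrative excerpt of what `dbt environment show` might print.
    const sampleOutput = 'Connection type   snowflake\nTarget name       prod\n';
    const match = sampleOutput.match(DBT_CLOUD_TARGET_NAME_REGEX);
    if (!match) throw new Error("Can't locate profile target name");
    console.log(match[1]); // "prod"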
package/dist/handlers/dbt/getDbtVersion.d.ts
CHANGED
@@ -1,7 +1,9 @@
 import { DbtVersionOption } from '@lightdash/common';
+export declare const DBT_CLOUD_CLI_REGEX: RegExp;
 type DbtVersion = {
     verboseVersion: string;
     versionOption: DbtVersionOption;
+    isDbtCloudCLI: boolean;
 };
 export declare const getDbtVersion: () => Promise<DbtVersion>;
 export {};
package/dist/handlers/dbt/getDbtVersion.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getDbtVersion = void 0;
+exports.getDbtVersion = exports.DBT_CLOUD_CLI_REGEX = void 0;
 const tslib_1 = require("tslib");
 const common_1 = require("@lightdash/common");
 const execa_1 = tslib_1.__importDefault(require("execa"));
@@ -8,6 +8,7 @@ const inquirer_1 = tslib_1.__importDefault(require("inquirer"));
 const globalState_1 = tslib_1.__importDefault(require("../../globalState"));
 const styles = tslib_1.__importStar(require("../../styles"));
 const DBT_CORE_VERSION_REGEX = /installed:.*/;
+exports.DBT_CLOUD_CLI_REGEX = /dbt Cloud CLI.*/;
 const getDbtCLIVersion = async () => {
     try {
         const { all } = await (0, execa_1.default)('dbt', ['--version'], {
@@ -15,6 +16,10 @@ const getDbtCLIVersion = async () => {
             stdio: ['pipe', 'pipe', 'pipe'],
         });
         const logs = all || '';
+        const cloudVersion = logs.match(exports.DBT_CLOUD_CLI_REGEX);
+        if (cloudVersion) {
+            return cloudVersion[0];
+        }
         const version = logs.match(DBT_CORE_VERSION_REGEX);
         if (version === null || version.length === 0)
             throw new common_1.ParseError(`Can't locate dbt --version: ${logs}`);
@@ -25,7 +30,10 @@ const getDbtCLIVersion = async () => {
         throw new common_1.ParseError(`Failed to get dbt --version:\n ${msg}`);
     }
 };
+const isDbtCloudCLI = (version) => version.match(exports.DBT_CLOUD_CLI_REGEX) !== null;
 const getSupportedDbtVersionOption = (version) => {
+    if (version.match(exports.DBT_CLOUD_CLI_REGEX))
+        return common_1.DbtVersionOptionLatest.LATEST;
     if (version.startsWith('1.4.'))
         return common_1.SupportedDbtVersions.V1_4;
     if (version.startsWith('1.5.'))
@@ -70,14 +78,40 @@ const getDbtVersion = async () => {
                 },
             ]);
             if (!answers.isConfirm) {
-
+                console.error(styles.error(`Unsupported dbt version ${verboseVersion}. Please consider using a supported version (${supportedVersionsRangeMessage}).`));
+                process.exit(1);
             }
         }
         spinner?.start();
         globalState_1.default.savePromptAnswer('useFallbackDbtVersion', true);
     }
+    if (isDbtCloudCLI(verboseVersion) &&
+        !globalState_1.default.getSavedPromptAnswer('useExperimentalDbtCloudCLI')) {
+        const message = `Support for dbt Cloud CLI is still experimental and might not work as expected.`;
+        const spinner = globalState_1.default.getActiveSpinner();
+        spinner?.stop();
+        if (process.env.CI === 'true') {
+            console.error(styles.warning(message));
+        }
+        else {
+            const answers = await inquirer_1.default.prompt([
+                {
+                    type: 'confirm',
+                    name: 'isConfirm',
+                    message: `${styles.warning(message)}\nDo you still want to continue?`,
+                },
+            ]);
+            if (!answers.isConfirm) {
+                console.error(styles.error(`Command using dbt cloud CLI has been canceled. Please consider using dbt core CLI for the best experience.`));
+                process.exit(1);
+            }
+        }
+        spinner?.start();
+        globalState_1.default.savePromptAnswer('useExperimentalDbtCloudCLI', true);
+    }
     return {
         verboseVersion,
+        isDbtCloudCLI: isDbtCloudCLI(verboseVersion),
         versionOption: supportedVersionOption ?? fallbackVersionOption,
     };
 };
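getDbtVersion now treats any `dbt --version` output that matches /dbt Cloud CLI.*/ as the Cloud CLI and maps it to the latest supported version option, falling back to the Core "installed:" line otherwise. A standalone sketch of that detection logic (the version strings and the exact Core-line parsing are illustrative assumptions):

    const DBT_CLOUD_CLI_REGEX = /dbt Cloud CLI.*/;
    const DBT_CORE_VERSION_REGEX = /installed:.*/;

    // Classify raw `dbt --version` output: Cloud CLI banner vs. Core "installed:" line.
    const classifyDbtVersion = (logs: string): string => {
        const cloud = logs.match(DBT_CLOUD_CLI_REGEX);
        if (cloud) return cloud[0]; // e.g. "dbt Cloud CLI - 0.38.22 (...)"
        const core = logs.match(DBT_CORE_VERSION_REGEX);
        if (!core) throw new Error(`Can't locate dbt --version: ${logs}`);
        return core[0].replace('installed:', '').trim(); // e.g. "1.9.1"
    };

    console.log(classifyDbtVersion('dbt Cloud CLI - 0.38.22 (abc 2024-11-20T15:49:01Z)'));
    console.log(classifyDbtVersion('Core:\n  - installed: 1.9.1\n  - latest: 1.9.1'));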
package/dist/handlers/dbt/getDbtVersion.mocks.d.ts
CHANGED
@@ -3,6 +3,7 @@ export declare const cliMocks: {
     dbt1_3: Partial<ExecaReturnValue<string>>;
     dbt1_4: Partial<ExecaReturnValue<string>>;
     dbt1_9: Partial<ExecaReturnValue<string>>;
+    dbtCloud: Partial<ExecaReturnValue<string>>;
     dbt20_1: Partial<ExecaReturnValue<string>>;
     error: Partial<ExecaError<string>>;
 };
package/dist/handlers/dbt/getDbtVersion.mocks.js
CHANGED
@@ -42,6 +42,9 @@ exports.cliMocks = {
             ' - snowflake: 1.9.0 - Up to date!\n' +
             ' - postgres: 1.9.0 - Up to date!\n',
     },
+    dbtCloud: {
+        all: 'dbt Cloud CLI - 0.38.22 (1183c2abdb6003083b0fa91fcd89cd5feb25f9f7 2024-11-20T15:49:01Z)',
+    },
     dbt20_1: {
         all: 'Core:\n' +
             ' - installed: 20.1.0\n' +
package/dist/handlers/dbt/getDbtVersion.test.js
CHANGED
@@ -43,6 +43,12 @@ describe('Get dbt version', () => {
         expect(version2.verboseVersion).toEqual('1.9.1');
         expect(version2.versionOption).toEqual(common_1.SupportedDbtVersions.V1_9);
     });
+    test('should return latest for dbt cloud', async () => {
+        execaMock.mockImplementation(async () => getDbtVersion_mocks_1.cliMocks.dbtCloud);
+        const version3 = await (0, getDbtVersion_1.getDbtVersion)();
+        expect(version3.verboseVersion).toEqual(expect.stringContaining('dbt Cloud CLI'));
+        expect(version3.versionOption).toEqual(common_1.DbtVersionOptionLatest.LATEST);
+    });
     test('when CI=true, should warn user about unsupported version and return fallback', async () => {
         process.env.CI = 'true';
         // Test for 1.3
@@ -72,12 +78,15 @@ describe('Get dbt version', () => {
         expect(consoleError).toHaveBeenCalledTimes(0);
     });
     test('when CI=false, should return error if user declines fallback', async () => {
+        const exitSpy = jest.spyOn(process, 'exit').mockImplementation();
         process.env.CI = 'false';
         execaMock.mockImplementation(async () => getDbtVersion_mocks_1.cliMocks.dbt1_3);
         promptMock.mockImplementation(async () => ({ isConfirm: false }));
-        await
+        await (0, getDbtVersion_1.getDbtVersion)();
         expect(promptMock).toHaveBeenCalledTimes(1);
-        expect(consoleError).toHaveBeenCalledTimes(
+        expect(consoleError).toHaveBeenCalledTimes(1);
+        expect(exitSpy).toHaveBeenCalledWith(1);
+        exitSpy.mockRestore();
     });
 });
 });
package/dist/handlers/dbt/getWarehouseClient.d.ts
ADDED
@@ -0,0 +1,21 @@
+import { CreateWarehouseCredentials, WarehouseTableSchema } from '@lightdash/common';
+import { warehouseClientFromCredentials } from '@lightdash/warehouses';
+type GetTableCatalogProps = {
+    projectUuid: string;
+    tableName: string;
+    schemaName: string;
+};
+export declare const getTableSchema: ({ projectUuid, tableName, schemaName, }: GetTableCatalogProps) => Promise<WarehouseTableSchema>;
+type GetWarehouseClientOptions = {
+    isDbtCloudCLI: boolean;
+    profilesDir: string;
+    profile: string;
+    target?: string;
+    startOfWeek?: number;
+};
+type GetWarehouseClientReturn = {
+    warehouseClient: ReturnType<typeof warehouseClientFromCredentials>;
+    credentials: CreateWarehouseCredentials;
+};
+export default function getWarehouseClient(options: GetWarehouseClientOptions): Promise<GetWarehouseClientReturn>;
+export {};
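A hypothetical usage sketch of the new default export (the deep import path and the option values are assumptions for illustration; inside the package the helper is required as ./dbt/getWarehouseClient):

    import getWarehouseClient from '@lightdash/cli/dist/handlers/dbt/getWarehouseClient';

    const run = async () => {
        const { warehouseClient, credentials } = await getWarehouseClient({
            isDbtCloudCLI: false,     // with the dbt Cloud CLI this would be true
            profilesDir: '~/.dbt',    // ignored when isDbtCloudCLI is true
            profile: 'jaffle_shop',
            target: 'dev',            // optional
            startOfWeek: 1,           // optional
        });
        console.log(credentials.type, typeof warehouseClient.getCatalog);
    };

    run().catch(console.error);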
package/dist/handlers/dbt/getWarehouseClient.js
ADDED
@@ -0,0 +1,198 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getTableSchema = void 0;
+const tslib_1 = require("tslib");
+const common_1 = require("@lightdash/common");
+const warehouses_1 = require("@lightdash/warehouses");
+const execa_1 = tslib_1.__importDefault(require("execa"));
+const path_1 = tslib_1.__importDefault(require("path"));
+const config_1 = require("../../config");
+const profile_1 = require("../../dbt/profile");
+const globalState_1 = tslib_1.__importDefault(require("../../globalState"));
+const apiClient_1 = require("./apiClient");
+const getTableSchema = async ({ projectUuid, tableName, schemaName, }) => (0, apiClient_1.lightdashApi)({
+    method: 'GET',
+    url: `/api/v1/projects/${projectUuid}/sqlRunner/fields?tableName=${tableName}&schemaName=${schemaName}`,
+    body: undefined,
+});
+exports.getTableSchema = getTableSchema;
+const DBT_CLOUD_CONNECTION_TYPE_REGEX = /Connection type\s+(\w+)/;
+const getDbtCloudConnectionType = async () => {
+    try {
+        const { all } = await (0, execa_1.default)('dbt', ['environment', 'show'], {
+            all: true,
+            stdio: ['pipe', 'pipe', 'pipe'],
+        });
+        const logs = all || '';
+        const connectionType = logs.match(DBT_CLOUD_CONNECTION_TYPE_REGEX);
+        if (connectionType === null || connectionType.length === 0) {
+            throw new common_1.ParseError(`Can't locate connection type in 'dbt environment show' response`);
+        }
+        if (!(0, common_1.isSupportedDbtAdapterType)(connectionType[1])) {
+            throw new common_1.ParseError(`Unsupported dbt adaptor type ${connectionType[1]}`);
+        }
+        return connectionType[1];
+    }
+    catch (e) {
+        const msg = e instanceof Error ? e.message : '-';
+        throw new common_1.ParseError(`Failed to get connection type:\n ${msg}`);
+    }
+};
+function getMockCredentials(dbtAdaptorType) {
+    let credentials;
+    switch (dbtAdaptorType) {
+        case common_1.SupportedDbtAdapter.BIGQUERY:
+            credentials = {
+                type: common_1.WarehouseTypes.BIGQUERY,
+                project: '',
+                dataset: '',
+                timeoutSeconds: undefined,
+                priority: undefined,
+                keyfileContents: {},
+                retries: undefined,
+                location: undefined,
+                maximumBytesBilled: undefined,
+            };
+            break;
+        case common_1.SupportedDbtAdapter.POSTGRES:
+            credentials = {
+                type: common_1.WarehouseTypes.POSTGRES,
+                host: '',
+                user: '',
+                password: '',
+                port: 5432,
+                dbname: '',
+                schema: '',
+            };
+            break;
+        case common_1.SupportedDbtAdapter.REDSHIFT:
+            credentials = {
+                type: common_1.WarehouseTypes.REDSHIFT,
+                host: '',
+                user: '',
+                password: '',
+                port: 5432,
+                dbname: '',
+                schema: '',
+            };
+            break;
+        case common_1.SupportedDbtAdapter.SNOWFLAKE:
+            credentials = {
+                type: common_1.WarehouseTypes.SNOWFLAKE,
+                account: '',
+                user: '',
+                password: '',
+                warehouse: '',
+                database: '',
+                schema: '',
+                role: '',
+            };
+            break;
+        case common_1.SupportedDbtAdapter.DATABRICKS:
+            credentials = {
+                type: common_1.WarehouseTypes.DATABRICKS,
+                catalog: '',
+                database: '',
+                serverHostName: '',
+                httpPath: '',
+                personalAccessToken: '',
+            };
+            break;
+        case common_1.SupportedDbtAdapter.TRINO:
+            credentials = {
+                type: common_1.WarehouseTypes.TRINO,
+                host: '',
+                user: '',
+                password: '',
+                port: 5432,
+                dbname: '',
+                schema: '',
+                http_scheme: '',
+            };
+            break;
+        default:
+            (0, common_1.assertUnreachable)(dbtAdaptorType, `Unsupported dbt adaptor type ${dbtAdaptorType}`);
+    }
+    return credentials;
+}
+async function getWarehouseClient(options) {
+    let warehouseClient;
+    let credentials;
+    if (options.isDbtCloudCLI) {
+        const dbtAdaptorType = await getDbtCloudConnectionType();
+        globalState_1.default.debug(`> Using ${dbtAdaptorType} client mock`);
+        credentials = getMockCredentials(dbtAdaptorType);
+        warehouseClient = (0, warehouses_1.warehouseClientFromCredentials)({
+            ...credentials,
+            startOfWeek: (0, common_1.isWeekDay)(options.startOfWeek)
+                ? options.startOfWeek
+                : undefined,
+        });
+        const config = await (0, config_1.getConfig)();
+        // Overwrite methods that need to connect to the warehouse
+        warehouseClient.getCatalog = async (refs) => refs.reduce(async (accPromise, ref) => {
+            const acc = await accPromise; // Wait for the previous step's result
+            if (!config.context?.project) {
+                throw new common_1.AuthorizationError(`No active Lightdash project.`);
+            }
+            try {
+                globalState_1.default.debug(`> Warehouse schema information is not available in dbt Cloud CLI. The schema ${ref.database}.${ref.schema}.${ref.table} will be fetched from the active project.`);
+                const fields = await (0, exports.getTableSchema)({
+                    projectUuid: config.context.project,
+                    tableName: ref.table,
+                    schemaName: ref.schema,
+                });
+                acc[ref.database] = {
+                    [ref.schema]: {
+                        [ref.table]: fields,
+                    },
+                };
+            }
+            catch (e) {
+                globalState_1.default.debug(`Failed to get schema for ${ref.database}.${ref.schema}.${ref.table}.`);
+            }
+            return acc;
+        }, Promise.resolve({}));
+        warehouseClient.streamQuery = async (_query, streamCallback) => {
+            globalState_1.default.debug(`> WarehouseClient.streamQuery() is not supported with dbt Cloud CLI. An empty result will be used.`);
+            return streamCallback({ fields: {}, rows: [] });
+        };
+        warehouseClient.runQuery = async () => {
+            globalState_1.default.debug(`> WarehouseClient.runQuery() is not supported with dbt Cloud CLI. An empty result will be used.`);
+            return { fields: {}, rows: [] };
+        };
+        warehouseClient.test = async () => {
+            globalState_1.default.debug(`> WarehouseClient.test() is not supported with dbt Cloud CLI. No test will be run.`);
+        };
+        warehouseClient.getAllTables = async () => {
+            globalState_1.default.debug(`> WarehouseClient.getAllTables() is not supported with dbt Cloud CLI. An empty result will be used.`);
+            return [];
+        };
+        warehouseClient.getFields = async () => {
+            globalState_1.default.debug(`> WarehouseClient.getFields() is not supported with dbt Cloud CLI. An empty result will be used.`);
+            return { fields: {} };
+        };
+    }
+    else {
+        const absoluteProfilesPath = path_1.default.resolve(options.profilesDir);
+        globalState_1.default.debug(`> Using profiles dir ${absoluteProfilesPath} and profile ${options.profile}`);
+        const { target } = await (0, profile_1.loadDbtTarget)({
+            profilesDir: absoluteProfilesPath,
+            profileName: options.profile,
+            targetName: options.target,
+        });
+        globalState_1.default.debug(`> Using target ${target}`);
+        credentials = await (0, profile_1.warehouseCredentialsFromDbtTarget)(target);
+        warehouseClient = (0, warehouses_1.warehouseClientFromCredentials)({
+            ...credentials,
+            startOfWeek: (0, common_1.isWeekDay)(options.startOfWeek)
+                ? options.startOfWeek
+                : undefined,
+        });
+    }
+    return {
+        warehouseClient,
+        credentials,
+    };
+}
+exports.default = getWarehouseClient;
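The getCatalog override above builds the catalog with a sequential async reduce: each ref awaits the previous accumulator promise, fetches its table schema, and writes it into the result. A standalone sketch of the pattern (the Ref/Catalog types and the fetchFields stand-in are illustrative, not part of the package):

    type Ref = { database: string; schema: string; table: string };
    type Catalog = Record<string, Record<string, Record<string, Record<string, string>>>>;

    // Stand-in for the Lightdash API call that returns a table's fields.
    const fetchFields = async (_ref: Ref) => ({ order_id: 'number' });

    const getCatalog = async (refs: Ref[]): Promise<Catalog> =>
        refs.reduce(async (accPromise, ref) => {
            const acc = await accPromise; // resolve the previous step before adding this ref
            acc[ref.database] = { [ref.schema]: { [ref.table]: await fetchFields(ref) } };
            return acc;
        }, Promise.resolve({} as Catalog));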
package/dist/handlers/deploy.js
CHANGED
@@ -15,6 +15,7 @@ const styles = tslib_1.__importStar(require("../styles"));
 const compile_1 = require("./compile");
 const createProject_1 = require("./createProject");
 const apiClient_1 = require("./dbt/apiClient");
+const getDbtVersion_1 = require("./dbt/getDbtVersion");
 const deploy = async (explores, options) => {
     if (explores.length === 0) {
         globalState_1.default.log(styles.warning('No explores found'));
@@ -116,6 +117,7 @@ const createNewProject = async (executionId, options) => {
 };
 const deployHandler = async (options) => {
     globalState_1.default.setVerbose(options.verbose);
+    const dbtVersion = await (0, getDbtVersion_1.getDbtVersion)();
     await (0, apiClient_1.checkLightdashVersion)();
     const executionId = (0, uuid_1.v4)();
     const explores = await (0, compile_1.compile)(options);
@@ -143,10 +145,16 @@ const deployHandler = async (options) => {
     }
     await (0, exports.deploy)(explores, { ...options, projectUuid });
     const serverUrl = config.context?.serverUrl?.replace(/\/$/, '');
-    const displayUrl = options.create
+    let displayUrl = options.create
         ? `${serverUrl}/createProject/cli?projectUuid=${projectUuid}`
        : `${serverUrl}/projects/${projectUuid}/home`;
-
+    let successMessage = 'Successfully deployed project:';
+    if (dbtVersion.isDbtCloudCLI && options.create) {
+        successMessage =
+            'Successfully deployed project! Complete the setup by adding warehouse connection details here:';
+        displayUrl = `${serverUrl}/generalSettings/projectManagement/${projectUuid}/settings`;
+    }
+    console.error(`${styles.bold(successMessage)}`);
     console.error('');
     console.error(` ${styles.bold(`⚡️ ${displayUrl}`)}`);
     console.error('');
package/dist/handlers/generate.js
CHANGED
@@ -3,7 +3,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.generateHandler = void 0;
 const tslib_1 = require("tslib");
 const common_1 = require("@lightdash/common");
-const warehouses_1 = require("@lightdash/warehouses");
 const fs_1 = require("fs");
 const inquirer_1 = tslib_1.__importDefault(require("inquirer"));
 const yaml = tslib_1.__importStar(require("js-yaml"));
@@ -13,11 +12,12 @@ const analytics_1 = require("../analytics/analytics");
 const context_1 = require("../dbt/context");
 const manifest_1 = require("../dbt/manifest");
 const models_1 = require("../dbt/models");
-const profile_1 = require("../dbt/profile");
 const schema_1 = require("../dbt/schema");
 const globalState_1 = tslib_1.__importDefault(require("../globalState"));
 const styles = tslib_1.__importStar(require("../styles"));
 const apiClient_1 = require("./dbt/apiClient");
+const getDbtVersion_1 = require("./dbt/getDbtVersion");
+const getWarehouseClient_1 = tslib_1.__importDefault(require("./dbt/getWarehouseClient"));
 const generateHandler = async (options) => {
     globalState_1.default.setVerbose(options.verbose);
     await (0, apiClient_1.checkLightdashVersion)();
@@ -46,26 +46,26 @@ const generateHandler = async (options) => {
         },
     });
     const absoluteProjectPath = path.resolve(options.projectDir);
-    const absoluteProfilesPath = path.resolve(options.profilesDir);
     const context = await (0, context_1.getDbtContext)({
         projectDir: absoluteProjectPath,
     });
     const profileName = options.profile || context.profileName;
-
-    const {
-
-
-
+    const dbtVersion = await (0, getDbtVersion_1.getDbtVersion)();
+    const { warehouseClient } = await (0, getWarehouseClient_1.default)({
+        isDbtCloudCLI: dbtVersion.isDbtCloudCLI,
+        profilesDir: options.profilesDir,
+        profile: options.profile || context.profileName,
+        target: options.target,
+        startOfWeek: options.startOfWeek,
     });
-    globalState_1.default.debug(`> Loaded target from profiles: ${target.type}`);
-    const credentials = await (0, profile_1.warehouseCredentialsFromDbtTarget)(target);
-    const warehouseClient = (0, warehouses_1.warehouseClientFromCredentials)(credentials);
     const manifest = await (0, manifest_1.loadManifest)({ targetDir: context.targetDir });
     const models = (0, models_1.getModelsFromManifest)(manifest);
     const compiledModels = await (0, models_1.getCompiledModels)(models, {
-        projectDir: absoluteProjectPath,
-        profilesDir:
-
+        projectDir: dbtVersion.isDbtCloudCLI ? undefined : absoluteProjectPath,
+        profilesDir: dbtVersion.isDbtCloudCLI
+            ? undefined
+            : path.resolve(options.profilesDir),
+        profile: dbtVersion.isDbtCloudCLI ? undefined : profileName,
         target: options.target,
         select: options.select || options.models,
         exclude: options.exclude,
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@lightdash/cli",
-    "version": "0.1448.0",
+    "version": "0.1449.0",
     "license": "MIT",
     "bin": {
         "lightdash": "dist/index.js"
@@ -11,8 +11,8 @@
     ],
     "dependencies": {
         "@actions/core": "^1.11.1",
-        "@lightdash/common": "^0.1448.0",
-        "@lightdash/warehouses": "^0.1448.0",
+        "@lightdash/common": "^0.1449.0",
+        "@lightdash/warehouses": "^0.1449.0",
         "@types/columnify": "^1.5.1",
         "ajv": "^8.11.0",
         "ajv-formats": "^2.1.1",