@claryai/cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +25 -0
- package/README.md +197 -0
- package/dist/.tsbuildinfo +1 -0
- package/dist/ajv.d.ts +3 -0
- package/dist/ajv.d.ts.map +1 -0
- package/dist/ajv.js +13 -0
- package/dist/analytics/analytics.d.ts +370 -0
- package/dist/analytics/analytics.d.ts.map +1 -0
- package/dist/analytics/analytics.js +143 -0
- package/dist/config.d.ts +34 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +134 -0
- package/dist/dbt/context.d.ts +14 -0
- package/dist/dbt/context.d.ts.map +1 -0
- package/dist/dbt/context.js +76 -0
- package/dist/dbt/context.test.d.ts +2 -0
- package/dist/dbt/context.test.d.ts.map +1 -0
- package/dist/dbt/context.test.js +152 -0
- package/dist/dbt/manifest.d.ts +7 -0
- package/dist/dbt/manifest.d.ts.map +1 -0
- package/dist/dbt/manifest.js +23 -0
- package/dist/dbt/models.d.ts +43 -0
- package/dist/dbt/models.d.ts.map +1 -0
- package/dist/dbt/models.js +256 -0
- package/dist/dbt/models.test.d.ts +2 -0
- package/dist/dbt/models.test.d.ts.map +1 -0
- package/dist/dbt/models.test.js +19 -0
- package/dist/dbt/profile.d.ts +9 -0
- package/dist/dbt/profile.d.ts.map +1 -0
- package/dist/dbt/profile.js +86 -0
- package/dist/dbt/profiles.test.d.ts +2 -0
- package/dist/dbt/profiles.test.d.ts.map +1 -0
- package/dist/dbt/profiles.test.js +50 -0
- package/dist/dbt/schema.d.ts +31 -0
- package/dist/dbt/schema.d.ts.map +1 -0
- package/dist/dbt/schema.js +49 -0
- package/dist/dbt/targets/Bigquery/index.d.ts +18 -0
- package/dist/dbt/targets/Bigquery/index.d.ts.map +1 -0
- package/dist/dbt/targets/Bigquery/index.js +105 -0
- package/dist/dbt/targets/Bigquery/oauth.d.ts +2 -0
- package/dist/dbt/targets/Bigquery/oauth.d.ts.map +1 -0
- package/dist/dbt/targets/Bigquery/oauth.js +43 -0
- package/dist/dbt/targets/Bigquery/serviceAccount.d.ts +35 -0
- package/dist/dbt/targets/Bigquery/serviceAccount.d.ts.map +1 -0
- package/dist/dbt/targets/Bigquery/serviceAccount.js +149 -0
- package/dist/dbt/targets/Databricks/oauth.d.ts +21 -0
- package/dist/dbt/targets/Databricks/oauth.d.ts.map +1 -0
- package/dist/dbt/targets/Databricks/oauth.js +184 -0
- package/dist/dbt/targets/athena.d.ts +21 -0
- package/dist/dbt/targets/athena.d.ts.map +1 -0
- package/dist/dbt/targets/athena.js +91 -0
- package/dist/dbt/targets/athena.test.d.ts +2 -0
- package/dist/dbt/targets/athena.test.d.ts.map +1 -0
- package/dist/dbt/targets/athena.test.js +60 -0
- package/dist/dbt/targets/clickhouse.d.ts +24 -0
- package/dist/dbt/targets/clickhouse.d.ts.map +1 -0
- package/dist/dbt/targets/clickhouse.js +90 -0
- package/dist/dbt/targets/databricks.d.ts +27 -0
- package/dist/dbt/targets/databricks.d.ts.map +1 -0
- package/dist/dbt/targets/databricks.js +138 -0
- package/dist/dbt/targets/duckdb.d.ts +16 -0
- package/dist/dbt/targets/duckdb.d.ts.map +1 -0
- package/dist/dbt/targets/duckdb.js +63 -0
- package/dist/dbt/targets/duckdb.test.d.ts +2 -0
- package/dist/dbt/targets/duckdb.test.d.ts.map +1 -0
- package/dist/dbt/targets/duckdb.test.js +37 -0
- package/dist/dbt/targets/postgres.d.ts +26 -0
- package/dist/dbt/targets/postgres.d.ts.map +1 -0
- package/dist/dbt/targets/postgres.js +142 -0
- package/dist/dbt/targets/redshift.d.ts +23 -0
- package/dist/dbt/targets/redshift.d.ts.map +1 -0
- package/dist/dbt/targets/redshift.js +96 -0
- package/dist/dbt/targets/snowflake.d.ts +4 -0
- package/dist/dbt/targets/snowflake.d.ts.map +1 -0
- package/dist/dbt/targets/snowflake.js +134 -0
- package/dist/dbt/targets/trino.d.ts +16 -0
- package/dist/dbt/targets/trino.d.ts.map +1 -0
- package/dist/dbt/targets/trino.js +65 -0
- package/dist/dbt/templating.d.ts +15 -0
- package/dist/dbt/templating.d.ts.map +1 -0
- package/dist/dbt/templating.js +50 -0
- package/dist/dbt/templating.test.d.ts +2 -0
- package/dist/dbt/templating.test.d.ts.map +1 -0
- package/dist/dbt/templating.test.js +51 -0
- package/dist/dbt/types.d.ts +17 -0
- package/dist/dbt/types.d.ts.map +1 -0
- package/dist/dbt/types.js +2 -0
- package/dist/dbt/validation.d.ts +9 -0
- package/dist/dbt/validation.d.ts.map +1 -0
- package/dist/dbt/validation.js +54 -0
- package/dist/env.d.ts +12 -0
- package/dist/env.d.ts.map +1 -0
- package/dist/env.js +40 -0
- package/dist/error.d.ts +2 -0
- package/dist/error.d.ts.map +1 -0
- package/dist/error.js +12 -0
- package/dist/globalState.d.ts +29 -0
- package/dist/globalState.d.ts.map +1 -0
- package/dist/globalState.js +67 -0
- package/dist/handlers/asyncQuery.d.ts +7 -0
- package/dist/handlers/asyncQuery.d.ts.map +1 -0
- package/dist/handlers/asyncQuery.js +50 -0
- package/dist/handlers/compile.d.ts +16 -0
- package/dist/handlers/compile.d.ts.map +1 -0
- package/dist/handlers/compile.js +277 -0
- package/dist/handlers/compile.test.d.ts +2 -0
- package/dist/handlers/compile.test.d.ts.map +1 -0
- package/dist/handlers/compile.test.js +201 -0
- package/dist/handlers/createProject.d.ts +37 -0
- package/dist/handlers/createProject.d.ts.map +1 -0
- package/dist/handlers/createProject.js +272 -0
- package/dist/handlers/dbt/apiClient.d.ts +14 -0
- package/dist/handlers/dbt/apiClient.d.ts.map +1 -0
- package/dist/handlers/dbt/apiClient.js +167 -0
- package/dist/handlers/dbt/compile.d.ts +35 -0
- package/dist/handlers/dbt/compile.d.ts.map +1 -0
- package/dist/handlers/dbt/compile.js +220 -0
- package/dist/handlers/dbt/getDbtProfileTargetName.d.ts +9 -0
- package/dist/handlers/dbt/getDbtProfileTargetName.d.ts.map +1 -0
- package/dist/handlers/dbt/getDbtProfileTargetName.js +44 -0
- package/dist/handlers/dbt/getDbtVersion.d.ts +16 -0
- package/dist/handlers/dbt/getDbtVersion.d.ts.map +1 -0
- package/dist/handlers/dbt/getDbtVersion.js +141 -0
- package/dist/handlers/dbt/getDbtVersion.mocks.d.ts +11 -0
- package/dist/handlers/dbt/getDbtVersion.mocks.d.ts.map +1 -0
- package/dist/handlers/dbt/getDbtVersion.mocks.js +70 -0
- package/dist/handlers/dbt/getDbtVersion.test.d.ts +2 -0
- package/dist/handlers/dbt/getDbtVersion.test.d.ts.map +1 -0
- package/dist/handlers/dbt/getDbtVersion.test.js +97 -0
- package/dist/handlers/dbt/getWarehouseClient.d.ts +24 -0
- package/dist/handlers/dbt/getWarehouseClient.d.ts.map +1 -0
- package/dist/handlers/dbt/getWarehouseClient.js +312 -0
- package/dist/handlers/dbt/refresh.d.ts +11 -0
- package/dist/handlers/dbt/refresh.d.ts.map +1 -0
- package/dist/handlers/dbt/refresh.js +114 -0
- package/dist/handlers/dbt/run.d.ts +14 -0
- package/dist/handlers/dbt/run.d.ts.map +1 -0
- package/dist/handlers/dbt/run.js +67 -0
- package/dist/handlers/deploy.d.ts +26 -0
- package/dist/handlers/deploy.d.ts.map +1 -0
- package/dist/handlers/deploy.js +377 -0
- package/dist/handlers/diagnostics.d.ts +11 -0
- package/dist/handlers/diagnostics.d.ts.map +1 -0
- package/dist/handlers/diagnostics.js +194 -0
- package/dist/handlers/download.d.ts +29 -0
- package/dist/handlers/download.d.ts.map +1 -0
- package/dist/handlers/download.js +955 -0
- package/dist/handlers/exportChartImage.d.ts +7 -0
- package/dist/handlers/exportChartImage.d.ts.map +1 -0
- package/dist/handlers/exportChartImage.js +33 -0
- package/dist/handlers/generate.d.ts +13 -0
- package/dist/handlers/generate.d.ts.map +1 -0
- package/dist/handlers/generate.js +159 -0
- package/dist/handlers/generateExposures.d.ts +8 -0
- package/dist/handlers/generateExposures.d.ts.map +1 -0
- package/dist/handlers/generateExposures.js +100 -0
- package/dist/handlers/getProject.d.ts +6 -0
- package/dist/handlers/getProject.d.ts.map +1 -0
- package/dist/handlers/getProject.js +43 -0
- package/dist/handlers/installSkills.d.ts +12 -0
- package/dist/handlers/installSkills.d.ts.map +1 -0
- package/dist/handlers/installSkills.js +321 -0
- package/dist/handlers/lint/ajvToSarif.d.ts +66 -0
- package/dist/handlers/lint/ajvToSarif.d.ts.map +1 -0
- package/dist/handlers/lint/ajvToSarif.js +222 -0
- package/dist/handlers/lint/sarifFormatter.d.ts +14 -0
- package/dist/handlers/lint/sarifFormatter.d.ts.map +1 -0
- package/dist/handlers/lint/sarifFormatter.js +111 -0
- package/dist/handlers/lint.d.ts +8 -0
- package/dist/handlers/lint.d.ts.map +1 -0
- package/dist/handlers/lint.js +308 -0
- package/dist/handlers/listProjects.d.ts +6 -0
- package/dist/handlers/listProjects.d.ts.map +1 -0
- package/dist/handlers/listProjects.js +53 -0
- package/dist/handlers/login/oauth.d.ts +2 -0
- package/dist/handlers/login/oauth.d.ts.map +1 -0
- package/dist/handlers/login/oauth.js +27 -0
- package/dist/handlers/login/pat.d.ts +2 -0
- package/dist/handlers/login/pat.d.ts.map +1 -0
- package/dist/handlers/login/pat.js +31 -0
- package/dist/handlers/login.d.ts +15 -0
- package/dist/handlers/login.d.ts.map +1 -0
- package/dist/handlers/login.js +239 -0
- package/dist/handlers/metadataFile.d.ts +9 -0
- package/dist/handlers/metadataFile.d.ts.map +1 -0
- package/dist/handlers/metadataFile.js +34 -0
- package/dist/handlers/oauthLogin.d.ts +6 -0
- package/dist/handlers/oauthLogin.d.ts.map +1 -0
- package/dist/handlers/oauthLogin.js +191 -0
- package/dist/handlers/preview.d.ts +29 -0
- package/dist/handlers/preview.d.ts.map +1 -0
- package/dist/handlers/preview.js +415 -0
- package/dist/handlers/renameHandler.d.ts +16 -0
- package/dist/handlers/renameHandler.d.ts.map +1 -0
- package/dist/handlers/renameHandler.js +160 -0
- package/dist/handlers/runChart.d.ts +10 -0
- package/dist/handlers/runChart.d.ts.map +1 -0
- package/dist/handlers/runChart.js +105 -0
- package/dist/handlers/selectProject.d.ts +20 -0
- package/dist/handlers/selectProject.d.ts.map +1 -0
- package/dist/handlers/selectProject.js +91 -0
- package/dist/handlers/setProject.d.ts +14 -0
- package/dist/handlers/setProject.d.ts.map +1 -0
- package/dist/handlers/setProject.js +131 -0
- package/dist/handlers/setWarehouse.d.ts +14 -0
- package/dist/handlers/setWarehouse.d.ts.map +1 -0
- package/dist/handlers/setWarehouse.js +94 -0
- package/dist/handlers/sql.d.ts +9 -0
- package/dist/handlers/sql.d.ts.map +1 -0
- package/dist/handlers/sql.js +89 -0
- package/dist/handlers/utils.d.ts +11 -0
- package/dist/handlers/utils.d.ts.map +1 -0
- package/dist/handlers/utils.js +36 -0
- package/dist/handlers/validate.d.ts +22 -0
- package/dist/handlers/validate.d.ts.map +1 -0
- package/dist/handlers/validate.js +201 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +581 -0
- package/dist/lightdash/loader.d.ts +21 -0
- package/dist/lightdash/loader.d.ts.map +1 -0
- package/dist/lightdash/loader.js +122 -0
- package/dist/lightdash/projectType.d.ts +84 -0
- package/dist/lightdash/projectType.d.ts.map +1 -0
- package/dist/lightdash/projectType.js +75 -0
- package/dist/lightdash-config/index.d.ts +2 -0
- package/dist/lightdash-config/index.d.ts.map +1 -0
- package/dist/lightdash-config/index.js +41 -0
- package/dist/lightdash-config/lightdash-config.test.d.ts +2 -0
- package/dist/lightdash-config/lightdash-config.test.d.ts.map +1 -0
- package/dist/lightdash-config/lightdash-config.test.js +70 -0
- package/dist/styles.d.ts +10 -0
- package/dist/styles.d.ts.map +1 -0
- package/dist/styles.js +14 -0
- package/entitlements.plist +33 -0
- package/package.json +71 -0
- package/track.sh +116 -0
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getBigqueryCredentialsFromOauth = void 0;
const common_1 = require("@lightdash/common");
const google_auth_library_1 = require("google-auth-library");
/**
 * Resolves BigQuery credentials from Google Application Default Credentials (ADC).
 *
 * Handles three ADC credential shapes:
 *  - UserRefreshClient (user oauth): extracts the client id/secret/refresh token.
 *  - ExternalAccountClient / IdentityPoolClient (workforce identity federation):
 *    returns an ADC marker so explicit credentials are not passed at runtime.
 *  - Service-account style credentials exposing `email` and `key`.
 *
 * Throws ParseError when no usable credential shape is found.
 */
const getBigqueryCredentialsFromOauth = async () => {
    const googleAuth = new google_auth_library_1.GoogleAuth();
    const adc = await googleAuth.getApplicationDefault();
    const cred = adc.credential;
    if (cred instanceof google_auth_library_1.UserRefreshClient) {
        // Reads underscore-prefixed (private) fields off UserRefreshClient.
        // eslint-disable-next-line @typescript-eslint/naming-convention
        const { _clientId, _clientSecret, _refreshToken, projectId } = cred;
        if (_clientId && _clientSecret && _refreshToken) {
            return {
                client_id: _clientId,
                client_secret: _clientSecret,
                refresh_token: _refreshToken,
                project_id: projectId || '',
                type: 'authorized_user',
            };
        }
        throw new common_1.ParseError(`Cannot get credentials from UserRefreshClient`);
    }
    if (cred instanceof google_auth_library_1.ExternalAccountClient ||
        cred instanceof google_auth_library_1.IdentityPoolClient) {
        // Support ADC via workforce identity federation / external_account configuration.
        // In this case we should rely on ADC at runtime and not pass explicit credentials.
        return {
            authenticationType: common_1.BigqueryAuthenticationType.ADC,
        };
    }
    if ('email' in cred && 'key' in cred) {
        // Works with service credentials
        const { email, key, projectId } = cred;
        return {
            client_email: email,
            private_key: key,
            project_id: projectId,
        };
    }
    throw new common_1.ParseError(`Cannot get credentials from oauth`);
};
exports.getBigqueryCredentialsFromOauth = getBigqueryCredentialsFromOauth;
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { JSONSchemaType } from 'ajv';
|
|
2
|
+
import { Target } from '../../types';
|
|
3
|
+
export type BigqueryServiceAccountTarget = {
|
|
4
|
+
type: 'bigquery';
|
|
5
|
+
project: string;
|
|
6
|
+
dataset: string;
|
|
7
|
+
threads?: number;
|
|
8
|
+
method: 'service-account' | 'oauth';
|
|
9
|
+
keyfile: string;
|
|
10
|
+
priority?: 'interactive' | 'batch';
|
|
11
|
+
retries?: number;
|
|
12
|
+
location?: string;
|
|
13
|
+
maximum_bytes_billed?: number;
|
|
14
|
+
timeout_seconds?: number;
|
|
15
|
+
execution_project?: string;
|
|
16
|
+
};
|
|
17
|
+
export declare const bigqueryServiceAccountSchema: JSONSchemaType<BigqueryServiceAccountTarget>;
|
|
18
|
+
export declare const getBigqueryCredentialsFromServiceAccount: (target: Target) => Promise<Record<string, string>>;
|
|
19
|
+
export type BigqueryServiceAccountJsonTarget = {
|
|
20
|
+
type: 'bigquery';
|
|
21
|
+
method: 'service-account-json';
|
|
22
|
+
project: string;
|
|
23
|
+
dataset: string;
|
|
24
|
+
threads?: number;
|
|
25
|
+
keyfile_json: object;
|
|
26
|
+
priority?: 'interactive' | 'batch';
|
|
27
|
+
retries?: number;
|
|
28
|
+
location?: string;
|
|
29
|
+
maximum_bytes_billed?: number;
|
|
30
|
+
timeout_seconds?: number;
|
|
31
|
+
execution_project?: string;
|
|
32
|
+
};
|
|
33
|
+
export declare const bigqueryServiceAccountJsonSchema: JSONSchemaType<BigqueryServiceAccountJsonTarget>;
|
|
34
|
+
export declare const getBigqueryCredentialsFromServiceAccountJson: (target: Target) => Promise<Record<string, string>>;
|
|
35
|
+
//# sourceMappingURL=serviceAccount.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"serviceAccount.d.ts","sourceRoot":"","sources":["../../../../src/dbt/targets/Bigquery/serviceAccount.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AAGrC,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AAErC,MAAM,MAAM,4BAA4B,GAAG;IACvC,IAAI,EAAE,UAAU,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,iBAAiB,GAAG,OAAO,CAAC;IACpC,OAAO,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,aAAa,GAAG,OAAO,CAAC;IACnC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAC9B,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC9B,CAAC;AACF,eAAO,MAAM,4BAA4B,EAAE,cAAc,CAAC,4BAA4B,CAqDjF,CAAC;AAEN,eAAO,MAAM,wCAAwC,GACjD,QAAQ,MAAM,KACf,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAwBhC,CAAC;AAEF,MAAM,MAAM,gCAAgC,GAAG;IAC3C,IAAI,EAAE,UAAU,CAAC;IACjB,MAAM,EAAE,sBAAsB,CAAC;IAC/B,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,YAAY,EAAE,MAAM,CAAC;IACrB,QAAQ,CAAC,EAAE,aAAa,GAAG,OAAO,CAAC;IACnC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAC9B,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC9B,CAAC;AACF,eAAO,MAAM,gCAAgC,EAAE,cAAc,CAAC,gCAAgC,CAqDzF,CAAC;AAEN,eAAO,MAAM,4CAA4C,GACrD,QAAQ,MAAM,KACf,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAsBhC,CAAC"}
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.getBigqueryCredentialsFromServiceAccountJson = exports.bigqueryServiceAccountJsonSchema = exports.getBigqueryCredentialsFromServiceAccount = exports.bigqueryServiceAccountSchema = void 0;
|
|
4
|
+
const common_1 = require("@lightdash/common");
|
|
5
|
+
const fs_1 = require("fs");
|
|
6
|
+
const ajv_1 = require("../../../ajv");
|
|
7
|
+
exports.bigqueryServiceAccountSchema = {
|
|
8
|
+
type: 'object',
|
|
9
|
+
properties: {
|
|
10
|
+
type: {
|
|
11
|
+
type: 'string',
|
|
12
|
+
enum: ['bigquery'],
|
|
13
|
+
},
|
|
14
|
+
project: {
|
|
15
|
+
type: 'string',
|
|
16
|
+
},
|
|
17
|
+
dataset: {
|
|
18
|
+
type: 'string',
|
|
19
|
+
},
|
|
20
|
+
threads: {
|
|
21
|
+
type: 'integer',
|
|
22
|
+
minimum: 1,
|
|
23
|
+
nullable: true,
|
|
24
|
+
},
|
|
25
|
+
method: {
|
|
26
|
+
type: 'string',
|
|
27
|
+
enum: ['service-account', 'oauth'],
|
|
28
|
+
},
|
|
29
|
+
keyfile: {
|
|
30
|
+
type: 'string',
|
|
31
|
+
},
|
|
32
|
+
priority: {
|
|
33
|
+
type: 'string',
|
|
34
|
+
enum: ['interactive', 'batch'],
|
|
35
|
+
nullable: true,
|
|
36
|
+
},
|
|
37
|
+
retries: {
|
|
38
|
+
type: 'integer',
|
|
39
|
+
nullable: true,
|
|
40
|
+
},
|
|
41
|
+
location: {
|
|
42
|
+
type: 'string',
|
|
43
|
+
nullable: true,
|
|
44
|
+
},
|
|
45
|
+
maximum_bytes_billed: {
|
|
46
|
+
type: 'integer',
|
|
47
|
+
nullable: true,
|
|
48
|
+
},
|
|
49
|
+
timeout_seconds: {
|
|
50
|
+
type: 'integer',
|
|
51
|
+
nullable: true,
|
|
52
|
+
},
|
|
53
|
+
execution_project: {
|
|
54
|
+
type: 'string',
|
|
55
|
+
nullable: true,
|
|
56
|
+
},
|
|
57
|
+
},
|
|
58
|
+
required: ['type', 'project', 'dataset', 'method', 'keyfile'],
|
|
59
|
+
};
|
|
60
|
+
const getBigqueryCredentialsFromServiceAccount = async (target) => {
|
|
61
|
+
const validate = ajv_1.ajv.compile(exports.bigqueryServiceAccountSchema);
|
|
62
|
+
if (validate(target)) {
|
|
63
|
+
const keyfilePath = target.keyfile;
|
|
64
|
+
try {
|
|
65
|
+
return JSON.parse(await fs_1.promises.readFile(keyfilePath, 'utf8'));
|
|
66
|
+
}
|
|
67
|
+
catch (e) {
|
|
68
|
+
const msg = (0, common_1.getErrorMessage)(e);
|
|
69
|
+
throw new common_1.ParseError(`Cannot read keyfile for bigquery target expect at: ${keyfilePath}:\n ${msg}`);
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
const lineErrorMessages = (validate.errors || [])
|
|
73
|
+
.map((err) => `Field at ${err.instancePath} ${err.message}`)
|
|
74
|
+
.join('\n');
|
|
75
|
+
throw new common_1.ParseError(`Couldn't read profiles.yml file for ${target.type}:\n ${lineErrorMessages}`);
|
|
76
|
+
};
|
|
77
|
+
exports.getBigqueryCredentialsFromServiceAccount = getBigqueryCredentialsFromServiceAccount;
|
|
78
|
+
exports.bigqueryServiceAccountJsonSchema = {
|
|
79
|
+
type: 'object',
|
|
80
|
+
properties: {
|
|
81
|
+
type: {
|
|
82
|
+
type: 'string',
|
|
83
|
+
enum: ['bigquery'],
|
|
84
|
+
},
|
|
85
|
+
project: {
|
|
86
|
+
type: 'string',
|
|
87
|
+
},
|
|
88
|
+
dataset: {
|
|
89
|
+
type: 'string',
|
|
90
|
+
},
|
|
91
|
+
threads: {
|
|
92
|
+
type: 'integer',
|
|
93
|
+
minimum: 1,
|
|
94
|
+
nullable: true,
|
|
95
|
+
},
|
|
96
|
+
method: {
|
|
97
|
+
type: 'string',
|
|
98
|
+
enum: ['service-account-json'],
|
|
99
|
+
},
|
|
100
|
+
keyfile_json: {
|
|
101
|
+
type: 'object',
|
|
102
|
+
},
|
|
103
|
+
priority: {
|
|
104
|
+
type: 'string',
|
|
105
|
+
enum: ['interactive', 'batch'],
|
|
106
|
+
nullable: true,
|
|
107
|
+
},
|
|
108
|
+
retries: {
|
|
109
|
+
type: 'integer',
|
|
110
|
+
nullable: true,
|
|
111
|
+
},
|
|
112
|
+
location: {
|
|
113
|
+
type: 'string',
|
|
114
|
+
nullable: true,
|
|
115
|
+
},
|
|
116
|
+
maximum_bytes_billed: {
|
|
117
|
+
type: 'integer',
|
|
118
|
+
nullable: true,
|
|
119
|
+
},
|
|
120
|
+
timeout_seconds: {
|
|
121
|
+
type: 'integer',
|
|
122
|
+
nullable: true,
|
|
123
|
+
},
|
|
124
|
+
execution_project: {
|
|
125
|
+
type: 'string',
|
|
126
|
+
nullable: true,
|
|
127
|
+
},
|
|
128
|
+
},
|
|
129
|
+
required: ['type', 'project', 'dataset', 'method', 'keyfile_json'],
|
|
130
|
+
};
|
|
131
|
+
const getBigqueryCredentialsFromServiceAccountJson = async (target) => {
|
|
132
|
+
const validate = ajv_1.ajv.compile(exports.bigqueryServiceAccountJsonSchema);
|
|
133
|
+
if (validate(target)) {
|
|
134
|
+
return Object.entries(target.keyfile_json).reduce((acc, [key, value]) => {
|
|
135
|
+
if (typeof value === 'string') {
|
|
136
|
+
acc[key] = value.replaceAll(/\\n/gm, '\n'); // replace escaped newlines. Prevents error: Error: error:1E08010C:DECODER routines::unsupported
|
|
137
|
+
}
|
|
138
|
+
else {
|
|
139
|
+
acc[key] = value;
|
|
140
|
+
}
|
|
141
|
+
return acc;
|
|
142
|
+
}, {});
|
|
143
|
+
}
|
|
144
|
+
const lineErrorMessages = (validate.errors || [])
|
|
145
|
+
.map((err) => `Field at ${err.instancePath} ${err.message}`)
|
|
146
|
+
.join('\n');
|
|
147
|
+
throw new common_1.ParseError(`Couldn't read profiles.yml file for ${target.type}:\n ${lineErrorMessages}`);
|
|
148
|
+
};
|
|
149
|
+
exports.getBigqueryCredentialsFromServiceAccountJson = getBigqueryCredentialsFromServiceAccountJson;
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Default OAuth client ID for Databricks U2M authentication.
|
|
3
|
+
* This matches the client ID used by dbt-databricks.
|
|
4
|
+
*/
|
|
5
|
+
export declare const DATABRICKS_DEFAULT_OAUTH_CLIENT_ID = "dbt-databricks";
|
|
6
|
+
/**
|
|
7
|
+
* Databricks OAuth tokens result
|
|
8
|
+
*/
|
|
9
|
+
export interface DatabricksOAuthTokens {
|
|
10
|
+
accessToken: string;
|
|
11
|
+
refreshToken: string;
|
|
12
|
+
expiresAt: number;
|
|
13
|
+
}
|
|
14
|
+
/**
|
|
15
|
+
* Perform Databricks U2M OAuth flow
|
|
16
|
+
* Opens browser for user authentication and exchanges authorization code for tokens
|
|
17
|
+
* @param host Databricks workspace host
|
|
18
|
+
* @param clientId OAuth client ID (defaults to 'databricks-cli')
|
|
19
|
+
*/
|
|
20
|
+
export declare const performDatabricksOAuthFlow: (host: string, clientId: string, clientSecret: string | undefined) => Promise<DatabricksOAuthTokens>;
|
|
21
|
+
//# sourceMappingURL=oauth.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"oauth.d.ts","sourceRoot":"","sources":["../../../../src/dbt/targets/Databricks/oauth.ts"],"names":[],"mappings":"AAUA;;;GAGG;AACH,eAAO,MAAM,kCAAkC,mBAAmB,CAAC;AAEnE;;GAEG;AACH,MAAM,WAAW,qBAAqB;IAClC,WAAW,EAAE,MAAM,CAAC;IACpB,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,EAAE,MAAM,CAAC;CACrB;AAED;;;;;GAKG;AACH,eAAO,MAAM,0BAA0B,GACnC,MAAM,MAAM,EACZ,UAAU,MAAM,EAChB,cAAc,MAAM,GAAG,SAAS,KACjC,OAAO,CAAC,qBAAqB,CAmO/B,CAAC"}
|
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.performDatabricksOAuthFlow = exports.DATABRICKS_DEFAULT_OAUTH_CLIENT_ID = void 0;
const tslib_1 = require("tslib");
const common_1 = require("@lightdash/common");
const http = tslib_1.__importStar(require("http"));
const node_fetch_1 = tslib_1.__importDefault(require("node-fetch"));
const openid_client_1 = require("openid-client");
const ora_1 = tslib_1.__importDefault(require("ora"));
const url_1 = require("url");
const globalState_1 = tslib_1.__importDefault(require("../../../globalState"));
const oauth_1 = require("../../../handlers/login/oauth");
const styles = tslib_1.__importStar(require("../../../styles"));
/**
 * Default OAuth client ID for Databricks U2M authentication.
 * This matches the client ID used by dbt-databricks.
 */
exports.DATABRICKS_DEFAULT_OAUTH_CLIENT_ID = 'dbt-databricks';
/**
 * Perform the Databricks U2M OAuth flow: start a local callback server,
 * open the browser for user authentication, then exchange the returned
 * authorization code for access/refresh tokens.
 * @param host Databricks workspace host
 * @param clientId OAuth client ID (see DATABRICKS_DEFAULT_OAUTH_CLIENT_ID)
 * @param clientSecret optional secret for confidential (custom OAuth app) clients
 */
const performDatabricksOAuthFlow = async (host, clientId, clientSecret) => {
    // Promise settled by the local HTTP callback server once the browser redirects back.
    let resolveAuthCode;
    let rejectAuthCode;
    const authCodePromise = new Promise((resolve, reject) => {
        resolveAuthCode = resolve;
        rejectAuthCode = reject;
    });
    let port = 0;
    // PKCE values for the authorization request.
    const codeVerifier = openid_client_1.generators.codeVerifier();
    const codeChallenge = openid_client_1.generators.codeChallenge(codeVerifier);
    const state = openid_client_1.generators.state();
    // Local HTTP server handling the OAuth redirect at the root path.
    // NOTE(review): any request lacking code/state (e.g. a favicon fetch) rejects
    // the auth promise — confirm browsers don't issue extra requests to this port.
    const server = http.createServer((req, res) => {
        const callbackUrl = new url_1.URL(req.url || '/', `http://localhost:${port}`);
        const code = callbackUrl.searchParams.get('code');
        const returnedState = callbackUrl.searchParams.get('state');
        const error = callbackUrl.searchParams.get('error');
        res.setHeader('Content-Type', 'text/html');
        if (error === 'access_denied') {
            rejectAuthCode(new common_1.AuthorizationError(`OAuth error: access denied`));
            res.writeHead(400);
            res.end('<html><body><h1>Authentication Failed</h1><p>Access denied. You can close this window.</p></body></html>');
            return;
        }
        if (error) {
            rejectAuthCode(new common_1.AuthorizationError(`OAuth error: ${error}`));
            res.writeHead(400);
            res.end(`<html><body><h1>Authentication Failed</h1><p>Error: ${error}</p></body></html>`);
            return;
        }
        if (!code || !returnedState) {
            rejectAuthCode(new common_1.AuthorizationError('Missing authorization code or state'));
            res.writeHead(400);
            res.end('<html><body><h1>Authentication Failed</h1><p>Missing authorization code or state.</p></body></html>');
            return;
        }
        if (returnedState !== state) {
            rejectAuthCode(new common_1.AuthorizationError('Authentication session expired or invalid'));
            res.writeHead(400);
            res.end('<html><body><h1>Authentication Failed</h1><p>Session expired or invalid. Please close this window and try again.</p></body></html>');
            return;
        }
        // Success — hand the authorization code back to the awaiting flow.
        resolveAuthCode({ code, state: returnedState });
        res.writeHead(200);
        res.end('<html><body><h1>Authentication Successful</h1><p>You can close this window and return to the CLI.</p></body></html>');
    });
    // Start the server on port 8020 (standard for Databricks CLI).
    const preferredPort = 8020;
    await new Promise((resolve, reject) => {
        server.on('error', (err) => {
            // perhaps 8020 port is busy, but we can't use a random port
            reject(err);
        });
        server.listen(preferredPort, () => {
            const address = server.address();
            if (address === null)
                throw new Error('Failed to get server address');
            port =
                typeof address === 'object'
                    ? address.port
                    : parseInt(address.toString(), 10);
            globalState_1.default.debug(`> OAuth callback server listening on port ${port}`);
            resolve();
        });
    });
    const redirectUri = `http://localhost:${port}`;
    globalState_1.default.debug(`> Starting CLI callback server on URI: ${redirectUri}`);
    try {
        // Build the Databricks authorization URL with PKCE parameters.
        const authUrl = new url_1.URL('/oidc/v1/authorize', `https://${host}`);
        authUrl.searchParams.set('client_id', clientId);
        authUrl.searchParams.set('redirect_uri', redirectUri);
        authUrl.searchParams.set('response_type', 'code');
        authUrl.searchParams.set('scope', 'all-apis offline_access');
        authUrl.searchParams.set('code_challenge', codeChallenge);
        authUrl.searchParams.set('code_challenge_method', 'S256');
        authUrl.searchParams.set('state', state);
        // Pause any active spinner while authenticating.
        const parentSpinner = globalState_1.default.getActiveSpinner();
        const wasSpinning = parentSpinner?.isSpinning;
        parentSpinner?.stop();
        const divider = styles.secondary('─'.repeat(process.stdout.columns || 80));
        console.error(`\n${divider}`);
        console.error(`${styles.title('🔐 Databricks Authentication')}`);
        // Try to open the browser.
        await (0, oauth_1.openBrowser)(authUrl.href);
        // Show spinner with fallback URL — both disappear on success.
        // Use ora directly to avoid overwriting the global active spinner.
        const authSpinner = (0, ora_1.default)(` Waiting for authentication in browser...\n` +
            ` If it doesn't open, visit: ${styles.secondary(authUrl.href)}`).start();
        // Wait for the authorization code from the callback server.
        let code;
        try {
            ({ code } = await authCodePromise);
            authSpinner.stop();
        }
        catch (e) {
            authSpinner.fail(` Databricks authentication failed`);
            throw e;
        }
        globalState_1.default.debug(`> Got authorization code ${code.substring(0, 10)}...`);
        // Exchange the authorization code for tokens.
        const tokenUrl = new url_1.URL('/oidc/v1/token', `https://${host}`);
        const tokenParams = {
            grant_type: 'authorization_code',
            code,
            client_id: clientId,
            redirect_uri: redirectUri,
            code_verifier: codeVerifier,
        };
        // For confidential clients (custom OAuth apps), include client_secret.
        if (clientSecret) {
            tokenParams.client_secret = clientSecret;
        }
        const tokenResponse = await (0, node_fetch_1.default)(tokenUrl.href, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/x-www-form-urlencoded',
            },
            body: new URLSearchParams(tokenParams),
        });
        if (!tokenResponse.ok) {
            const errorText = await tokenResponse.text();
            throw new common_1.AuthorizationError(`Token exchange failed: ${tokenResponse.status} ${errorText}`);
        }
        const tokenData = (await tokenResponse.json());
        const accessToken = tokenData.access_token;
        const refreshToken = tokenData.refresh_token;
        const expiresIn = tokenData.expires_in;
        if (!accessToken || !refreshToken) {
            throw new common_1.AuthorizationError('No access token or refresh token received from Databricks');
        }
        globalState_1.default.debug(`> OAuth access token: ${accessToken.substring(0, 10)}...`);
        globalState_1.default.debug(`> OAuth refresh token: ${refreshToken.substring(0, 10)}...`);
        // Calculate expiration timestamp (current time + expires_in).
        const expiresAt = Math.floor(Date.now() / 1000) + expiresIn;
        console.error(`${styles.success('✔')} Successfully authenticated with Databricks`);
        console.error(`${divider}\n`);
        // Restart the parent spinner if it was active.
        if (wasSpinning) {
            parentSpinner?.start();
        }
        return {
            accessToken,
            refreshToken,
            expiresAt,
        };
    }
    finally {
        // Clean up the callback server.
        server.close();
    }
};
exports.performDatabricksOAuthFlow = performDatabricksOAuthFlow;
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { CreateAthenaCredentials } from '@lightdash/common';
import { JSONSchemaType } from 'ajv';
import { Target } from '../types';
/**
 * Shape of an `athena` target entry parsed from a dbt profiles.yml.
 *
 * The required fields (`region_name`, `database`, `schema`,
 * `s3_staging_dir`) mirror the `required` list in `athenaSchema`; all
 * other fields are optional auth / connection settings. AWS access keys,
 * assume-role settings, workgroup and retry/thread counts may be omitted.
 */
export type AthenaTarget = {
    type: 'athena';
    region_name: string;
    database: string;
    schema: string;
    s3_staging_dir: string;
    s3_data_dir?: string;
    aws_access_key_id?: string;
    aws_secret_access_key?: string;
    aws_assume_role_arn?: string;
    aws_assume_role_external_id?: string;
    work_group?: string;
    threads?: number;
    num_retries?: number;
};
/** AJV JSON schema used to validate a raw dbt target as an AthenaTarget. */
export declare const athenaSchema: JSONSchemaType<AthenaTarget>;
/**
 * Converts a validated dbt `athena` target into Lightdash
 * CreateAthenaCredentials. Throws a ParseError when the target does not
 * satisfy `athenaSchema`.
 */
export declare const convertAthenaSchema: (target: Target) => CreateAthenaCredentials;
//# sourceMappingURL=athena.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"athena.d.ts","sourceRoot":"","sources":["../../../src/dbt/targets/athena.ts"],"names":[],"mappings":"AAAA,OAAO,EAEH,uBAAuB,EAG1B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AAGrC,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAElC,MAAM,MAAM,YAAY,GAAG;IACvB,IAAI,EAAE,QAAQ,CAAC;IACf,WAAW,EAAE,MAAM,CAAC;IACpB,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IACf,cAAc,EAAE,MAAM,CAAC;IACvB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,qBAAqB,CAAC,EAAE,MAAM,CAAC;IAC/B,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B,2BAA2B,CAAC,EAAE,MAAM,CAAC;IACrC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;CACxB,CAAC;AAEF,eAAO,MAAM,YAAY,EAAE,cAAc,CAAC,YAAY,CAqDrD,CAAC;AAEF,eAAO,MAAM,mBAAmB,GAC5B,QAAQ,MAAM,KACf,uBAkCF,CAAC"}
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.convertAthenaSchema = exports.athenaSchema = void 0;
|
|
4
|
+
const tslib_1 = require("tslib");
|
|
5
|
+
const common_1 = require("@lightdash/common");
|
|
6
|
+
const better_ajv_errors_1 = tslib_1.__importDefault(require("better-ajv-errors"));
|
|
7
|
+
const ajv_1 = require("../../ajv");
|
|
8
|
+
// AJV JSON schema for an `athena` target block in a dbt profiles.yml.
// Optional fields are marked `nullable: true` so AJV accepts their absence;
// the `required` list below holds the mandatory connection fields.
exports.athenaSchema = {
    type: 'object',
    properties: {
        // Discriminator: only targets declaring type 'athena' match.
        type: {
            type: 'string',
            enum: ['athena'],
        },
        region_name: {
            type: 'string',
        },
        database: {
            type: 'string',
        },
        schema: {
            type: 'string',
        },
        s3_staging_dir: {
            type: 'string',
        },
        s3_data_dir: {
            type: 'string',
            nullable: true,
        },
        // Static AWS credentials — optional; when absent the converter falls
        // back to IAM-role authentication (see convertAthenaSchema).
        aws_access_key_id: {
            type: 'string',
            nullable: true,
        },
        aws_secret_access_key: {
            type: 'string',
            nullable: true,
        },
        aws_assume_role_arn: {
            type: 'string',
            nullable: true,
        },
        aws_assume_role_external_id: {
            type: 'string',
            nullable: true,
        },
        work_group: {
            type: 'string',
            nullable: true,
        },
        threads: {
            type: 'number',
            nullable: true,
        },
        num_retries: {
            type: 'number',
            nullable: true,
        },
    },
    required: ['type', 'region_name', 'database', 'schema', 's3_staging_dir'],
};
|
|
62
|
+
/**
 * Converts a raw dbt `athena` target into Lightdash Athena credentials.
 *
 * Validates the target against `athenaSchema` first; invalid targets raise
 * a ParseError carrying human-readable AJV error output.
 */
const convertAthenaSchema = (target) => {
    const validateTarget = ajv_1.ajv.compile(exports.athenaSchema);
    // Guard clause: reject invalid targets up front with formatted errors.
    if (!validateTarget(target)) {
        const formatted = (0, better_ajv_errors_1.default)(exports.athenaSchema, target, validateTarget.errors || []);
        throw new common_1.ParseError(`Couldn't read profiles.yml file for ${target.type}:\n${formatted}`);
    }
    // CLI is intentionally permissive here: when keys are absent in a dbt
    // profile, infer IAM role auth so local/CI runs can use AWS default
    // credential resolution (for example OIDC/instance/task role).
    const usesAccessKeys = Boolean(target.aws_access_key_id) && Boolean(target.aws_secret_access_key);
    return {
        type: common_1.WarehouseTypes.ATHENA,
        region: target.region_name,
        database: target.database,
        schema: target.schema,
        s3StagingDir: target.s3_staging_dir,
        s3DataDir: target.s3_data_dir,
        authenticationType: usesAccessKeys
            ? common_1.AthenaAuthenticationType.ACCESS_KEY
            : common_1.AthenaAuthenticationType.IAM_ROLE,
        accessKeyId: target.aws_access_key_id,
        secretAccessKey: target.aws_secret_access_key,
        assumeRoleArn: target.aws_assume_role_arn,
        assumeRoleExternalId: target.aws_assume_role_external_id,
        workGroup: target.work_group,
        threads: target.threads,
        numRetries: target.num_retries,
    };
};
exports.convertAthenaSchema = convertAthenaSchema;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"athena.test.d.ts","sourceRoot":"","sources":["../../../src/dbt/targets/athena.test.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const common_1 = require("@lightdash/common");
|
|
4
|
+
const athena_1 = require("./athena");
|
|
5
|
+
// Unit tests for convertAthenaSchema: covers the two authentication
// inference paths (static access keys vs. IAM role fallback) and schema
// validation failure.
describe('convertAthenaSchema', () => {
    test('should parse access key authentication when keys are present', () => {
        // Both aws_access_key_id and aws_secret_access_key are set, so the
        // converter should select ACCESS_KEY authentication.
        const target = {
            type: 'athena',
            region_name: 'us-east-1',
            database: 'AwsDataCatalog',
            schema: 'default',
            s3_staging_dir: 's3://test-results/',
            aws_access_key_id: 'AKIATEST',
            aws_secret_access_key: 'SECRETTEST',
        };
        expect((0, athena_1.convertAthenaSchema)(target)).toEqual({
            type: common_1.WarehouseTypes.ATHENA,
            region: 'us-east-1',
            database: 'AwsDataCatalog',
            schema: 'default',
            s3StagingDir: 's3://test-results/',
            s3DataDir: undefined,
            authenticationType: common_1.AthenaAuthenticationType.ACCESS_KEY,
            accessKeyId: 'AKIATEST',
            secretAccessKey: 'SECRETTEST',
            workGroup: undefined,
            threads: undefined,
            numRetries: undefined,
        });
    });
    test('should parse iam role authentication when keys are missing', () => {
        // No static keys supplied: converter should fall back to IAM_ROLE so
        // AWS default credential resolution can be used.
        const target = {
            type: 'athena',
            region_name: 'us-east-1',
            database: 'AwsDataCatalog',
            schema: 'default',
            s3_staging_dir: 's3://test-results/',
        };
        expect((0, athena_1.convertAthenaSchema)(target)).toEqual({
            type: common_1.WarehouseTypes.ATHENA,
            region: 'us-east-1',
            database: 'AwsDataCatalog',
            schema: 'default',
            s3StagingDir: 's3://test-results/',
            s3DataDir: undefined,
            authenticationType: common_1.AthenaAuthenticationType.IAM_ROLE,
            accessKeyId: undefined,
            secretAccessKey: undefined,
            workGroup: undefined,
            threads: undefined,
            numRetries: undefined,
        });
    });
    test('should throw parse error for invalid Athena target', () => {
        // Missing required fields (database, schema, s3_staging_dir) must
        // fail schema validation and raise ParseError.
        expect(() => (0, athena_1.convertAthenaSchema)({
            type: 'athena',
            region_name: 'us-east-1',
        })).toThrow(common_1.ParseError);
    });
});
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { CreateClickhouseCredentials } from '@lightdash/common';
import { JSONSchemaType } from 'ajv';
import { Target } from '../types';
/**
 * Shape of a `clickhouse` target entry parsed from a dbt profiles.yml.
 *
 * Required fields identify the host, credentials, port and schema; the
 * remaining fields are optional driver/connection tuning settings.
 * NOTE(review): the converter implementation is not visible here — the
 * exact mapping of optional fields is defined in clickhouse.js.
 */
export type ClickhouseTarget = {
    type: 'clickhouse';
    host: string;
    user: string;
    password: string;
    port: number;
    schema: string;
    driver?: string;
    cluster?: string;
    verify?: boolean;
    secure?: boolean;
    retries?: number;
    compression?: boolean;
    connect_timeout?: number;
    send_receive_timeout?: number;
    cluster_mode?: boolean;
    custom_settings?: Record<string, unknown>;
};
/** AJV JSON schema used to validate a raw dbt target as a ClickhouseTarget. */
export declare const clickhouseSchema: JSONSchemaType<ClickhouseTarget>;
/**
 * Converts a validated dbt `clickhouse` target into Lightdash
 * CreateClickhouseCredentials.
 */
export declare const convertClickhouseSchema: (target: Target) => CreateClickhouseCredentials;
//# sourceMappingURL=clickhouse.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"clickhouse.d.ts","sourceRoot":"","sources":["../../../src/dbt/targets/clickhouse.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,2BAA2B,EAG9B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AAGrC,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAElC,MAAM,MAAM,gBAAgB,GAAG;IAC3B,IAAI,EAAE,YAAY,CAAC;IACnB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAC9B,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,eAAe,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CAC7C,CAAC;AAEF,eAAO,MAAM,gBAAgB,EAAE,cAAc,CAAC,gBAAgB,CAgE7D,CAAC;AAEF,eAAO,MAAM,uBAAuB,GAChC,QAAQ,MAAM,KACf,2BAwBF,CAAC"}
|