@claryai/cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +25 -0
- package/README.md +197 -0
- package/dist/.tsbuildinfo +1 -0
- package/dist/ajv.d.ts +3 -0
- package/dist/ajv.d.ts.map +1 -0
- package/dist/ajv.js +13 -0
- package/dist/analytics/analytics.d.ts +370 -0
- package/dist/analytics/analytics.d.ts.map +1 -0
- package/dist/analytics/analytics.js +143 -0
- package/dist/config.d.ts +34 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +134 -0
- package/dist/dbt/context.d.ts +14 -0
- package/dist/dbt/context.d.ts.map +1 -0
- package/dist/dbt/context.js +76 -0
- package/dist/dbt/context.test.d.ts +2 -0
- package/dist/dbt/context.test.d.ts.map +1 -0
- package/dist/dbt/context.test.js +152 -0
- package/dist/dbt/manifest.d.ts +7 -0
- package/dist/dbt/manifest.d.ts.map +1 -0
- package/dist/dbt/manifest.js +23 -0
- package/dist/dbt/models.d.ts +43 -0
- package/dist/dbt/models.d.ts.map +1 -0
- package/dist/dbt/models.js +256 -0
- package/dist/dbt/models.test.d.ts +2 -0
- package/dist/dbt/models.test.d.ts.map +1 -0
- package/dist/dbt/models.test.js +19 -0
- package/dist/dbt/profile.d.ts +9 -0
- package/dist/dbt/profile.d.ts.map +1 -0
- package/dist/dbt/profile.js +86 -0
- package/dist/dbt/profiles.test.d.ts +2 -0
- package/dist/dbt/profiles.test.d.ts.map +1 -0
- package/dist/dbt/profiles.test.js +50 -0
- package/dist/dbt/schema.d.ts +31 -0
- package/dist/dbt/schema.d.ts.map +1 -0
- package/dist/dbt/schema.js +49 -0
- package/dist/dbt/targets/Bigquery/index.d.ts +18 -0
- package/dist/dbt/targets/Bigquery/index.d.ts.map +1 -0
- package/dist/dbt/targets/Bigquery/index.js +105 -0
- package/dist/dbt/targets/Bigquery/oauth.d.ts +2 -0
- package/dist/dbt/targets/Bigquery/oauth.d.ts.map +1 -0
- package/dist/dbt/targets/Bigquery/oauth.js +43 -0
- package/dist/dbt/targets/Bigquery/serviceAccount.d.ts +35 -0
- package/dist/dbt/targets/Bigquery/serviceAccount.d.ts.map +1 -0
- package/dist/dbt/targets/Bigquery/serviceAccount.js +149 -0
- package/dist/dbt/targets/Databricks/oauth.d.ts +21 -0
- package/dist/dbt/targets/Databricks/oauth.d.ts.map +1 -0
- package/dist/dbt/targets/Databricks/oauth.js +184 -0
- package/dist/dbt/targets/athena.d.ts +21 -0
- package/dist/dbt/targets/athena.d.ts.map +1 -0
- package/dist/dbt/targets/athena.js +91 -0
- package/dist/dbt/targets/athena.test.d.ts +2 -0
- package/dist/dbt/targets/athena.test.d.ts.map +1 -0
- package/dist/dbt/targets/athena.test.js +60 -0
- package/dist/dbt/targets/clickhouse.d.ts +24 -0
- package/dist/dbt/targets/clickhouse.d.ts.map +1 -0
- package/dist/dbt/targets/clickhouse.js +90 -0
- package/dist/dbt/targets/databricks.d.ts +27 -0
- package/dist/dbt/targets/databricks.d.ts.map +1 -0
- package/dist/dbt/targets/databricks.js +138 -0
- package/dist/dbt/targets/duckdb.d.ts +16 -0
- package/dist/dbt/targets/duckdb.d.ts.map +1 -0
- package/dist/dbt/targets/duckdb.js +63 -0
- package/dist/dbt/targets/duckdb.test.d.ts +2 -0
- package/dist/dbt/targets/duckdb.test.d.ts.map +1 -0
- package/dist/dbt/targets/duckdb.test.js +37 -0
- package/dist/dbt/targets/postgres.d.ts +26 -0
- package/dist/dbt/targets/postgres.d.ts.map +1 -0
- package/dist/dbt/targets/postgres.js +142 -0
- package/dist/dbt/targets/redshift.d.ts +23 -0
- package/dist/dbt/targets/redshift.d.ts.map +1 -0
- package/dist/dbt/targets/redshift.js +96 -0
- package/dist/dbt/targets/snowflake.d.ts +4 -0
- package/dist/dbt/targets/snowflake.d.ts.map +1 -0
- package/dist/dbt/targets/snowflake.js +134 -0
- package/dist/dbt/targets/trino.d.ts +16 -0
- package/dist/dbt/targets/trino.d.ts.map +1 -0
- package/dist/dbt/targets/trino.js +65 -0
- package/dist/dbt/templating.d.ts +15 -0
- package/dist/dbt/templating.d.ts.map +1 -0
- package/dist/dbt/templating.js +50 -0
- package/dist/dbt/templating.test.d.ts +2 -0
- package/dist/dbt/templating.test.d.ts.map +1 -0
- package/dist/dbt/templating.test.js +51 -0
- package/dist/dbt/types.d.ts +17 -0
- package/dist/dbt/types.d.ts.map +1 -0
- package/dist/dbt/types.js +2 -0
- package/dist/dbt/validation.d.ts +9 -0
- package/dist/dbt/validation.d.ts.map +1 -0
- package/dist/dbt/validation.js +54 -0
- package/dist/env.d.ts +12 -0
- package/dist/env.d.ts.map +1 -0
- package/dist/env.js +40 -0
- package/dist/error.d.ts +2 -0
- package/dist/error.d.ts.map +1 -0
- package/dist/error.js +12 -0
- package/dist/globalState.d.ts +29 -0
- package/dist/globalState.d.ts.map +1 -0
- package/dist/globalState.js +67 -0
- package/dist/handlers/asyncQuery.d.ts +7 -0
- package/dist/handlers/asyncQuery.d.ts.map +1 -0
- package/dist/handlers/asyncQuery.js +50 -0
- package/dist/handlers/compile.d.ts +16 -0
- package/dist/handlers/compile.d.ts.map +1 -0
- package/dist/handlers/compile.js +277 -0
- package/dist/handlers/compile.test.d.ts +2 -0
- package/dist/handlers/compile.test.d.ts.map +1 -0
- package/dist/handlers/compile.test.js +201 -0
- package/dist/handlers/createProject.d.ts +37 -0
- package/dist/handlers/createProject.d.ts.map +1 -0
- package/dist/handlers/createProject.js +272 -0
- package/dist/handlers/dbt/apiClient.d.ts +14 -0
- package/dist/handlers/dbt/apiClient.d.ts.map +1 -0
- package/dist/handlers/dbt/apiClient.js +167 -0
- package/dist/handlers/dbt/compile.d.ts +35 -0
- package/dist/handlers/dbt/compile.d.ts.map +1 -0
- package/dist/handlers/dbt/compile.js +220 -0
- package/dist/handlers/dbt/getDbtProfileTargetName.d.ts +9 -0
- package/dist/handlers/dbt/getDbtProfileTargetName.d.ts.map +1 -0
- package/dist/handlers/dbt/getDbtProfileTargetName.js +44 -0
- package/dist/handlers/dbt/getDbtVersion.d.ts +16 -0
- package/dist/handlers/dbt/getDbtVersion.d.ts.map +1 -0
- package/dist/handlers/dbt/getDbtVersion.js +141 -0
- package/dist/handlers/dbt/getDbtVersion.mocks.d.ts +11 -0
- package/dist/handlers/dbt/getDbtVersion.mocks.d.ts.map +1 -0
- package/dist/handlers/dbt/getDbtVersion.mocks.js +70 -0
- package/dist/handlers/dbt/getDbtVersion.test.d.ts +2 -0
- package/dist/handlers/dbt/getDbtVersion.test.d.ts.map +1 -0
- package/dist/handlers/dbt/getDbtVersion.test.js +97 -0
- package/dist/handlers/dbt/getWarehouseClient.d.ts +24 -0
- package/dist/handlers/dbt/getWarehouseClient.d.ts.map +1 -0
- package/dist/handlers/dbt/getWarehouseClient.js +312 -0
- package/dist/handlers/dbt/refresh.d.ts +11 -0
- package/dist/handlers/dbt/refresh.d.ts.map +1 -0
- package/dist/handlers/dbt/refresh.js +114 -0
- package/dist/handlers/dbt/run.d.ts +14 -0
- package/dist/handlers/dbt/run.d.ts.map +1 -0
- package/dist/handlers/dbt/run.js +67 -0
- package/dist/handlers/deploy.d.ts +26 -0
- package/dist/handlers/deploy.d.ts.map +1 -0
- package/dist/handlers/deploy.js +377 -0
- package/dist/handlers/diagnostics.d.ts +11 -0
- package/dist/handlers/diagnostics.d.ts.map +1 -0
- package/dist/handlers/diagnostics.js +194 -0
- package/dist/handlers/download.d.ts +29 -0
- package/dist/handlers/download.d.ts.map +1 -0
- package/dist/handlers/download.js +955 -0
- package/dist/handlers/exportChartImage.d.ts +7 -0
- package/dist/handlers/exportChartImage.d.ts.map +1 -0
- package/dist/handlers/exportChartImage.js +33 -0
- package/dist/handlers/generate.d.ts +13 -0
- package/dist/handlers/generate.d.ts.map +1 -0
- package/dist/handlers/generate.js +159 -0
- package/dist/handlers/generateExposures.d.ts +8 -0
- package/dist/handlers/generateExposures.d.ts.map +1 -0
- package/dist/handlers/generateExposures.js +100 -0
- package/dist/handlers/getProject.d.ts +6 -0
- package/dist/handlers/getProject.d.ts.map +1 -0
- package/dist/handlers/getProject.js +43 -0
- package/dist/handlers/installSkills.d.ts +12 -0
- package/dist/handlers/installSkills.d.ts.map +1 -0
- package/dist/handlers/installSkills.js +321 -0
- package/dist/handlers/lint/ajvToSarif.d.ts +66 -0
- package/dist/handlers/lint/ajvToSarif.d.ts.map +1 -0
- package/dist/handlers/lint/ajvToSarif.js +222 -0
- package/dist/handlers/lint/sarifFormatter.d.ts +14 -0
- package/dist/handlers/lint/sarifFormatter.d.ts.map +1 -0
- package/dist/handlers/lint/sarifFormatter.js +111 -0
- package/dist/handlers/lint.d.ts +8 -0
- package/dist/handlers/lint.d.ts.map +1 -0
- package/dist/handlers/lint.js +308 -0
- package/dist/handlers/listProjects.d.ts +6 -0
- package/dist/handlers/listProjects.d.ts.map +1 -0
- package/dist/handlers/listProjects.js +53 -0
- package/dist/handlers/login/oauth.d.ts +2 -0
- package/dist/handlers/login/oauth.d.ts.map +1 -0
- package/dist/handlers/login/oauth.js +27 -0
- package/dist/handlers/login/pat.d.ts +2 -0
- package/dist/handlers/login/pat.d.ts.map +1 -0
- package/dist/handlers/login/pat.js +31 -0
- package/dist/handlers/login.d.ts +15 -0
- package/dist/handlers/login.d.ts.map +1 -0
- package/dist/handlers/login.js +239 -0
- package/dist/handlers/metadataFile.d.ts +9 -0
- package/dist/handlers/metadataFile.d.ts.map +1 -0
- package/dist/handlers/metadataFile.js +34 -0
- package/dist/handlers/oauthLogin.d.ts +6 -0
- package/dist/handlers/oauthLogin.d.ts.map +1 -0
- package/dist/handlers/oauthLogin.js +191 -0
- package/dist/handlers/preview.d.ts +29 -0
- package/dist/handlers/preview.d.ts.map +1 -0
- package/dist/handlers/preview.js +415 -0
- package/dist/handlers/renameHandler.d.ts +16 -0
- package/dist/handlers/renameHandler.d.ts.map +1 -0
- package/dist/handlers/renameHandler.js +160 -0
- package/dist/handlers/runChart.d.ts +10 -0
- package/dist/handlers/runChart.d.ts.map +1 -0
- package/dist/handlers/runChart.js +105 -0
- package/dist/handlers/selectProject.d.ts +20 -0
- package/dist/handlers/selectProject.d.ts.map +1 -0
- package/dist/handlers/selectProject.js +91 -0
- package/dist/handlers/setProject.d.ts +14 -0
- package/dist/handlers/setProject.d.ts.map +1 -0
- package/dist/handlers/setProject.js +131 -0
- package/dist/handlers/setWarehouse.d.ts +14 -0
- package/dist/handlers/setWarehouse.d.ts.map +1 -0
- package/dist/handlers/setWarehouse.js +94 -0
- package/dist/handlers/sql.d.ts +9 -0
- package/dist/handlers/sql.d.ts.map +1 -0
- package/dist/handlers/sql.js +89 -0
- package/dist/handlers/utils.d.ts +11 -0
- package/dist/handlers/utils.d.ts.map +1 -0
- package/dist/handlers/utils.js +36 -0
- package/dist/handlers/validate.d.ts +22 -0
- package/dist/handlers/validate.d.ts.map +1 -0
- package/dist/handlers/validate.js +201 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +581 -0
- package/dist/lightdash/loader.d.ts +21 -0
- package/dist/lightdash/loader.d.ts.map +1 -0
- package/dist/lightdash/loader.js +122 -0
- package/dist/lightdash/projectType.d.ts +84 -0
- package/dist/lightdash/projectType.d.ts.map +1 -0
- package/dist/lightdash/projectType.js +75 -0
- package/dist/lightdash-config/index.d.ts +2 -0
- package/dist/lightdash-config/index.d.ts.map +1 -0
- package/dist/lightdash-config/index.js +41 -0
- package/dist/lightdash-config/lightdash-config.test.d.ts +2 -0
- package/dist/lightdash-config/lightdash-config.test.d.ts.map +1 -0
- package/dist/lightdash-config/lightdash-config.test.js +70 -0
- package/dist/styles.d.ts +10 -0
- package/dist/styles.d.ts.map +1 -0
- package/dist/styles.js +14 -0
- package/entitlements.plist +33 -0
- package/package.json +71 -0
- package/track.sh +116 -0
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.convertClickhouseSchema = exports.clickhouseSchema = void 0;
|
|
4
|
+
const tslib_1 = require("tslib");
|
|
5
|
+
const common_1 = require("@lightdash/common");
|
|
6
|
+
const better_ajv_errors_1 = tslib_1.__importDefault(require("better-ajv-errors"));
|
|
7
|
+
const ajv_1 = require("../../ajv");
|
|
8
|
+
exports.clickhouseSchema = {
|
|
9
|
+
type: 'object',
|
|
10
|
+
properties: {
|
|
11
|
+
type: {
|
|
12
|
+
type: 'string',
|
|
13
|
+
enum: ['clickhouse'],
|
|
14
|
+
},
|
|
15
|
+
host: {
|
|
16
|
+
type: 'string',
|
|
17
|
+
},
|
|
18
|
+
user: {
|
|
19
|
+
type: 'string',
|
|
20
|
+
},
|
|
21
|
+
password: {
|
|
22
|
+
type: 'string',
|
|
23
|
+
},
|
|
24
|
+
port: {
|
|
25
|
+
type: 'number',
|
|
26
|
+
},
|
|
27
|
+
schema: {
|
|
28
|
+
type: 'string',
|
|
29
|
+
},
|
|
30
|
+
driver: {
|
|
31
|
+
type: 'string',
|
|
32
|
+
nullable: true,
|
|
33
|
+
},
|
|
34
|
+
cluster: {
|
|
35
|
+
type: 'string',
|
|
36
|
+
nullable: true,
|
|
37
|
+
},
|
|
38
|
+
verify: {
|
|
39
|
+
type: 'boolean',
|
|
40
|
+
nullable: true,
|
|
41
|
+
},
|
|
42
|
+
secure: {
|
|
43
|
+
type: 'boolean',
|
|
44
|
+
nullable: true,
|
|
45
|
+
},
|
|
46
|
+
retries: {
|
|
47
|
+
type: 'number',
|
|
48
|
+
nullable: true,
|
|
49
|
+
},
|
|
50
|
+
compression: {
|
|
51
|
+
type: 'boolean',
|
|
52
|
+
nullable: true,
|
|
53
|
+
},
|
|
54
|
+
connect_timeout: {
|
|
55
|
+
type: 'number',
|
|
56
|
+
nullable: true,
|
|
57
|
+
},
|
|
58
|
+
send_receive_timeout: {
|
|
59
|
+
type: 'number',
|
|
60
|
+
nullable: true,
|
|
61
|
+
},
|
|
62
|
+
cluster_mode: {
|
|
63
|
+
type: 'boolean',
|
|
64
|
+
nullable: true,
|
|
65
|
+
},
|
|
66
|
+
custom_settings: {
|
|
67
|
+
type: 'object',
|
|
68
|
+
nullable: true,
|
|
69
|
+
},
|
|
70
|
+
},
|
|
71
|
+
required: ['type', 'host', 'user', 'password', 'port', 'schema'],
|
|
72
|
+
};
|
|
73
|
+
const convertClickhouseSchema = (target) => {
|
|
74
|
+
const validate = ajv_1.ajv.compile(exports.clickhouseSchema);
|
|
75
|
+
if (validate(target)) {
|
|
76
|
+
return {
|
|
77
|
+
type: common_1.WarehouseTypes.CLICKHOUSE,
|
|
78
|
+
host: target.host,
|
|
79
|
+
user: target.user,
|
|
80
|
+
password: target.password,
|
|
81
|
+
port: target.port,
|
|
82
|
+
schema: target.schema,
|
|
83
|
+
secure: target.secure ?? true,
|
|
84
|
+
timeoutSeconds: target.send_receive_timeout,
|
|
85
|
+
};
|
|
86
|
+
}
|
|
87
|
+
const errs = (0, better_ajv_errors_1.default)(exports.clickhouseSchema, target, validate.errors || []);
|
|
88
|
+
throw new common_1.ParseError(`Couldn't read profiles.yml file for ${target.type}:\n${errs}`);
|
|
89
|
+
};
|
|
90
|
+
exports.convertClickhouseSchema = convertClickhouseSchema;
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { CreateDatabricksCredentials } from '@lightdash/common';
import { JSONSchemaType } from 'ajv';
import { Target } from '../types';
/** Maps a named compute resource to the HTTP path it should use. */
type DatabricksComputeConfig = {
    [name: string]: {
        http_path: string;
    };
};
/** Shape of a dbt `databricks` target as read from profiles.yml. */
export type DatabricksTarget = {
    type: 'databricks';
    // Optional catalog name.
    catalog?: string;
    schema: string;
    host: string;
    http_path: string;
    // Personal access token, used with token authentication.
    token?: string;
    // Authentication mechanism; conversion defaults to 'token' when omitted.
    auth_type?: 'token' | 'oauth';
    // OAuth client credentials (both present implies machine-to-machine auth).
    client_id?: string;
    client_secret?: string;
    access_token?: string;
    refresh_token?: string;
    threads?: number;
    // Additional named compute resources, each with its own http_path.
    compute?: DatabricksComputeConfig;
};
/** Ajv schema validating a raw target against DatabricksTarget. */
export declare const databricksSchema: JSONSchemaType<DatabricksTarget>;
/** Validates and converts a raw profiles.yml target into Databricks credentials. */
export declare const convertDatabricksSchema: (target: Target) => CreateDatabricksCredentials;
export {};
//# sourceMappingURL=databricks.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"databricks.d.ts","sourceRoot":"","sources":["../../../src/dbt/targets/databricks.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,2BAA2B,EAI9B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AAGrC,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAGlC,KAAK,uBAAuB,GAAG;IAC3B,CAAC,IAAI,EAAE,MAAM,GAAG;QACZ,SAAS,EAAE,MAAM,CAAC;KACrB,CAAC;CACL,CAAC;AAEF,MAAM,MAAM,gBAAgB,GAAG;IAC3B,IAAI,EAAE,YAAY,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;IAElB,KAAK,CAAC,EAAE,MAAM,CAAC;IAEf,SAAS,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC;IAC9B,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,uBAAuB,CAAC;CACrC,CAAC;AAEF,eAAO,MAAM,gBAAgB,EAAE,cAAc,CAAC,gBAAgB,CAqE7D,CAAC;AAEF,eAAO,MAAM,uBAAuB,GAChC,QAAQ,MAAM,KACf,2BA6EF,CAAC"}
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.convertDatabricksSchema = exports.databricksSchema = void 0;
|
|
4
|
+
const tslib_1 = require("tslib");
|
|
5
|
+
const common_1 = require("@lightdash/common");
|
|
6
|
+
const better_ajv_errors_1 = tslib_1.__importDefault(require("better-ajv-errors"));
|
|
7
|
+
const ajv_1 = require("../../ajv");
|
|
8
|
+
const oauth_1 = require("./Databricks/oauth");
|
|
9
|
+
exports.databricksSchema = {
|
|
10
|
+
type: 'object',
|
|
11
|
+
properties: {
|
|
12
|
+
type: {
|
|
13
|
+
type: 'string',
|
|
14
|
+
enum: ['databricks'],
|
|
15
|
+
},
|
|
16
|
+
catalog: {
|
|
17
|
+
type: 'string',
|
|
18
|
+
nullable: true,
|
|
19
|
+
},
|
|
20
|
+
schema: {
|
|
21
|
+
type: 'string',
|
|
22
|
+
},
|
|
23
|
+
host: {
|
|
24
|
+
type: 'string',
|
|
25
|
+
},
|
|
26
|
+
http_path: {
|
|
27
|
+
type: 'string',
|
|
28
|
+
},
|
|
29
|
+
token: {
|
|
30
|
+
type: 'string',
|
|
31
|
+
nullable: true,
|
|
32
|
+
},
|
|
33
|
+
auth_type: {
|
|
34
|
+
type: 'string',
|
|
35
|
+
enum: ['token', 'oauth'],
|
|
36
|
+
nullable: true,
|
|
37
|
+
},
|
|
38
|
+
client_id: {
|
|
39
|
+
type: 'string',
|
|
40
|
+
nullable: true,
|
|
41
|
+
},
|
|
42
|
+
client_secret: {
|
|
43
|
+
type: 'string',
|
|
44
|
+
nullable: true,
|
|
45
|
+
},
|
|
46
|
+
access_token: {
|
|
47
|
+
type: 'string',
|
|
48
|
+
nullable: true,
|
|
49
|
+
},
|
|
50
|
+
refresh_token: {
|
|
51
|
+
type: 'string',
|
|
52
|
+
nullable: true,
|
|
53
|
+
},
|
|
54
|
+
oauth_client_id: {
|
|
55
|
+
type: 'string',
|
|
56
|
+
nullable: true,
|
|
57
|
+
},
|
|
58
|
+
threads: {
|
|
59
|
+
type: 'number',
|
|
60
|
+
nullable: true,
|
|
61
|
+
},
|
|
62
|
+
compute: {
|
|
63
|
+
type: 'object',
|
|
64
|
+
nullable: true,
|
|
65
|
+
required: [],
|
|
66
|
+
properties: {},
|
|
67
|
+
additionalProperties: {
|
|
68
|
+
type: 'object',
|
|
69
|
+
properties: {
|
|
70
|
+
http_path: { type: 'string' },
|
|
71
|
+
},
|
|
72
|
+
required: ['http_path'],
|
|
73
|
+
additionalProperties: false,
|
|
74
|
+
},
|
|
75
|
+
},
|
|
76
|
+
},
|
|
77
|
+
required: ['type', 'schema', 'host', 'http_path'],
|
|
78
|
+
};
|
|
79
|
+
const convertDatabricksSchema = (target) => {
|
|
80
|
+
const validate = ajv_1.ajv.compile(exports.databricksSchema);
|
|
81
|
+
if (!validate(target)) {
|
|
82
|
+
const errs = (0, better_ajv_errors_1.default)(exports.databricksSchema, target, validate.errors || []);
|
|
83
|
+
throw new common_1.ParseError(`Couldn't read profiles.yml file for ${target.type}:\n${errs}`);
|
|
84
|
+
}
|
|
85
|
+
const authType = target.auth_type || 'token';
|
|
86
|
+
// OAuth authentication
|
|
87
|
+
if (authType === 'oauth') {
|
|
88
|
+
// Determine authentication type: check env var first, then auto-detect
|
|
89
|
+
let authenticationType;
|
|
90
|
+
const databricksOAuthEnv = process.env.DATABRICKS_OAUTH?.toLowerCase();
|
|
91
|
+
if (databricksOAuthEnv === 'u2m') {
|
|
92
|
+
// Force U2M (user-to-machine) - browser-based OAuth
|
|
93
|
+
authenticationType = common_1.DatabricksAuthenticationType.OAUTH_U2M;
|
|
94
|
+
}
|
|
95
|
+
else {
|
|
96
|
+
// Auto-detect based on presence of client credentials
|
|
97
|
+
// If both client_id and client_secret are present, assume M2M
|
|
98
|
+
// Otherwise, assume U2M (which uses PKCE and doesn't require secret)
|
|
99
|
+
authenticationType =
|
|
100
|
+
target.client_secret && target.client_id
|
|
101
|
+
? common_1.DatabricksAuthenticationType.OAUTH_M2M
|
|
102
|
+
: common_1.DatabricksAuthenticationType.OAUTH_U2M;
|
|
103
|
+
}
|
|
104
|
+
const clientId = target.client_id || oauth_1.DATABRICKS_DEFAULT_OAUTH_CLIENT_ID;
|
|
105
|
+
return {
|
|
106
|
+
type: common_1.WarehouseTypes.DATABRICKS,
|
|
107
|
+
authenticationType,
|
|
108
|
+
catalog: target.catalog,
|
|
109
|
+
database: target.schema,
|
|
110
|
+
serverHostName: target.host,
|
|
111
|
+
httpPath: target.http_path,
|
|
112
|
+
oauthClientId: clientId,
|
|
113
|
+
oauthClientSecret: target.client_secret,
|
|
114
|
+
compute: Object.entries(target.compute || {}).map(([name, compute]) => ({
|
|
115
|
+
name,
|
|
116
|
+
httpPath: compute.http_path,
|
|
117
|
+
})),
|
|
118
|
+
};
|
|
119
|
+
}
|
|
120
|
+
// Personal Access Token authentication (default)
|
|
121
|
+
if (!target.token) {
|
|
122
|
+
throw new common_1.ParseError('Databricks token is required when not using OAuth authentication');
|
|
123
|
+
}
|
|
124
|
+
return {
|
|
125
|
+
type: common_1.WarehouseTypes.DATABRICKS,
|
|
126
|
+
authenticationType: common_1.DatabricksAuthenticationType.PERSONAL_ACCESS_TOKEN,
|
|
127
|
+
catalog: target.catalog,
|
|
128
|
+
database: target.schema,
|
|
129
|
+
serverHostName: target.host,
|
|
130
|
+
httpPath: target.http_path,
|
|
131
|
+
personalAccessToken: target.token,
|
|
132
|
+
compute: Object.entries(target.compute || {}).map(([name, compute]) => ({
|
|
133
|
+
name,
|
|
134
|
+
httpPath: compute.http_path,
|
|
135
|
+
})),
|
|
136
|
+
};
|
|
137
|
+
};
|
|
138
|
+
exports.convertDatabricksSchema = convertDatabricksSchema;
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { CreateDuckdbCredentials } from '@lightdash/common';
import { JSONSchemaType } from 'ajv';
import { Target } from '../types';
/** Shape of a dbt `duckdb` target as read from profiles.yml. */
export type DuckdbTarget = {
    type: 'duckdb';
    // Database path; conversion requires the MotherDuck "md:" prefix.
    path: string;
    schema: string;
    threads?: number;
    extensions?: string[];
    settings?: {
        // Required at conversion time — only MotherDuck targets are supported.
        motherduck_token?: string;
    };
};
/** Ajv schema validating a raw target against DuckdbTarget. */
export declare const duckdbSchema: JSONSchemaType<DuckdbTarget>;
/** Validates and converts a raw profiles.yml target into DuckDB credentials. */
export declare const convertDuckdbSchema: (target: Target) => CreateDuckdbCredentials;
//# sourceMappingURL=duckdb.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"duckdb.d.ts","sourceRoot":"","sources":["../../../src/dbt/targets/duckdb.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,uBAAuB,EAG1B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AAGrC,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAElC,MAAM,MAAM,YAAY,GAAG;IACvB,IAAI,EAAE,QAAQ,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,UAAU,CAAC,EAAE,MAAM,EAAE,CAAC;IACtB,QAAQ,CAAC,EAAE;QACP,gBAAgB,CAAC,EAAE,MAAM,CAAC;KAC7B,CAAC;CACL,CAAC;AAEF,eAAO,MAAM,YAAY,EAAE,cAAc,CAAC,YAAY,CAkCrD,CAAC;AAEF,eAAO,MAAM,mBAAmB,GAC5B,QAAQ,MAAM,KACf,uBA6BF,CAAC"}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.convertDuckdbSchema = exports.duckdbSchema = void 0;
|
|
4
|
+
const tslib_1 = require("tslib");
|
|
5
|
+
const common_1 = require("@lightdash/common");
|
|
6
|
+
const better_ajv_errors_1 = tslib_1.__importDefault(require("better-ajv-errors"));
|
|
7
|
+
const ajv_1 = require("../../ajv");
|
|
8
|
+
exports.duckdbSchema = {
|
|
9
|
+
type: 'object',
|
|
10
|
+
properties: {
|
|
11
|
+
type: {
|
|
12
|
+
type: 'string',
|
|
13
|
+
enum: ['duckdb'],
|
|
14
|
+
},
|
|
15
|
+
path: {
|
|
16
|
+
type: 'string',
|
|
17
|
+
},
|
|
18
|
+
schema: {
|
|
19
|
+
type: 'string',
|
|
20
|
+
},
|
|
21
|
+
threads: {
|
|
22
|
+
type: 'number',
|
|
23
|
+
nullable: true,
|
|
24
|
+
},
|
|
25
|
+
extensions: {
|
|
26
|
+
type: 'array',
|
|
27
|
+
items: { type: 'string' },
|
|
28
|
+
nullable: true,
|
|
29
|
+
},
|
|
30
|
+
settings: {
|
|
31
|
+
type: 'object',
|
|
32
|
+
properties: {
|
|
33
|
+
motherduck_token: {
|
|
34
|
+
type: 'string',
|
|
35
|
+
nullable: true,
|
|
36
|
+
},
|
|
37
|
+
},
|
|
38
|
+
nullable: true,
|
|
39
|
+
},
|
|
40
|
+
},
|
|
41
|
+
required: ['type', 'path', 'schema'],
|
|
42
|
+
};
|
|
43
|
+
const convertDuckdbSchema = (target) => {
|
|
44
|
+
const validate = ajv_1.ajv.compile(exports.duckdbSchema);
|
|
45
|
+
if (validate(target)) {
|
|
46
|
+
if (!target.path.startsWith('md:')) {
|
|
47
|
+
throw new common_1.ParseError(`Couldn't read profiles.yml file for ${target.type}:\nClary only supports MotherDuck duckdb targets. Expected path to start with "md:".`);
|
|
48
|
+
}
|
|
49
|
+
if (!target.settings?.motherduck_token) {
|
|
50
|
+
throw new common_1.ParseError(`Couldn't read profiles.yml file for ${target.type}:\nClary only supports MotherDuck duckdb targets. Expected settings.motherduck_token to be set.`);
|
|
51
|
+
}
|
|
52
|
+
return {
|
|
53
|
+
type: common_1.WarehouseTypes.DUCKDB,
|
|
54
|
+
database: target.path.slice(3),
|
|
55
|
+
schema: target.schema,
|
|
56
|
+
token: target.settings.motherduck_token,
|
|
57
|
+
threads: target.threads,
|
|
58
|
+
};
|
|
59
|
+
}
|
|
60
|
+
const errs = (0, better_ajv_errors_1.default)(exports.duckdbSchema, target, validate.errors || []);
|
|
61
|
+
throw new common_1.ParseError(`Couldn't read profiles.yml file for ${target.type}:\n${errs}`);
|
|
62
|
+
};
|
|
63
|
+
exports.convertDuckdbSchema = convertDuckdbSchema;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"duckdb.test.d.ts","sourceRoot":"","sources":["../../../src/dbt/targets/duckdb.test.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const common_1 = require("@lightdash/common");
|
|
4
|
+
const duckdb_1 = require("./duckdb");
|
|
5
|
+
describe('convertDuckdbSchema', () => {
|
|
6
|
+
test('should parse MotherDuck duckdb targets', () => {
|
|
7
|
+
expect((0, duckdb_1.convertDuckdbSchema)({
|
|
8
|
+
type: 'duckdb',
|
|
9
|
+
path: 'md:analytics',
|
|
10
|
+
schema: 'main',
|
|
11
|
+
threads: 4,
|
|
12
|
+
settings: {
|
|
13
|
+
motherduck_token: 'motherduck_token',
|
|
14
|
+
},
|
|
15
|
+
})).toEqual({
|
|
16
|
+
type: common_1.WarehouseTypes.DUCKDB,
|
|
17
|
+
database: 'analytics',
|
|
18
|
+
schema: 'main',
|
|
19
|
+
token: 'motherduck_token',
|
|
20
|
+
threads: 4,
|
|
21
|
+
});
|
|
22
|
+
});
|
|
23
|
+
test('should reject non-MotherDuck duckdb targets', () => {
|
|
24
|
+
expect(() => (0, duckdb_1.convertDuckdbSchema)({
|
|
25
|
+
type: 'duckdb',
|
|
26
|
+
path: 'analytics.duckdb',
|
|
27
|
+
schema: 'main',
|
|
28
|
+
})).toThrow(common_1.ParseError);
|
|
29
|
+
});
|
|
30
|
+
test('should require a MotherDuck token', () => {
|
|
31
|
+
expect(() => (0, duckdb_1.convertDuckdbSchema)({
|
|
32
|
+
type: 'duckdb',
|
|
33
|
+
path: 'md:analytics',
|
|
34
|
+
schema: 'main',
|
|
35
|
+
})).toThrow(common_1.ParseError);
|
|
36
|
+
});
|
|
37
|
+
});
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { CreatePostgresCredentials } from '@lightdash/common';
import { JSONSchemaType } from 'ajv';
import { Target } from '../types';
/** Shape of a dbt `postgres` target as read from profiles.yml. */
export type PostgresTarget = {
    type: 'postgres';
    host: string;
    user: string;
    port: number;
    // Two accepted spellings for the database name; conversion prefers dbname.
    dbname?: string;
    database?: string;
    schema: string;
    threads?: number;
    // Two accepted spellings for the password; conversion prefers pass.
    pass?: string;
    password?: string;
    keepalives_idle?: number;
    connect_timeout?: number;
    search_path?: string;
    role?: string;
    // SSL options; cert/key/root files are read from disk for verify modes.
    sslmode?: string;
    sslcert?: string;
    sslkey?: string;
    sslrootcert?: string;
};
/** Ajv schema validating a raw target against PostgresTarget. */
export declare const postgresSchema: JSONSchemaType<PostgresTarget>;
/** Validates and converts a raw profiles.yml target into Postgres credentials (async: may read SSL files). */
export declare const convertPostgresSchema: (target: Target) => Promise<CreatePostgresCredentials>;
//# sourceMappingURL=postgres.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"postgres.d.ts","sourceRoot":"","sources":["../../../src/dbt/targets/postgres.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,yBAAyB,EAI5B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AAKrC,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAElC,MAAM,MAAM,cAAc,GAAG;IACzB,IAAI,EAAE,UAAU,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,WAAW,CAAC,EAAE,MAAM,CAAC;CACxB,CAAC;AAEF,eAAO,MAAM,cAAc,EAAE,cAAc,CAAC,cAAc,CAyEzD,CAAC;AAaF,eAAO,MAAM,qBAAqB,GAC9B,QAAQ,MAAM,KACf,OAAO,CAAC,yBAAyB,CAmEnC,CAAC"}
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.convertPostgresSchema = exports.postgresSchema = void 0;
|
|
4
|
+
const tslib_1 = require("tslib");
|
|
5
|
+
const common_1 = require("@lightdash/common");
|
|
6
|
+
const better_ajv_errors_1 = tslib_1.__importDefault(require("better-ajv-errors"));
|
|
7
|
+
const promises_1 = require("fs/promises");
|
|
8
|
+
const ajv_1 = require("../../ajv");
|
|
9
|
+
const globalState_1 = tslib_1.__importDefault(require("../../globalState"));
|
|
10
|
+
exports.postgresSchema = {
|
|
11
|
+
type: 'object',
|
|
12
|
+
properties: {
|
|
13
|
+
type: {
|
|
14
|
+
type: 'string',
|
|
15
|
+
enum: ['postgres'],
|
|
16
|
+
},
|
|
17
|
+
host: {
|
|
18
|
+
type: 'string',
|
|
19
|
+
},
|
|
20
|
+
user: {
|
|
21
|
+
type: 'string',
|
|
22
|
+
},
|
|
23
|
+
port: {
|
|
24
|
+
type: 'integer',
|
|
25
|
+
},
|
|
26
|
+
dbname: {
|
|
27
|
+
type: 'string',
|
|
28
|
+
nullable: true,
|
|
29
|
+
},
|
|
30
|
+
database: {
|
|
31
|
+
type: 'string',
|
|
32
|
+
nullable: true,
|
|
33
|
+
},
|
|
34
|
+
schema: {
|
|
35
|
+
type: 'string',
|
|
36
|
+
},
|
|
37
|
+
threads: {
|
|
38
|
+
type: 'integer',
|
|
39
|
+
nullable: true,
|
|
40
|
+
},
|
|
41
|
+
pass: {
|
|
42
|
+
type: 'string',
|
|
43
|
+
nullable: true,
|
|
44
|
+
},
|
|
45
|
+
password: {
|
|
46
|
+
type: 'string',
|
|
47
|
+
nullable: true,
|
|
48
|
+
},
|
|
49
|
+
keepalives_idle: {
|
|
50
|
+
type: 'integer',
|
|
51
|
+
nullable: true,
|
|
52
|
+
},
|
|
53
|
+
connect_timeout: {
|
|
54
|
+
type: 'integer',
|
|
55
|
+
nullable: true,
|
|
56
|
+
},
|
|
57
|
+
search_path: {
|
|
58
|
+
type: 'string',
|
|
59
|
+
nullable: true,
|
|
60
|
+
},
|
|
61
|
+
role: {
|
|
62
|
+
type: 'string',
|
|
63
|
+
nullable: true,
|
|
64
|
+
},
|
|
65
|
+
sslmode: {
|
|
66
|
+
type: 'string',
|
|
67
|
+
nullable: true,
|
|
68
|
+
},
|
|
69
|
+
sslcert: {
|
|
70
|
+
type: 'string',
|
|
71
|
+
nullable: true,
|
|
72
|
+
},
|
|
73
|
+
sslkey: {
|
|
74
|
+
type: 'string',
|
|
75
|
+
nullable: true,
|
|
76
|
+
},
|
|
77
|
+
sslrootcert: {
|
|
78
|
+
type: 'string',
|
|
79
|
+
nullable: true,
|
|
80
|
+
},
|
|
81
|
+
},
|
|
82
|
+
required: ['type', 'host', 'user', 'port', 'schema'],
|
|
83
|
+
};
|
|
84
|
+
const readFileOrThrow = (fileType, path) => {
|
|
85
|
+
globalState_1.default.debug(`> Reading file ${path} for ${fileType}`);
|
|
86
|
+
return (0, promises_1.readFile)(path)
|
|
87
|
+
.then((file) => file.toString('utf-8'))
|
|
88
|
+
.catch((err) => {
|
|
89
|
+
throw new common_1.ParameterError(`Postgres target requires ${fileType}, Error reading provided file:\n\t${err.code} ${path}`);
|
|
90
|
+
});
|
|
91
|
+
};
|
|
92
|
+
const convertPostgresSchema = async (target) => {
|
|
93
|
+
const validate = ajv_1.ajv.compile(exports.postgresSchema);
|
|
94
|
+
if (!validate(target)) {
|
|
95
|
+
const errs = (0, better_ajv_errors_1.default)(exports.postgresSchema, target, validate.errors || []);
|
|
96
|
+
throw new common_1.ParseError(`Couldn't read profiles.yml file for ${target.type}:\n${errs}`);
|
|
97
|
+
}
|
|
98
|
+
let sslcertFile;
|
|
99
|
+
let sslkeyFile;
|
|
100
|
+
let sslrootcertFile;
|
|
101
|
+
if (target.sslmode === 'verify-full') {
|
|
102
|
+
if (!target.sslcert || !target.sslkey || !target.sslrootcert) {
|
|
103
|
+
throw new common_1.ParseError(`Postgres target requires sslcert, sslkey and sslrootcert when sslmode is "verify-full"`);
|
|
104
|
+
}
|
|
105
|
+
[sslcertFile, sslkeyFile, sslrootcertFile] = await Promise.all([
|
|
106
|
+
readFileOrThrow('sslcert', target.sslcert),
|
|
107
|
+
readFileOrThrow('sslkey', target.sslkey),
|
|
108
|
+
readFileOrThrow('sslrootcert', target.sslrootcert),
|
|
109
|
+
]);
|
|
110
|
+
}
|
|
111
|
+
if (target.sslmode === 'verify-ca') {
|
|
112
|
+
if (!target.sslrootcert) {
|
|
113
|
+
throw new common_1.ParseError(`Postgres target requires sslrootcert when sslmode is "verify-ca"`);
|
|
114
|
+
}
|
|
115
|
+
sslrootcertFile = await readFileOrThrow('sslrootcert', target.sslrootcert);
|
|
116
|
+
}
|
|
117
|
+
const password = target.pass || target.password;
|
|
118
|
+
if (!password) {
|
|
119
|
+
throw new common_1.ParseError(`Postgres target requires a password: "password"`);
|
|
120
|
+
}
|
|
121
|
+
const dbname = target.dbname || target.database;
|
|
122
|
+
if (!dbname) {
|
|
123
|
+
throw new common_1.ParseError(`Postgres target requires a database name: "database"`);
|
|
124
|
+
}
|
|
125
|
+
return {
|
|
126
|
+
type: common_1.WarehouseTypes.POSTGRES,
|
|
127
|
+
host: target.host,
|
|
128
|
+
user: target.user,
|
|
129
|
+
password,
|
|
130
|
+
port: target.port,
|
|
131
|
+
dbname,
|
|
132
|
+
schema: target.schema,
|
|
133
|
+
keepalivesIdle: target.keepalives_idle,
|
|
134
|
+
searchPath: target.search_path,
|
|
135
|
+
role: target.role,
|
|
136
|
+
sslmode: target.sslmode,
|
|
137
|
+
sslcert: sslcertFile,
|
|
138
|
+
sslkey: sslkeyFile,
|
|
139
|
+
sslrootcert: sslrootcertFile,
|
|
140
|
+
};
|
|
141
|
+
};
|
|
142
|
+
exports.convertPostgresSchema = convertPostgresSchema;
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { CreateRedshiftCredentials } from '@lightdash/common';
import { JSONSchemaType } from 'ajv';
import { Target } from '../types';
/** Shape of a dbt `redshift` target as read from profiles.yml. */
export type RedshiftTarget = {
    type: 'redshift';
    host: string;
    user: string;
    port: number;
    // Two accepted spellings for the database name.
    dbname?: string;
    database?: string;
    schema: string;
    threads?: number;
    // Two accepted spellings for the password.
    pass?: string;
    password?: string;
    keepalives_idle?: number;
    connect_timeout?: number;
    search_path?: string;
    role?: string;
    sslmode?: string;
};
/** Ajv schema validating a raw target against RedshiftTarget. */
export declare const redshiftSchema: JSONSchemaType<RedshiftTarget>;
/** Validates and converts a raw profiles.yml target into Redshift credentials. */
export declare const convertRedshiftSchema: (target: Target) => CreateRedshiftCredentials;
//# sourceMappingURL=redshift.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"redshift.d.ts","sourceRoot":"","sources":["../../../src/dbt/targets/redshift.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,yBAAyB,EAG5B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AAGrC,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAElC,MAAM,MAAM,cAAc,GAAG;IACzB,IAAI,EAAE,UAAU,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;CACpB,CAAC;AAEF,eAAO,MAAM,cAAc,EAAE,cAAc,CAAC,cAAc,CA6DzD,CAAC;AAEF,eAAO,MAAM,qBAAqB,GAC9B,QAAQ,MAAM,KACf,yBA+BF,CAAC"}
|