@claryai/cli 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (237) hide show
  1. package/LICENSE +25 -0
  2. package/README.md +197 -0
  3. package/dist/.tsbuildinfo +1 -0
  4. package/dist/ajv.d.ts +3 -0
  5. package/dist/ajv.d.ts.map +1 -0
  6. package/dist/ajv.js +13 -0
  7. package/dist/analytics/analytics.d.ts +370 -0
  8. package/dist/analytics/analytics.d.ts.map +1 -0
  9. package/dist/analytics/analytics.js +143 -0
  10. package/dist/config.d.ts +34 -0
  11. package/dist/config.d.ts.map +1 -0
  12. package/dist/config.js +134 -0
  13. package/dist/dbt/context.d.ts +14 -0
  14. package/dist/dbt/context.d.ts.map +1 -0
  15. package/dist/dbt/context.js +76 -0
  16. package/dist/dbt/context.test.d.ts +2 -0
  17. package/dist/dbt/context.test.d.ts.map +1 -0
  18. package/dist/dbt/context.test.js +152 -0
  19. package/dist/dbt/manifest.d.ts +7 -0
  20. package/dist/dbt/manifest.d.ts.map +1 -0
  21. package/dist/dbt/manifest.js +23 -0
  22. package/dist/dbt/models.d.ts +43 -0
  23. package/dist/dbt/models.d.ts.map +1 -0
  24. package/dist/dbt/models.js +256 -0
  25. package/dist/dbt/models.test.d.ts +2 -0
  26. package/dist/dbt/models.test.d.ts.map +1 -0
  27. package/dist/dbt/models.test.js +19 -0
  28. package/dist/dbt/profile.d.ts +9 -0
  29. package/dist/dbt/profile.d.ts.map +1 -0
  30. package/dist/dbt/profile.js +86 -0
  31. package/dist/dbt/profiles.test.d.ts +2 -0
  32. package/dist/dbt/profiles.test.d.ts.map +1 -0
  33. package/dist/dbt/profiles.test.js +50 -0
  34. package/dist/dbt/schema.d.ts +31 -0
  35. package/dist/dbt/schema.d.ts.map +1 -0
  36. package/dist/dbt/schema.js +49 -0
  37. package/dist/dbt/targets/Bigquery/index.d.ts +18 -0
  38. package/dist/dbt/targets/Bigquery/index.d.ts.map +1 -0
  39. package/dist/dbt/targets/Bigquery/index.js +105 -0
  40. package/dist/dbt/targets/Bigquery/oauth.d.ts +2 -0
  41. package/dist/dbt/targets/Bigquery/oauth.d.ts.map +1 -0
  42. package/dist/dbt/targets/Bigquery/oauth.js +43 -0
  43. package/dist/dbt/targets/Bigquery/serviceAccount.d.ts +35 -0
  44. package/dist/dbt/targets/Bigquery/serviceAccount.d.ts.map +1 -0
  45. package/dist/dbt/targets/Bigquery/serviceAccount.js +149 -0
  46. package/dist/dbt/targets/Databricks/oauth.d.ts +21 -0
  47. package/dist/dbt/targets/Databricks/oauth.d.ts.map +1 -0
  48. package/dist/dbt/targets/Databricks/oauth.js +184 -0
  49. package/dist/dbt/targets/athena.d.ts +21 -0
  50. package/dist/dbt/targets/athena.d.ts.map +1 -0
  51. package/dist/dbt/targets/athena.js +91 -0
  52. package/dist/dbt/targets/athena.test.d.ts +2 -0
  53. package/dist/dbt/targets/athena.test.d.ts.map +1 -0
  54. package/dist/dbt/targets/athena.test.js +60 -0
  55. package/dist/dbt/targets/clickhouse.d.ts +24 -0
  56. package/dist/dbt/targets/clickhouse.d.ts.map +1 -0
  57. package/dist/dbt/targets/clickhouse.js +90 -0
  58. package/dist/dbt/targets/databricks.d.ts +27 -0
  59. package/dist/dbt/targets/databricks.d.ts.map +1 -0
  60. package/dist/dbt/targets/databricks.js +138 -0
  61. package/dist/dbt/targets/duckdb.d.ts +16 -0
  62. package/dist/dbt/targets/duckdb.d.ts.map +1 -0
  63. package/dist/dbt/targets/duckdb.js +63 -0
  64. package/dist/dbt/targets/duckdb.test.d.ts +2 -0
  65. package/dist/dbt/targets/duckdb.test.d.ts.map +1 -0
  66. package/dist/dbt/targets/duckdb.test.js +37 -0
  67. package/dist/dbt/targets/postgres.d.ts +26 -0
  68. package/dist/dbt/targets/postgres.d.ts.map +1 -0
  69. package/dist/dbt/targets/postgres.js +142 -0
  70. package/dist/dbt/targets/redshift.d.ts +23 -0
  71. package/dist/dbt/targets/redshift.d.ts.map +1 -0
  72. package/dist/dbt/targets/redshift.js +96 -0
  73. package/dist/dbt/targets/snowflake.d.ts +4 -0
  74. package/dist/dbt/targets/snowflake.d.ts.map +1 -0
  75. package/dist/dbt/targets/snowflake.js +134 -0
  76. package/dist/dbt/targets/trino.d.ts +16 -0
  77. package/dist/dbt/targets/trino.d.ts.map +1 -0
  78. package/dist/dbt/targets/trino.js +65 -0
  79. package/dist/dbt/templating.d.ts +15 -0
  80. package/dist/dbt/templating.d.ts.map +1 -0
  81. package/dist/dbt/templating.js +50 -0
  82. package/dist/dbt/templating.test.d.ts +2 -0
  83. package/dist/dbt/templating.test.d.ts.map +1 -0
  84. package/dist/dbt/templating.test.js +51 -0
  85. package/dist/dbt/types.d.ts +17 -0
  86. package/dist/dbt/types.d.ts.map +1 -0
  87. package/dist/dbt/types.js +2 -0
  88. package/dist/dbt/validation.d.ts +9 -0
  89. package/dist/dbt/validation.d.ts.map +1 -0
  90. package/dist/dbt/validation.js +54 -0
  91. package/dist/env.d.ts +12 -0
  92. package/dist/env.d.ts.map +1 -0
  93. package/dist/env.js +40 -0
  94. package/dist/error.d.ts +2 -0
  95. package/dist/error.d.ts.map +1 -0
  96. package/dist/error.js +12 -0
  97. package/dist/globalState.d.ts +29 -0
  98. package/dist/globalState.d.ts.map +1 -0
  99. package/dist/globalState.js +67 -0
  100. package/dist/handlers/asyncQuery.d.ts +7 -0
  101. package/dist/handlers/asyncQuery.d.ts.map +1 -0
  102. package/dist/handlers/asyncQuery.js +50 -0
  103. package/dist/handlers/compile.d.ts +16 -0
  104. package/dist/handlers/compile.d.ts.map +1 -0
  105. package/dist/handlers/compile.js +277 -0
  106. package/dist/handlers/compile.test.d.ts +2 -0
  107. package/dist/handlers/compile.test.d.ts.map +1 -0
  108. package/dist/handlers/compile.test.js +201 -0
  109. package/dist/handlers/createProject.d.ts +37 -0
  110. package/dist/handlers/createProject.d.ts.map +1 -0
  111. package/dist/handlers/createProject.js +272 -0
  112. package/dist/handlers/dbt/apiClient.d.ts +14 -0
  113. package/dist/handlers/dbt/apiClient.d.ts.map +1 -0
  114. package/dist/handlers/dbt/apiClient.js +167 -0
  115. package/dist/handlers/dbt/compile.d.ts +35 -0
  116. package/dist/handlers/dbt/compile.d.ts.map +1 -0
  117. package/dist/handlers/dbt/compile.js +220 -0
  118. package/dist/handlers/dbt/getDbtProfileTargetName.d.ts +9 -0
  119. package/dist/handlers/dbt/getDbtProfileTargetName.d.ts.map +1 -0
  120. package/dist/handlers/dbt/getDbtProfileTargetName.js +44 -0
  121. package/dist/handlers/dbt/getDbtVersion.d.ts +16 -0
  122. package/dist/handlers/dbt/getDbtVersion.d.ts.map +1 -0
  123. package/dist/handlers/dbt/getDbtVersion.js +141 -0
  124. package/dist/handlers/dbt/getDbtVersion.mocks.d.ts +11 -0
  125. package/dist/handlers/dbt/getDbtVersion.mocks.d.ts.map +1 -0
  126. package/dist/handlers/dbt/getDbtVersion.mocks.js +70 -0
  127. package/dist/handlers/dbt/getDbtVersion.test.d.ts +2 -0
  128. package/dist/handlers/dbt/getDbtVersion.test.d.ts.map +1 -0
  129. package/dist/handlers/dbt/getDbtVersion.test.js +97 -0
  130. package/dist/handlers/dbt/getWarehouseClient.d.ts +24 -0
  131. package/dist/handlers/dbt/getWarehouseClient.d.ts.map +1 -0
  132. package/dist/handlers/dbt/getWarehouseClient.js +312 -0
  133. package/dist/handlers/dbt/refresh.d.ts +11 -0
  134. package/dist/handlers/dbt/refresh.d.ts.map +1 -0
  135. package/dist/handlers/dbt/refresh.js +114 -0
  136. package/dist/handlers/dbt/run.d.ts +14 -0
  137. package/dist/handlers/dbt/run.d.ts.map +1 -0
  138. package/dist/handlers/dbt/run.js +67 -0
  139. package/dist/handlers/deploy.d.ts +26 -0
  140. package/dist/handlers/deploy.d.ts.map +1 -0
  141. package/dist/handlers/deploy.js +377 -0
  142. package/dist/handlers/diagnostics.d.ts +11 -0
  143. package/dist/handlers/diagnostics.d.ts.map +1 -0
  144. package/dist/handlers/diagnostics.js +194 -0
  145. package/dist/handlers/download.d.ts +29 -0
  146. package/dist/handlers/download.d.ts.map +1 -0
  147. package/dist/handlers/download.js +955 -0
  148. package/dist/handlers/exportChartImage.d.ts +7 -0
  149. package/dist/handlers/exportChartImage.d.ts.map +1 -0
  150. package/dist/handlers/exportChartImage.js +33 -0
  151. package/dist/handlers/generate.d.ts +13 -0
  152. package/dist/handlers/generate.d.ts.map +1 -0
  153. package/dist/handlers/generate.js +159 -0
  154. package/dist/handlers/generateExposures.d.ts +8 -0
  155. package/dist/handlers/generateExposures.d.ts.map +1 -0
  156. package/dist/handlers/generateExposures.js +100 -0
  157. package/dist/handlers/getProject.d.ts +6 -0
  158. package/dist/handlers/getProject.d.ts.map +1 -0
  159. package/dist/handlers/getProject.js +43 -0
  160. package/dist/handlers/installSkills.d.ts +12 -0
  161. package/dist/handlers/installSkills.d.ts.map +1 -0
  162. package/dist/handlers/installSkills.js +321 -0
  163. package/dist/handlers/lint/ajvToSarif.d.ts +66 -0
  164. package/dist/handlers/lint/ajvToSarif.d.ts.map +1 -0
  165. package/dist/handlers/lint/ajvToSarif.js +222 -0
  166. package/dist/handlers/lint/sarifFormatter.d.ts +14 -0
  167. package/dist/handlers/lint/sarifFormatter.d.ts.map +1 -0
  168. package/dist/handlers/lint/sarifFormatter.js +111 -0
  169. package/dist/handlers/lint.d.ts +8 -0
  170. package/dist/handlers/lint.d.ts.map +1 -0
  171. package/dist/handlers/lint.js +308 -0
  172. package/dist/handlers/listProjects.d.ts +6 -0
  173. package/dist/handlers/listProjects.d.ts.map +1 -0
  174. package/dist/handlers/listProjects.js +53 -0
  175. package/dist/handlers/login/oauth.d.ts +2 -0
  176. package/dist/handlers/login/oauth.d.ts.map +1 -0
  177. package/dist/handlers/login/oauth.js +27 -0
  178. package/dist/handlers/login/pat.d.ts +2 -0
  179. package/dist/handlers/login/pat.d.ts.map +1 -0
  180. package/dist/handlers/login/pat.js +31 -0
  181. package/dist/handlers/login.d.ts +15 -0
  182. package/dist/handlers/login.d.ts.map +1 -0
  183. package/dist/handlers/login.js +239 -0
  184. package/dist/handlers/metadataFile.d.ts +9 -0
  185. package/dist/handlers/metadataFile.d.ts.map +1 -0
  186. package/dist/handlers/metadataFile.js +34 -0
  187. package/dist/handlers/oauthLogin.d.ts +6 -0
  188. package/dist/handlers/oauthLogin.d.ts.map +1 -0
  189. package/dist/handlers/oauthLogin.js +191 -0
  190. package/dist/handlers/preview.d.ts +29 -0
  191. package/dist/handlers/preview.d.ts.map +1 -0
  192. package/dist/handlers/preview.js +415 -0
  193. package/dist/handlers/renameHandler.d.ts +16 -0
  194. package/dist/handlers/renameHandler.d.ts.map +1 -0
  195. package/dist/handlers/renameHandler.js +160 -0
  196. package/dist/handlers/runChart.d.ts +10 -0
  197. package/dist/handlers/runChart.d.ts.map +1 -0
  198. package/dist/handlers/runChart.js +105 -0
  199. package/dist/handlers/selectProject.d.ts +20 -0
  200. package/dist/handlers/selectProject.d.ts.map +1 -0
  201. package/dist/handlers/selectProject.js +91 -0
  202. package/dist/handlers/setProject.d.ts +14 -0
  203. package/dist/handlers/setProject.d.ts.map +1 -0
  204. package/dist/handlers/setProject.js +131 -0
  205. package/dist/handlers/setWarehouse.d.ts +14 -0
  206. package/dist/handlers/setWarehouse.d.ts.map +1 -0
  207. package/dist/handlers/setWarehouse.js +94 -0
  208. package/dist/handlers/sql.d.ts +9 -0
  209. package/dist/handlers/sql.d.ts.map +1 -0
  210. package/dist/handlers/sql.js +89 -0
  211. package/dist/handlers/utils.d.ts +11 -0
  212. package/dist/handlers/utils.d.ts.map +1 -0
  213. package/dist/handlers/utils.js +36 -0
  214. package/dist/handlers/validate.d.ts +22 -0
  215. package/dist/handlers/validate.d.ts.map +1 -0
  216. package/dist/handlers/validate.js +201 -0
  217. package/dist/index.d.ts +3 -0
  218. package/dist/index.d.ts.map +1 -0
  219. package/dist/index.js +581 -0
  220. package/dist/lightdash/loader.d.ts +21 -0
  221. package/dist/lightdash/loader.d.ts.map +1 -0
  222. package/dist/lightdash/loader.js +122 -0
  223. package/dist/lightdash/projectType.d.ts +84 -0
  224. package/dist/lightdash/projectType.d.ts.map +1 -0
  225. package/dist/lightdash/projectType.js +75 -0
  226. package/dist/lightdash-config/index.d.ts +2 -0
  227. package/dist/lightdash-config/index.d.ts.map +1 -0
  228. package/dist/lightdash-config/index.js +41 -0
  229. package/dist/lightdash-config/lightdash-config.test.d.ts +2 -0
  230. package/dist/lightdash-config/lightdash-config.test.d.ts.map +1 -0
  231. package/dist/lightdash-config/lightdash-config.test.js +70 -0
  232. package/dist/styles.d.ts +10 -0
  233. package/dist/styles.d.ts.map +1 -0
  234. package/dist/styles.js +14 -0
  235. package/entitlements.plist +33 -0
  236. package/package.json +71 -0
  237. package/track.sh +116 -0
@@ -0,0 +1,312 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.createProgramaticallySnowflakePat = exports.getTableSchema = void 0;
4
+ exports.default = getWarehouseClient;
5
+ const tslib_1 = require("tslib");
6
+ const common_1 = require("@lightdash/common");
7
+ const warehouses_1 = require("@lightdash/warehouses");
8
+ const crypto_1 = tslib_1.__importDefault(require("crypto"));
9
+ const execa_1 = tslib_1.__importDefault(require("execa"));
10
+ const path_1 = tslib_1.__importDefault(require("path"));
11
+ const config_1 = require("../../config");
12
+ const profile_1 = require("../../dbt/profile");
13
+ const oauth_1 = require("../../dbt/targets/Databricks/oauth");
14
+ const globalState_1 = tslib_1.__importDefault(require("../../globalState"));
15
+ const styles = tslib_1.__importStar(require("../../styles"));
16
+ const apiClient_1 = require("./apiClient");
17
/**
 * Cache warehouse clients to avoid repeated authentication prompts
 * Currently used for:
 * - Snowflake external browser auth (avoids opening multiple browser tabs)
 *
 * Keyed by a sha256 hash of the stringified credentials (see
 * getWarehouseClientCacheKey below); values hold
 * { warehouseClient, credentials } pairs stored by getWarehouseClient.
 */
const warehouseClientCache = new Map();
23
/**
 * Derives a stable cache key for a set of warehouse credentials.
 * The credentials object is serialized to JSON and hashed, so the key is
 * unique per credential set regardless of warehouse type.
 */
function getWarehouseClientCacheKey(credentials) {
    // sha256 over the JSON form yields a compact, collision-resistant key.
    return crypto_1.default
        .createHash('sha256')
        .update(JSON.stringify(credentials))
        .digest('hex');
}
36
/**
 * Fetches field metadata for a warehouse table from the active Lightdash
 * project via the SQL runner fields API.
 *
 * Fix: the identifiers were previously interpolated into the query string
 * unencoded, so a table/schema/database name containing '&', '#', '=' or
 * whitespace would corrupt the request. URLSearchParams now encodes each
 * parameter, and the project UUID path segment is encoded defensively too.
 */
const getTableSchema = async ({ projectUuid, tableName, schemaName, databaseName, }) => {
    const query = new URLSearchParams({
        tableName,
        schemaName,
        databaseName,
    }).toString();
    return (0, apiClient_1.lightdashApi)({
        method: 'GET',
        url: `/api/v1/projects/${encodeURIComponent(projectUuid)}/sqlRunner/fields?${query}`,
        body: undefined,
    });
};
exports.getTableSchema = getTableSchema;
42
const DBT_CLOUD_CONNECTION_TYPE_REGEX = /Connection type\s+(\w+)/;
/**
 * Detects the warehouse adapter type of the active dbt Cloud environment by
 * parsing the combined output of `dbt environment show`.
 * Any failure (spawn error, missing match, unsupported adapter) surfaces as
 * a ParseError wrapped with a "Failed to get connection type" prefix.
 */
const getDbtCloudConnectionType = async () => {
    try {
        const result = await (0, execa_1.default)('dbt', ['environment', 'show'], {
            all: true,
            stdio: ['pipe', 'pipe', 'pipe'],
        });
        const output = result.all ?? '';
        const match = output.match(DBT_CLOUD_CONNECTION_TYPE_REGEX);
        if (match === null || match.length === 0) {
            throw new common_1.ParseError(`Can't locate connection type in 'dbt environment show' response`);
        }
        const adapterType = match[1];
        if (!(0, common_1.isSupportedDbtAdapterType)(adapterType)) {
            throw new common_1.ParseError(`Unsupported dbt adaptor type ${adapterType}`);
        }
        return adapterType;
    }
    catch (e) {
        throw new common_1.ParseError(`Failed to get connection type:\n ${(0, common_1.getErrorMessage)(e)}`);
    }
};
63
/**
 * Builds placeholder warehouse credentials for the given dbt adapter type.
 *
 * Used on the dbt Cloud CLI path of getWarehouseClient, where real warehouse
 * credentials are not available locally but a typed client instance is still
 * required. Field values are stubs (empty strings / defaults); the caller
 * overrides every client method that would actually touch the warehouse.
 *
 * Throws via assertUnreachable for adapter types without a mock mapping.
 */
function getMockCredentials(dbtAdaptorType) {
    switch (dbtAdaptorType) {
        case common_1.SupportedDbtAdapter.BIGQUERY:
            return {
                type: common_1.WarehouseTypes.BIGQUERY,
                project: '',
                dataset: '',
                timeoutSeconds: undefined,
                priority: undefined,
                keyfileContents: {},
                retries: undefined,
                location: undefined,
                maximumBytesBilled: undefined,
            };
        case common_1.SupportedDbtAdapter.POSTGRES:
            return {
                type: common_1.WarehouseTypes.POSTGRES,
                host: '',
                user: '',
                password: '',
                port: 5432,
                dbname: '',
                schema: '',
            };
        case common_1.SupportedDbtAdapter.DUCKDB:
            return {
                type: common_1.WarehouseTypes.DUCKDB,
                // In-memory database: nothing is read from disk.
                database: ':memory:',
                schema: 'main',
                token: '',
            };
        case common_1.SupportedDbtAdapter.REDSHIFT:
            return {
                type: common_1.WarehouseTypes.REDSHIFT,
                host: '',
                user: '',
                password: '',
                port: 5432,
                dbname: '',
                schema: '',
            };
        case common_1.SupportedDbtAdapter.SNOWFLAKE:
            return {
                type: common_1.WarehouseTypes.SNOWFLAKE,
                account: '',
                user: '',
                password: '',
                warehouse: '',
                database: '',
                schema: '',
                role: '',
            };
        case common_1.SupportedDbtAdapter.DATABRICKS:
            return {
                type: common_1.WarehouseTypes.DATABRICKS,
                catalog: '',
                database: '',
                serverHostName: '',
                httpPath: '',
                personalAccessToken: '',
            };
        case common_1.SupportedDbtAdapter.TRINO:
            return {
                type: common_1.WarehouseTypes.TRINO,
                host: '',
                user: '',
                password: '',
                port: 5432,
                dbname: '',
                schema: '',
                http_scheme: '',
            };
        case common_1.SupportedDbtAdapter.CLICKHOUSE:
            return {
                type: common_1.WarehouseTypes.CLICKHOUSE,
                host: '',
                user: '',
                password: '',
                port: 8443,
                schema: '',
                secure: true,
                timeoutSeconds: 300,
            };
        case common_1.SupportedDbtAdapter.ATHENA:
            return {
                type: common_1.WarehouseTypes.ATHENA,
                authenticationType: common_1.AthenaAuthenticationType.ACCESS_KEY,
                region: '',
                database: '',
                schema: '',
                s3StagingDir: '',
                accessKeyId: '',
                secretAccessKey: '',
            };
        default:
            // Exhaustiveness guard: fails loudly on new adapter types.
            return (0, common_1.assertUnreachable)(dbtAdaptorType, `Unsupported dbt adaptor type ${dbtAdaptorType}`);
    }
}
161
/**
 * Creates a short-lived (1 day) Snowflake Programmatic Access Token so Clary
 * can access the warehouse.
 *
 * Known Snowflake PAT limitations and the errors Snowflake returns:
 * - At most 15 PATs per user: "Exceeded maximum of 15 programmatic access tokens."
 * - Names must be unique: "Programmatic access token CLARY_CLI already exists."
 * - Names cannot contain "-": "SQL compilation error: syntax error line 1 at
 *   position 37 unexpected '-'" — hence the underscore-only name below.
 *
 * Exits the process with code 1 if token creation fails.
 */
const createProgramaticallySnowflakePat = async (credentials) => {
    const snowflakeClient = new warehouses_1.SnowflakeWarehouseClient({
        ...credentials,
    });
    try {
        // Progress messages go to stderr so stdout stays clean.
        console.error(`\n- Creating Snowflake Programmatic Access Token\n`);
        // Timestamp suffix keeps the token name unique across runs.
        const pat = await snowflakeClient.createProgrammaticAccessToken(`clary_cli_${Date.now()}`, 1);
        console.error(`\n✓ Successfully created Snowflake PAT: ${pat.tokenName}\n`);
        return pat.tokenSecret;
    }
    catch (e) {
        console.error(styles.error(`\nFailed to create Snowflake PAT: ${(0, common_1.getErrorMessage)(e)}`));
        process.exit(1);
    }
    // Unreachable (process.exit above); retained to satisfy the string return type.
    return '';
};
exports.createProgramaticallySnowflakePat = createProgramaticallySnowflakePat;
185
/**
 * Resolves a warehouse client and the credentials used to build it.
 *
 * Two modes:
 * - dbt Cloud CLI (`options.isDbtCloudCLI`): real warehouse credentials are
 *   not available locally, so mock credentials are built for the detected
 *   adapter type and every client method that would hit the warehouse is
 *   stubbed; schema lookups are proxied through the active project's API.
 * - Local dbt profiles: credentials are loaded from the profiles dir and the
 *   resulting client is cached per credential hash so interactive auth
 *   (e.g. Snowflake external browser, Databricks OAuth) is not re-prompted.
 *
 * Returns { warehouseClient, credentials }.
 */
async function getWarehouseClient(options) {
    let warehouseClient;
    let credentials;
    if (options.isDbtCloudCLI) {
        const dbtAdaptorType = await getDbtCloudConnectionType();
        globalState_1.default.debug(`> Using ${dbtAdaptorType} client mock`);
        credentials = getMockCredentials(dbtAdaptorType);
        warehouseClient = (0, warehouses_1.warehouseClientFromCredentials)({
            ...credentials,
            startOfWeek: (0, common_1.isWeekDay)(options.startOfWeek)
                ? options.startOfWeek
                : undefined,
        });
        const config = await (0, config_1.getConfig)();
        // Overwrite methods that need to connect to the warehouse
        warehouseClient.getCatalog = async (refs) => refs.reduce(async (accPromise, ref) => {
            const acc = await accPromise; // Wait for the previous step's result
            if (!config.context?.project) {
                // If the project is not set(eg: on first project create), we can't fetch the schema
                return acc;
            }
            try {
                globalState_1.default.debug(`> Warehouse schema information is not available in dbt Cloud CLI. The schema ${ref.database}.${ref.schema}.${ref.table} will be fetched from the active project.`);
                const fields = await (0, exports.getTableSchema)({
                    projectUuid: config.context.project,
                    tableName: ref.table,
                    schemaName: ref.schema,
                    databaseName: ref.database,
                });
                // Nest as database -> schema -> table -> fields, matching the
                // catalog shape a real warehouse client returns.
                acc[ref.database] = {
                    [ref.schema]: {
                        [ref.table]: fields,
                    },
                };
            }
            catch (e) {
                // Best-effort: a failed lookup just leaves a gap in the catalog.
                globalState_1.default.debug(`Failed to get schema for ${ref.database}.${ref.schema}.${ref.table}.`);
            }
            return acc;
        }, Promise.resolve({}));
        warehouseClient.streamQuery = async (_query, streamCallback) => {
            globalState_1.default.debug(`> WarehouseClient.streamQuery() is not supported with dbt Cloud CLI. An empty result will be used.`);
            return streamCallback({ fields: {}, rows: [] });
        };
        warehouseClient.runQuery = async () => {
            globalState_1.default.debug(`> WarehouseClient.runQuery() is not supported with dbt Cloud CLI. An empty result will be used.`);
            return { fields: {}, rows: [] };
        };
        warehouseClient.test = async () => {
            globalState_1.default.debug(`> WarehouseClient.test() is not supported with dbt Cloud CLI. No test will be run.`);
        };
        warehouseClient.getAllTables = async () => {
            globalState_1.default.debug(`> WarehouseClient.getAllTables() is not supported with dbt Cloud CLI. An empty result will be used.`);
            return [];
        };
        warehouseClient.getFields = async () => {
            globalState_1.default.debug(`> WarehouseClient.getFields() is not supported with dbt Cloud CLI. An empty result will be used.`);
            return { fields: {} };
        };
    }
    else {
        const absoluteProfilesPath = path_1.default.resolve(options.profilesDir);
        globalState_1.default.debug(`> Using profiles dir ${absoluteProfilesPath} and profile ${options.profile}`);
        const { target } = await (0, profile_1.loadDbtTarget)({
            profilesDir: absoluteProfilesPath,
            profileName: options.profile,
            targetName: options.target,
        });
        globalState_1.default.debug(`> Using target ${target.type}`);
        credentials = await (0, profile_1.warehouseCredentialsFromDbtTarget)(target);
        // Check cache before any OAuth flows to avoid repeated authentication prompts
        const cacheKey = getWarehouseClientCacheKey(credentials);
        if (warehouseClientCache.has(cacheKey)) {
            globalState_1.default.debug(`> Reusing cached warehouse client (${credentials.type})`);
            // Reuse the cached credentials too: they may carry tokens obtained
            // during a previous OAuth exchange below.
            const cached = warehouseClientCache.get(cacheKey);
            warehouseClient = cached.warehouseClient;
            credentials = cached.credentials;
        }
        else {
            // Exchange Databricks OAuth M2M credentials for access token if needed
            if (credentials.type === common_1.WarehouseTypes.DATABRICKS &&
                credentials.authenticationType ===
                    common_1.DatabricksAuthenticationType.OAUTH_M2M &&
                credentials.oauthClientId &&
                credentials.oauthClientSecret &&
                !credentials.token) {
                globalState_1.default.debug(`> Exchanging Databricks OAuth credentials for access token`);
                try {
                    const { accessToken } = await (0, warehouses_1.exchangeDatabricksOAuthCredentials)(credentials.serverHostName, credentials.oauthClientId, credentials.oauthClientSecret);
                    credentials.token = accessToken;
                }
                catch (e) {
                    globalState_1.default.debug(`> Failed to exchange Databricks OAuth credentials for access token: ${(0, common_1.getErrorMessage)(e)}`);
                    console.warn(styles.error(`\nFailed to authenticate with Databricks using M2M OAuth (client_id and client_secret). ` +
                        `Perhaps you meant to use U2M OAuth instead? Set DATABRICKS_OAUTH=u2m environment variable to force U2M authentication.`));
                    process.exit(1);
                }
            }
            // Handle Databricks OAuth U2M authentication
            if (credentials.type === common_1.WarehouseTypes.DATABRICKS &&
                credentials.authenticationType ===
                    common_1.DatabricksAuthenticationType.OAUTH_U2M &&
                !credentials.token) {
                const clientId = credentials.oauthClientId ||
                    oauth_1.DATABRICKS_DEFAULT_OAUTH_CLIENT_ID;
                const tokens = await (0, oauth_1.performDatabricksOAuthFlow)(credentials.serverHostName, clientId, credentials.oauthClientSecret);
                // Store tokens in memory only
                credentials.token = tokens.accessToken;
                credentials.refreshToken = tokens.refreshToken;
            }
            globalState_1.default.debug(`> Creating new warehouse client to cache (${credentials.type})`);
            warehouseClient = (0, warehouses_1.warehouseClientFromCredentials)({
                ...credentials,
                startOfWeek: (0, common_1.isWeekDay)(options.startOfWeek)
                    ? options.startOfWeek
                    : undefined,
            });
            // Cached after token exchange: the key reflects the raw profile
            // credentials while the cached value carries any live tokens.
            warehouseClientCache.set(cacheKey, {
                warehouseClient,
                credentials,
            });
        }
    }
    return {
        warehouseClient,
        credentials,
    };
}
@@ -0,0 +1,11 @@
1
import { Job, JobStep, Project } from '@lightdash/common';
/** Fetches a Lightdash project by UUID. */
export declare const getProject: (projectUuid: string) => Promise<Project>;
/** Formats a "step X/Y: label" progress line for the currently running step. */
export declare const getRunningStepsMessage: (steps: JobStep[]) => string;
/** Formats a "step X/Y: label error ..." line for the step that failed. */
export declare const getErrorStepsMessage: (steps: JobStep[]) => string;
/** Polls a job until it is DONE (resolves with the job) or ERROR (rejects). */
export declare const getFinalJobState: (jobUuid: string, spinnerPrefix?: string) => Promise<Job>;
type RefreshHandlerOptions = {
    verbose: boolean;
};
/** Handler for the `refresh` CLI command. */
export declare const refreshHandler: (options: RefreshHandlerOptions) => Promise<void>;
export {};
//# sourceMappingURL=refresh.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"refresh.d.ts","sourceRoot":"","sources":["../../../src/handlers/dbt/refresh.ts"],"names":[],"mappings":"AAAA,OAAO,EAIH,GAAG,EAEH,OAAO,EAGP,OAAO,EACV,MAAM,mBAAmB,CAAC;AAQ3B,eAAO,MAAM,UAAU,GAAU,aAAa,MAAM,qBAK9C,CAAC;AAsBP,eAAO,MAAM,sBAAsB,GAAI,OAAO,OAAO,EAAE,KAAG,MAUzD,CAAC;AAEF,eAAO,MAAM,oBAAoB,GAAI,OAAO,OAAO,EAAE,KAAG,MAUvD,CAAC;AAIF,eAAO,MAAM,gBAAgB,GACzB,SAAS,MAAM,EACf,gBAAe,MAAiC,KACjD,OAAO,CAAC,GAAG,CAcb,CAAC;AAEF,KAAK,qBAAqB,GAAG;IACzB,OAAO,EAAE,OAAO,CAAC;CACpB,CAAC;AAEF,eAAO,MAAM,cAAc,GAAU,SAAS,qBAAqB,kBAgElE,CAAC"}
@@ -0,0 +1,114 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.refreshHandler = exports.getFinalJobState = exports.getErrorStepsMessage = exports.getRunningStepsMessage = exports.getProject = void 0;
4
+ const tslib_1 = require("tslib");
5
+ const common_1 = require("@lightdash/common");
6
+ const uuid_1 = require("uuid");
7
+ const analytics_1 = require("../../analytics/analytics");
8
+ const config_1 = require("../../config");
9
+ const globalState_1 = tslib_1.__importDefault(require("../../globalState"));
10
+ const styles = tslib_1.__importStar(require("../../styles"));
11
+ const apiClient_1 = require("./apiClient");
12
/**
 * Fetches a Lightdash project by UUID from the API.
 */
const getProject = async (projectUuid) => {
    const request = {
        method: 'GET',
        url: `/api/v1/projects/${projectUuid}`,
        body: undefined,
    };
    return (0, apiClient_1.lightdashApi)(request);
};
exports.getProject = getProject;
18
/**
 * Kicks off a refresh job for the given project and returns the job handle.
 */
const refreshProject = async (projectUuid) => {
    // POST with no body triggers the refresh job server-side.
    return (0, apiClient_1.lightdashApi)({
        method: 'POST',
        url: `/api/v1/projects/${projectUuid}/refresh`,
        body: undefined,
    });
};
23
/**
 * Reads the current state of a job by UUID.
 */
const getJobState = async (jobUuid) => {
    const url = `/api/v1/jobs/${jobUuid}`;
    return (0, apiClient_1.lightdashApi)({ method: 'GET', url, body: undefined });
};
28
/**
 * Returns a promise that resolves after `ms` milliseconds.
 */
function delay(ms) {
    return new Promise((done) => setTimeout(done, ms));
}
33
/**
 * Formats a one-line progress message ("step X/Y: label") for the job step
 * currently in the RUNNING state.
 */
const getRunningStepsMessage = (steps) => {
    const doneCount = steps.filter((s) => s.stepStatus === common_1.JobStepStatusType.DONE).length;
    const current = steps.find((s) => s.stepStatus === common_1.JobStepStatusType.RUNNING);
    // Cap at steps.length so the counter never reads past the total.
    const position = Math.min(doneCount + 1, steps.length);
    return `step ${position}/${steps.length}: ${current?.stepLabel || ''}`;
};
exports.getRunningStepsMessage = getRunningStepsMessage;
39
/**
 * Formats a one-line failure message ("step X/Y: label error ...") for the
 * job step that entered the ERROR state.
 */
const getErrorStepsMessage = (steps) => {
    const doneCount = steps.filter((s) => s.stepStatus === common_1.JobStepStatusType.DONE).length;
    const failed = steps.find((s) => s.stepStatus === common_1.JobStepStatusType.ERROR);
    // Cap at steps.length so the counter never reads past the total.
    const position = Math.min(doneCount + 1, steps.length);
    return `step ${position}/${steps.length}: ${failed?.stepLabel || ''} error ${failed?.stepError}`;
};
exports.getErrorStepsMessage = getErrorStepsMessage;
45
const REFETCH_JOB_INTERVAL = 3000;
/**
 * Polls a job every REFETCH_JOB_INTERVAL ms until it reaches a terminal state.
 * Resolves with the job on DONE; rejects with the failing step's message on
 * ERROR. While waiting, updates the active spinner (if any) with progress.
 */
const getFinalJobState = async (jobUuid, spinnerPrefix = 'Refreshing dbt project') => {
    const job = await getJobState(jobUuid);
    switch (job.jobStatus) {
        case common_1.JobStatusType.DONE:
            return job;
        case common_1.JobStatusType.ERROR:
            throw new Error((0, exports.getErrorStepsMessage)(job.steps));
        default:
            break;
    }
    globalState_1.default
        .getActiveSpinner()
        ?.start(` ${spinnerPrefix}, ${(0, exports.getRunningStepsMessage)(job.steps)}`);
    // Still in progress: wait, then poll again.
    await delay(REFETCH_JOB_INTERVAL);
    return (0, exports.getFinalJobState)(jobUuid, spinnerPrefix);
};
exports.getFinalJobState = getFinalJobState;
59
/**
 * Handler for the `refresh` CLI command: triggers a dbt refresh job on the
 * active Clary project and waits for it to complete.
 *
 * Requires an active project and server URL in the CLI config; the project
 * must be connected to a remote repository. Emits analytics events for
 * start/completion/error, drives a spinner while polling, and re-throws any
 * failure after reporting it.
 */
const refreshHandler = async (options) => {
    globalState_1.default.setVerbose(options.verbose);
    await (0, apiClient_1.checkLightdashVersion)();
    // One execution id ties the started/completed/error analytics together.
    const executionId = (0, uuid_1.v4)();
    const config = await (0, config_1.getConfig)();
    if (!(config.context?.project && config.context.serverUrl)) {
        throw new common_1.AuthorizationError(`No active Clary project. Run 'clary login --help'`);
    }
    const projectUuid = config.context.project;
    // Log current project info
    globalState_1.default.logProjectInfo(config);
    const project = await (0, exports.getProject)(projectUuid);
    if (project.dbtConnection.type === common_1.DbtProjectType.NONE) {
        throw new common_1.ParameterError('Clary project must be connected to a remote repository. eg: GitHub, Gitlab, etc');
    }
    const spinner = globalState_1.default.startSpinner(` Refreshing dbt project`);
    try {
        const refreshStartTime = Date.now();
        await analytics_1.LightdashAnalytics.track({
            event: 'refresh.started',
            properties: {
                executionId,
                projectId: projectUuid,
            },
        });
        const refreshResults = await refreshProject(projectUuid);
        // Poll until the refresh job reaches a terminal state (throws on ERROR).
        await (0, exports.getFinalJobState)(refreshResults.jobUuid);
        await analytics_1.LightdashAnalytics.track({
            event: 'refresh.completed',
            properties: {
                executionId,
                projectId: projectUuid,
                durationMs: Date.now() - refreshStartTime,
            },
        });
        spinner.stop();
    }
    catch (e) {
        // Report the failure before re-throwing so the CLI exit path still
        // surfaces the original error.
        await analytics_1.LightdashAnalytics.track({
            event: 'refresh.error',
            properties: {
                executionId,
                projectId: projectUuid,
                error: `Error refreshing project: ${e}`,
            },
        });
        spinner.fail();
        throw e;
    }
    // Success output is written via console.error (stderr).
    const displayUrl = `${config.context?.serverUrl}/projects/${projectUuid}/home`;
    console.error(`${styles.bold('Successfully refreshed project:')}`);
    console.error('');
    console.error(` ${styles.bold(`⚡️ ${displayUrl}`)}`);
    console.error('');
};
exports.refreshHandler = refreshHandler;
@@ -0,0 +1,14 @@
1
import { Command } from 'commander';
import { DbtCompileOptions } from './compile';
/** Options accepted by dbtRunHandler, extending the dbt compile options. */
type DbtRunHandlerOptions = DbtCompileOptions & {
    profilesDir: string;
    projectDir: string;
    excludeMeta: boolean;
    verbose: boolean;
    assumeYes: boolean;
    assumeNo: boolean;
    preserveColumnCase: boolean;
};
/** Handler for the `dbt run` passthrough CLI command. */
export declare const dbtRunHandler: (options: DbtRunHandlerOptions, command: Command) => Promise<void>;
export {};
//# sourceMappingURL=run.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"run.d.ts","sourceRoot":"","sources":["../../../src/handlers/dbt/run.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAwB,MAAM,WAAW,CAAC;AAK1D,OAAO,EAAE,iBAAiB,EAAE,MAAM,WAAW,CAAC;AAE9C,KAAK,oBAAoB,GAAG,iBAAiB,GAAG;IAC5C,WAAW,EAAE,MAAM,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;IACnB,WAAW,EAAE,OAAO,CAAC;IACrB,OAAO,EAAE,OAAO,CAAC;IACjB,SAAS,EAAE,OAAO,CAAC;IACnB,QAAQ,EAAE,OAAO,CAAC;IAClB,kBAAkB,EAAE,OAAO,CAAC;CAC/B,CAAC;AAEF,eAAO,MAAM,aAAa,GACtB,SAAS,oBAAoB,EAC7B,SAAS,OAAO,kBAmEnB,CAAC"}
@@ -0,0 +1,67 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.dbtRunHandler = void 0;
4
+ const tslib_1 = require("tslib");
5
+ const common_1 = require("@lightdash/common");
6
+ const commander_1 = require("commander");
7
+ const execa_1 = tslib_1.__importDefault(require("execa"));
8
+ const analytics_1 = require("../../analytics/analytics");
9
+ const globalState_1 = tslib_1.__importDefault(require("../../globalState"));
10
+ const generate_1 = require("../generate");
11
+ const dbtRunHandler = async (options, command) => {
12
+ globalState_1.default.setVerbose(options.verbose);
13
+ if (!command.parent) {
14
+ throw new Error('Parent command not found');
15
+ }
16
+ if (options.assumeYes && options.assumeNo) {
17
+ throw new commander_1.InvalidArgumentError('Cannot use both --assume-yes and --assume-no flags');
18
+ }
19
+ const dbtStartTime = Date.now();
20
+ await analytics_1.LightdashAnalytics.track({
21
+ event: 'dbt_command.started',
22
+ properties: {
23
+ command: `${command.parent.args}`,
24
+ },
25
+ });
26
+ const commands = command.parent.args.reduce((acc, arg) => {
27
+ if (arg === '--verbose' ||
28
+ arg === '--assume-yes' ||
29
+ arg === '--assume-no')
30
+ return acc;
31
+ return [...acc, arg];
32
+ }, []);
33
+ globalState_1.default.debug(`> Running dbt command: ${commands}`);
34
+ try {
35
+ const subprocess = (0, execa_1.default)('dbt', commands, {
36
+ stdio: 'inherit',
37
+ });
38
+ await subprocess;
39
+ await analytics_1.LightdashAnalytics.track({
40
+ event: 'dbt_command.completed',
41
+ properties: {
42
+ command: `${commands}`,
43
+ durationMs: Date.now() - dbtStartTime,
44
+ },
45
+ });
46
+ }
47
+ catch (e) {
48
+ const msg = (0, common_1.getErrorMessage)(e);
49
+ await analytics_1.LightdashAnalytics.track({
50
+ event: 'dbt_command.error',
51
+ properties: {
52
+ command: `${commands}`,
53
+ error: `${msg}`,
54
+ durationMs: Date.now() - dbtStartTime,
55
+ },
56
+ });
57
+ throw new common_1.ParseError(`Failed to run dbt:\n ${msg}`);
58
+ }
59
+ if (!options.assumeNo) {
60
+ await (0, generate_1.generateHandler)({
61
+ ...options,
62
+ excludeMeta: options.excludeMeta,
63
+ preserveColumnCase: options.preserveColumnCase,
64
+ });
65
+ }
66
+ };
67
+ exports.dbtRunHandler = dbtRunHandler;
@@ -0,0 +1,26 @@
1
+ import { Explore, ExploreError } from '@lightdash/common';
2
+ import { DbtCompileOptions } from './dbt/compile';
3
+ type DeployHandlerOptions = DbtCompileOptions & {
4
+ projectDir: string;
5
+ profilesDir: string;
6
+ target: string | undefined;
7
+ profile: string | undefined;
8
+ create?: boolean | string;
9
+ verbose: boolean;
10
+ ignoreErrors: boolean;
11
+ startOfWeek?: number;
12
+ warehouseCredentials?: boolean;
13
+ organizationCredentials?: string;
14
+ assumeYes?: boolean;
15
+ useBatchedDeploy?: boolean;
16
+ batchSize?: string;
17
+ parallelBatches?: string;
18
+ gzip?: boolean;
19
+ };
20
+ type DeployArgs = DeployHandlerOptions & {
21
+ projectUuid: string;
22
+ };
23
+ export declare const deploy: (explores: (Explore | ExploreError)[], options: DeployArgs) => Promise<void>;
24
+ export declare const deployHandler: (originalOptions: DeployHandlerOptions) => Promise<void>;
25
+ export {};
26
+ //# sourceMappingURL=deploy.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"deploy.d.ts","sourceRoot":"","sources":["../../src/handlers/deploy.ts"],"names":[],"mappings":"AAAA,OAAO,EAGH,OAAO,EACP,YAAY,EASf,MAAM,mBAAmB,CAAC;AAsB3B,OAAO,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAIlD,KAAK,oBAAoB,GAAG,iBAAiB,GAAG;IAC5C,UAAU,EAAE,MAAM,CAAC;IACnB,WAAW,EAAE,MAAM,CAAC;IACpB,MAAM,EAAE,MAAM,GAAG,SAAS,CAAC;IAC3B,OAAO,EAAE,MAAM,GAAG,SAAS,CAAC;IAC5B,MAAM,CAAC,EAAE,OAAO,GAAG,MAAM,CAAC;IAC1B,OAAO,EAAE,OAAO,CAAC;IACjB,YAAY,EAAE,OAAO,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,oBAAoB,CAAC,EAAE,OAAO,CAAC;IAC/B,uBAAuB,CAAC,EAAE,MAAM,CAAC;IACjC,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,IAAI,CAAC,EAAE,OAAO,CAAC;CAClB,CAAC;AAEF,KAAK,UAAU,GAAG,oBAAoB,GAAG;IACrC,WAAW,EAAE,MAAM,CAAC;CACvB,CAAC;AA8KF,eAAO,MAAM,MAAM,GACf,UAAU,CAAC,OAAO,GAAG,YAAY,CAAC,EAAE,EACpC,SAAS,UAAU,KACpB,OAAO,CAAC,IAAI,CAuHd,CAAC;AAiGF,eAAO,MAAM,aAAa,GAAU,iBAAiB,oBAAoB,kBA+GxE,CAAC"}