@claryai/cli 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (237) hide show
  1. package/LICENSE +25 -0
  2. package/README.md +197 -0
  3. package/dist/.tsbuildinfo +1 -0
  4. package/dist/ajv.d.ts +3 -0
  5. package/dist/ajv.d.ts.map +1 -0
  6. package/dist/ajv.js +13 -0
  7. package/dist/analytics/analytics.d.ts +370 -0
  8. package/dist/analytics/analytics.d.ts.map +1 -0
  9. package/dist/analytics/analytics.js +143 -0
  10. package/dist/config.d.ts +34 -0
  11. package/dist/config.d.ts.map +1 -0
  12. package/dist/config.js +134 -0
  13. package/dist/dbt/context.d.ts +14 -0
  14. package/dist/dbt/context.d.ts.map +1 -0
  15. package/dist/dbt/context.js +76 -0
  16. package/dist/dbt/context.test.d.ts +2 -0
  17. package/dist/dbt/context.test.d.ts.map +1 -0
  18. package/dist/dbt/context.test.js +152 -0
  19. package/dist/dbt/manifest.d.ts +7 -0
  20. package/dist/dbt/manifest.d.ts.map +1 -0
  21. package/dist/dbt/manifest.js +23 -0
  22. package/dist/dbt/models.d.ts +43 -0
  23. package/dist/dbt/models.d.ts.map +1 -0
  24. package/dist/dbt/models.js +256 -0
  25. package/dist/dbt/models.test.d.ts +2 -0
  26. package/dist/dbt/models.test.d.ts.map +1 -0
  27. package/dist/dbt/models.test.js +19 -0
  28. package/dist/dbt/profile.d.ts +9 -0
  29. package/dist/dbt/profile.d.ts.map +1 -0
  30. package/dist/dbt/profile.js +86 -0
  31. package/dist/dbt/profiles.test.d.ts +2 -0
  32. package/dist/dbt/profiles.test.d.ts.map +1 -0
  33. package/dist/dbt/profiles.test.js +50 -0
  34. package/dist/dbt/schema.d.ts +31 -0
  35. package/dist/dbt/schema.d.ts.map +1 -0
  36. package/dist/dbt/schema.js +49 -0
  37. package/dist/dbt/targets/Bigquery/index.d.ts +18 -0
  38. package/dist/dbt/targets/Bigquery/index.d.ts.map +1 -0
  39. package/dist/dbt/targets/Bigquery/index.js +105 -0
  40. package/dist/dbt/targets/Bigquery/oauth.d.ts +2 -0
  41. package/dist/dbt/targets/Bigquery/oauth.d.ts.map +1 -0
  42. package/dist/dbt/targets/Bigquery/oauth.js +43 -0
  43. package/dist/dbt/targets/Bigquery/serviceAccount.d.ts +35 -0
  44. package/dist/dbt/targets/Bigquery/serviceAccount.d.ts.map +1 -0
  45. package/dist/dbt/targets/Bigquery/serviceAccount.js +149 -0
  46. package/dist/dbt/targets/Databricks/oauth.d.ts +21 -0
  47. package/dist/dbt/targets/Databricks/oauth.d.ts.map +1 -0
  48. package/dist/dbt/targets/Databricks/oauth.js +184 -0
  49. package/dist/dbt/targets/athena.d.ts +21 -0
  50. package/dist/dbt/targets/athena.d.ts.map +1 -0
  51. package/dist/dbt/targets/athena.js +91 -0
  52. package/dist/dbt/targets/athena.test.d.ts +2 -0
  53. package/dist/dbt/targets/athena.test.d.ts.map +1 -0
  54. package/dist/dbt/targets/athena.test.js +60 -0
  55. package/dist/dbt/targets/clickhouse.d.ts +24 -0
  56. package/dist/dbt/targets/clickhouse.d.ts.map +1 -0
  57. package/dist/dbt/targets/clickhouse.js +90 -0
  58. package/dist/dbt/targets/databricks.d.ts +27 -0
  59. package/dist/dbt/targets/databricks.d.ts.map +1 -0
  60. package/dist/dbt/targets/databricks.js +138 -0
  61. package/dist/dbt/targets/duckdb.d.ts +16 -0
  62. package/dist/dbt/targets/duckdb.d.ts.map +1 -0
  63. package/dist/dbt/targets/duckdb.js +63 -0
  64. package/dist/dbt/targets/duckdb.test.d.ts +2 -0
  65. package/dist/dbt/targets/duckdb.test.d.ts.map +1 -0
  66. package/dist/dbt/targets/duckdb.test.js +37 -0
  67. package/dist/dbt/targets/postgres.d.ts +26 -0
  68. package/dist/dbt/targets/postgres.d.ts.map +1 -0
  69. package/dist/dbt/targets/postgres.js +142 -0
  70. package/dist/dbt/targets/redshift.d.ts +23 -0
  71. package/dist/dbt/targets/redshift.d.ts.map +1 -0
  72. package/dist/dbt/targets/redshift.js +96 -0
  73. package/dist/dbt/targets/snowflake.d.ts +4 -0
  74. package/dist/dbt/targets/snowflake.d.ts.map +1 -0
  75. package/dist/dbt/targets/snowflake.js +134 -0
  76. package/dist/dbt/targets/trino.d.ts +16 -0
  77. package/dist/dbt/targets/trino.d.ts.map +1 -0
  78. package/dist/dbt/targets/trino.js +65 -0
  79. package/dist/dbt/templating.d.ts +15 -0
  80. package/dist/dbt/templating.d.ts.map +1 -0
  81. package/dist/dbt/templating.js +50 -0
  82. package/dist/dbt/templating.test.d.ts +2 -0
  83. package/dist/dbt/templating.test.d.ts.map +1 -0
  84. package/dist/dbt/templating.test.js +51 -0
  85. package/dist/dbt/types.d.ts +17 -0
  86. package/dist/dbt/types.d.ts.map +1 -0
  87. package/dist/dbt/types.js +2 -0
  88. package/dist/dbt/validation.d.ts +9 -0
  89. package/dist/dbt/validation.d.ts.map +1 -0
  90. package/dist/dbt/validation.js +54 -0
  91. package/dist/env.d.ts +12 -0
  92. package/dist/env.d.ts.map +1 -0
  93. package/dist/env.js +40 -0
  94. package/dist/error.d.ts +2 -0
  95. package/dist/error.d.ts.map +1 -0
  96. package/dist/error.js +12 -0
  97. package/dist/globalState.d.ts +29 -0
  98. package/dist/globalState.d.ts.map +1 -0
  99. package/dist/globalState.js +67 -0
  100. package/dist/handlers/asyncQuery.d.ts +7 -0
  101. package/dist/handlers/asyncQuery.d.ts.map +1 -0
  102. package/dist/handlers/asyncQuery.js +50 -0
  103. package/dist/handlers/compile.d.ts +16 -0
  104. package/dist/handlers/compile.d.ts.map +1 -0
  105. package/dist/handlers/compile.js +277 -0
  106. package/dist/handlers/compile.test.d.ts +2 -0
  107. package/dist/handlers/compile.test.d.ts.map +1 -0
  108. package/dist/handlers/compile.test.js +201 -0
  109. package/dist/handlers/createProject.d.ts +37 -0
  110. package/dist/handlers/createProject.d.ts.map +1 -0
  111. package/dist/handlers/createProject.js +272 -0
  112. package/dist/handlers/dbt/apiClient.d.ts +14 -0
  113. package/dist/handlers/dbt/apiClient.d.ts.map +1 -0
  114. package/dist/handlers/dbt/apiClient.js +167 -0
  115. package/dist/handlers/dbt/compile.d.ts +35 -0
  116. package/dist/handlers/dbt/compile.d.ts.map +1 -0
  117. package/dist/handlers/dbt/compile.js +220 -0
  118. package/dist/handlers/dbt/getDbtProfileTargetName.d.ts +9 -0
  119. package/dist/handlers/dbt/getDbtProfileTargetName.d.ts.map +1 -0
  120. package/dist/handlers/dbt/getDbtProfileTargetName.js +44 -0
  121. package/dist/handlers/dbt/getDbtVersion.d.ts +16 -0
  122. package/dist/handlers/dbt/getDbtVersion.d.ts.map +1 -0
  123. package/dist/handlers/dbt/getDbtVersion.js +141 -0
  124. package/dist/handlers/dbt/getDbtVersion.mocks.d.ts +11 -0
  125. package/dist/handlers/dbt/getDbtVersion.mocks.d.ts.map +1 -0
  126. package/dist/handlers/dbt/getDbtVersion.mocks.js +70 -0
  127. package/dist/handlers/dbt/getDbtVersion.test.d.ts +2 -0
  128. package/dist/handlers/dbt/getDbtVersion.test.d.ts.map +1 -0
  129. package/dist/handlers/dbt/getDbtVersion.test.js +97 -0
  130. package/dist/handlers/dbt/getWarehouseClient.d.ts +24 -0
  131. package/dist/handlers/dbt/getWarehouseClient.d.ts.map +1 -0
  132. package/dist/handlers/dbt/getWarehouseClient.js +312 -0
  133. package/dist/handlers/dbt/refresh.d.ts +11 -0
  134. package/dist/handlers/dbt/refresh.d.ts.map +1 -0
  135. package/dist/handlers/dbt/refresh.js +114 -0
  136. package/dist/handlers/dbt/run.d.ts +14 -0
  137. package/dist/handlers/dbt/run.d.ts.map +1 -0
  138. package/dist/handlers/dbt/run.js +67 -0
  139. package/dist/handlers/deploy.d.ts +26 -0
  140. package/dist/handlers/deploy.d.ts.map +1 -0
  141. package/dist/handlers/deploy.js +377 -0
  142. package/dist/handlers/diagnostics.d.ts +11 -0
  143. package/dist/handlers/diagnostics.d.ts.map +1 -0
  144. package/dist/handlers/diagnostics.js +194 -0
  145. package/dist/handlers/download.d.ts +29 -0
  146. package/dist/handlers/download.d.ts.map +1 -0
  147. package/dist/handlers/download.js +955 -0
  148. package/dist/handlers/exportChartImage.d.ts +7 -0
  149. package/dist/handlers/exportChartImage.d.ts.map +1 -0
  150. package/dist/handlers/exportChartImage.js +33 -0
  151. package/dist/handlers/generate.d.ts +13 -0
  152. package/dist/handlers/generate.d.ts.map +1 -0
  153. package/dist/handlers/generate.js +159 -0
  154. package/dist/handlers/generateExposures.d.ts +8 -0
  155. package/dist/handlers/generateExposures.d.ts.map +1 -0
  156. package/dist/handlers/generateExposures.js +100 -0
  157. package/dist/handlers/getProject.d.ts +6 -0
  158. package/dist/handlers/getProject.d.ts.map +1 -0
  159. package/dist/handlers/getProject.js +43 -0
  160. package/dist/handlers/installSkills.d.ts +12 -0
  161. package/dist/handlers/installSkills.d.ts.map +1 -0
  162. package/dist/handlers/installSkills.js +321 -0
  163. package/dist/handlers/lint/ajvToSarif.d.ts +66 -0
  164. package/dist/handlers/lint/ajvToSarif.d.ts.map +1 -0
  165. package/dist/handlers/lint/ajvToSarif.js +222 -0
  166. package/dist/handlers/lint/sarifFormatter.d.ts +14 -0
  167. package/dist/handlers/lint/sarifFormatter.d.ts.map +1 -0
  168. package/dist/handlers/lint/sarifFormatter.js +111 -0
  169. package/dist/handlers/lint.d.ts +8 -0
  170. package/dist/handlers/lint.d.ts.map +1 -0
  171. package/dist/handlers/lint.js +308 -0
  172. package/dist/handlers/listProjects.d.ts +6 -0
  173. package/dist/handlers/listProjects.d.ts.map +1 -0
  174. package/dist/handlers/listProjects.js +53 -0
  175. package/dist/handlers/login/oauth.d.ts +2 -0
  176. package/dist/handlers/login/oauth.d.ts.map +1 -0
  177. package/dist/handlers/login/oauth.js +27 -0
  178. package/dist/handlers/login/pat.d.ts +2 -0
  179. package/dist/handlers/login/pat.d.ts.map +1 -0
  180. package/dist/handlers/login/pat.js +31 -0
  181. package/dist/handlers/login.d.ts +15 -0
  182. package/dist/handlers/login.d.ts.map +1 -0
  183. package/dist/handlers/login.js +239 -0
  184. package/dist/handlers/metadataFile.d.ts +9 -0
  185. package/dist/handlers/metadataFile.d.ts.map +1 -0
  186. package/dist/handlers/metadataFile.js +34 -0
  187. package/dist/handlers/oauthLogin.d.ts +6 -0
  188. package/dist/handlers/oauthLogin.d.ts.map +1 -0
  189. package/dist/handlers/oauthLogin.js +191 -0
  190. package/dist/handlers/preview.d.ts +29 -0
  191. package/dist/handlers/preview.d.ts.map +1 -0
  192. package/dist/handlers/preview.js +415 -0
  193. package/dist/handlers/renameHandler.d.ts +16 -0
  194. package/dist/handlers/renameHandler.d.ts.map +1 -0
  195. package/dist/handlers/renameHandler.js +160 -0
  196. package/dist/handlers/runChart.d.ts +10 -0
  197. package/dist/handlers/runChart.d.ts.map +1 -0
  198. package/dist/handlers/runChart.js +105 -0
  199. package/dist/handlers/selectProject.d.ts +20 -0
  200. package/dist/handlers/selectProject.d.ts.map +1 -0
  201. package/dist/handlers/selectProject.js +91 -0
  202. package/dist/handlers/setProject.d.ts +14 -0
  203. package/dist/handlers/setProject.d.ts.map +1 -0
  204. package/dist/handlers/setProject.js +131 -0
  205. package/dist/handlers/setWarehouse.d.ts +14 -0
  206. package/dist/handlers/setWarehouse.d.ts.map +1 -0
  207. package/dist/handlers/setWarehouse.js +94 -0
  208. package/dist/handlers/sql.d.ts +9 -0
  209. package/dist/handlers/sql.d.ts.map +1 -0
  210. package/dist/handlers/sql.js +89 -0
  211. package/dist/handlers/utils.d.ts +11 -0
  212. package/dist/handlers/utils.d.ts.map +1 -0
  213. package/dist/handlers/utils.js +36 -0
  214. package/dist/handlers/validate.d.ts +22 -0
  215. package/dist/handlers/validate.d.ts.map +1 -0
  216. package/dist/handlers/validate.js +201 -0
  217. package/dist/index.d.ts +3 -0
  218. package/dist/index.d.ts.map +1 -0
  219. package/dist/index.js +581 -0
  220. package/dist/lightdash/loader.d.ts +21 -0
  221. package/dist/lightdash/loader.d.ts.map +1 -0
  222. package/dist/lightdash/loader.js +122 -0
  223. package/dist/lightdash/projectType.d.ts +84 -0
  224. package/dist/lightdash/projectType.d.ts.map +1 -0
  225. package/dist/lightdash/projectType.js +75 -0
  226. package/dist/lightdash-config/index.d.ts +2 -0
  227. package/dist/lightdash-config/index.d.ts.map +1 -0
  228. package/dist/lightdash-config/index.js +41 -0
  229. package/dist/lightdash-config/lightdash-config.test.d.ts +2 -0
  230. package/dist/lightdash-config/lightdash-config.test.d.ts.map +1 -0
  231. package/dist/lightdash-config/lightdash-config.test.js +70 -0
  232. package/dist/styles.d.ts +10 -0
  233. package/dist/styles.d.ts.map +1 -0
  234. package/dist/styles.js +14 -0
  235. package/entitlements.plist +33 -0
  236. package/package.json +71 -0
  237. package/track.sh +116 -0
@@ -0,0 +1,256 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.getCompiledModels = exports.findAndUpdateModelYaml = exports.isDocBlock = exports.getWarehouseTableForModel = void 0;
4
+ const tslib_1 = require("tslib");
5
+ const common_1 = require("@lightdash/common");
6
+ const execa_1 = tslib_1.__importDefault(require("execa"));
7
+ const inquirer_1 = tslib_1.__importDefault(require("inquirer"));
8
+ const path = tslib_1.__importStar(require("path"));
9
+ const globalState_1 = tslib_1.__importDefault(require("../globalState"));
10
+ const styles = tslib_1.__importStar(require("../styles"));
11
+ const schema_1 = require("./schema");
12
+ const getWarehouseTableForModel = async ({ model, warehouseClient, preserveColumnCase, }) => {
13
+ const tableRef = {
14
+ database: model.database,
15
+ schema: model.schema,
16
+ table: model.alias || model.name,
17
+ };
18
+ const catalog = await warehouseClient.getCatalog([tableRef]);
19
+ const table = catalog[tableRef.database]?.[tableRef.schema]?.[tableRef.table];
20
+ if (!table) {
21
+ const database = catalog[tableRef.database];
22
+ const schema = database?.[tableRef.schema];
23
+ const missing = (database === undefined && `database ${tableRef.database}`) ||
24
+ (schema === undefined && `schema ${tableRef.schema}`) ||
25
+ (table === undefined && `table ${tableRef.table}`);
26
+ throw new common_1.ParseError(`Expected to find materialised model at ${tableRef.database}.${tableRef.schema}.${tableRef.table} but couldn't find (or cannot access) ${missing}`);
27
+ }
28
+ return Object.entries(table).reduce((accumulator, [key, value]) => {
29
+ const columnName = preserveColumnCase ? key : key.toLowerCase();
30
+ accumulator[columnName] = value;
31
+ return accumulator;
32
+ }, {});
33
+ };
34
+ exports.getWarehouseTableForModel = getWarehouseTableForModel;
35
+ // Check if we should use dbt 1.10+ metadata structure
36
+ const wrapInConfig = (dbtVersion, meta) => {
37
+ const useDbt110Metadata = dbtVersion &&
38
+ Object.values(common_1.SupportedDbtVersions).indexOf(dbtVersion) >=
39
+ Object.values(common_1.SupportedDbtVersions).indexOf(common_1.SupportedDbtVersions.V1_10);
40
+ return useDbt110Metadata ? { config: { meta } } : { meta };
41
+ };
42
+ const generateModelYml = ({ model, table, includeMeta, dbtVersion, }) => ({
43
+ name: model.name,
44
+ columns: Object.entries(table).map(([columnName, dimensionType]) => ({
45
+ name: columnName,
46
+ description: '',
47
+ ...(includeMeta
48
+ ? wrapInConfig(dbtVersion, {
49
+ dimension: {
50
+ type: dimensionType,
51
+ },
52
+ })
53
+ : {}),
54
+ })),
55
+ });
56
+ const askOverwrite = async (message) => {
57
+ const answers = await inquirer_1.default.prompt([
58
+ {
59
+ type: 'confirm',
60
+ name: 'isConfirm',
61
+ message,
62
+ },
63
+ ]);
64
+ if (!answers.isConfirm) {
65
+ return false;
66
+ }
67
+ return true;
68
+ };
69
+ const isDocBlock = (text = '') => !!text.match(/{{\s*doc\(['"]\w+['"]\)\s*}}/);
70
+ exports.isDocBlock = isDocBlock;
71
+ const askOverwriteDescription = async (columnName, existingDescription, newDescription, assumeYes) => {
72
+ if (!existingDescription)
73
+ return newDescription || '';
74
+ if (!newDescription)
75
+ return existingDescription;
76
+ if (newDescription === existingDescription ||
77
+ (0, exports.isDocBlock)(existingDescription))
78
+ return existingDescription;
79
+ if (assumeYes)
80
+ return newDescription;
81
+ const shortDescription = `${existingDescription.substring(0, 20)}${existingDescription.length > 20 ? '...' : ''}`;
82
+ const overwriteMessage = `Do you want to overwrite the existing column "${columnName}" description (${shortDescription}) with a doc block?`;
83
+ const spinner = globalState_1.default.getActiveSpinner();
84
+ spinner?.stop();
85
+ const overwrite = await askOverwrite(overwriteMessage);
86
+ spinner?.start();
87
+ if (overwrite)
88
+ return newDescription;
89
+ return existingDescription;
90
+ };
91
+ const findAndUpdateModelYaml = async ({ model, table, docs, includeMeta, projectDir, projectName, assumeYes, dbtVersion, }) => {
92
+ const generatedModel = generateModelYml({
93
+ model,
94
+ table,
95
+ includeMeta,
96
+ dbtVersion,
97
+ });
98
+ const filenames = [];
99
+ const { patchPath, packageName } = model;
100
+ if (patchPath) {
101
+ const { project: expectedYamlProject, path: expectedYamlSubPath } = (0, common_1.patchPathParts)(patchPath);
102
+ const projectSubpath = expectedYamlProject !== projectName
103
+ ? path.join('dbt_packages', expectedYamlProject)
104
+ : '.';
105
+ const expectedYamlPath = path.join(projectDir, projectSubpath, expectedYamlSubPath);
106
+ filenames.push(expectedYamlPath);
107
+ }
108
+ const outputDir = path.dirname(path.join(packageName === projectName
109
+ ? '.'
110
+ : path.join('dbt_packages', packageName), model.originalFilePath));
111
+ const outputFilePath = path.join(projectDir, outputDir, `${model.name}.yml`);
112
+ filenames.push(outputFilePath);
113
+ const match = await (0, schema_1.searchForModel)({
114
+ modelName: model.name,
115
+ filenames,
116
+ dbtVersion,
117
+ });
118
+ if (match) {
119
+ const { schemaEditor } = match;
120
+ const docsNames = Object.values(docs).map((doc) => doc.name);
121
+ const existingColumns = schemaEditor.getModelColumns(model.name) ?? [];
122
+ const existingColumnNamesLower = existingColumns.map((c) => c.name.toLowerCase());
123
+ // Build a case-insensitive lookup for warehouse table columns
124
+ const tableLower = Object.entries(table).reduce((acc, [key, value]) => {
125
+ acc[key.toLowerCase()] = value;
126
+ return acc;
127
+ }, {});
128
+ // Update existing columns description and dimension type
129
+ for (const column of existingColumns) {
130
+ const hasDoc = docsNames.includes(column.name);
131
+ const newDescription = hasDoc ? `{{doc('${column.name}')}}` : '';
132
+ const existingDescription = column.description;
133
+ const existingDimensionType = column.meta?.dimension?.type;
134
+ const dimensionType = tableLower[column.name.toLowerCase()];
135
+ // eslint-disable-next-line no-await-in-loop
136
+ const description = await askOverwriteDescription(column.name, existingDescription, newDescription, assumeYes);
137
+ // Update meta if dimension type is different
138
+ const meta = includeMeta &&
139
+ dimensionType &&
140
+ existingDimensionType !== dimensionType
141
+ ? {
142
+ dimension: {
143
+ type: dimensionType,
144
+ },
145
+ }
146
+ : undefined;
147
+ schemaEditor.updateColumn({
148
+ modelName: model.name,
149
+ columnName: column.name,
150
+ properties: {
151
+ description,
152
+ ...(meta ? wrapInConfig(dbtVersion, meta) : {}),
153
+ },
154
+ });
155
+ }
156
+ // Add columns that don't exist in the model (case-insensitive comparison)
157
+ const newColumns = generatedModel.columns.filter((c) => !existingColumnNamesLower.includes(c.name.toLowerCase()));
158
+ newColumns.forEach((column) => {
159
+ schemaEditor.addColumn(model.name, column);
160
+ });
161
+ // Delete columns that no longer exist in the warehouse (case-insensitive comparison)
162
+ const generatedColumnNamesLower = generatedModel.columns.map((gc) => gc.name.toLowerCase());
163
+ const deletedColumnNames = existingColumns
164
+ .filter((c) => !generatedColumnNamesLower.includes(c.name.toLowerCase()))
165
+ .map((c) => c.name);
166
+ if (deletedColumnNames.length > 0 && !globalState_1.default.isNonInteractive()) {
167
+ let answers = { isConfirm: assumeYes };
168
+ if (!assumeYes) {
169
+ const spinner = globalState_1.default.getActiveSpinner();
170
+ spinner?.stop();
171
+ console.error(`
172
+ These columns in your model ${styles.bold(model.name)} on file ${styles.bold(match.filename.split('/').slice(-1))} no longer exist in your warehouse:
173
+ ${deletedColumnNames.map((name) => `- ${styles.bold(name)} \n`).join('')}
174
+ `);
175
+ answers = await inquirer_1.default.prompt([
176
+ {
177
+ type: 'confirm',
178
+ name: 'isConfirm',
179
+ message: `Would you like to remove them from your .yml file? `,
180
+ },
181
+ ]);
182
+ spinner?.start();
183
+ }
184
+ if (answers.isConfirm) {
185
+ schemaEditor.removeColumns(model.name, deletedColumnNames);
186
+ }
187
+ }
188
+ return {
189
+ updatedYml: schemaEditor,
190
+ outputFilePath: match.filename,
191
+ };
192
+ }
193
+ return {
194
+ updatedYml: new common_1.DbtSchemaEditor('', '', dbtVersion).addModel(generatedModel),
195
+ outputFilePath,
196
+ };
197
+ };
198
+ exports.findAndUpdateModelYaml = findAndUpdateModelYaml;
199
+ const getCompiledModels = async (models, args) => {
200
+ let allModelIds = models.map((model) => model.unique_id);
201
+ if (args.select || args.exclude) {
202
+ const spinner = globalState_1.default.startSpinner(`Filtering models`);
203
+ try {
204
+ const { stdout } = await (0, execa_1.default)('dbt', [
205
+ 'ls',
206
+ ...(args.projectDir ? ['--project-dir', args.projectDir] : []),
207
+ ...(args.profilesDir
208
+ ? ['--profiles-dir', args.profilesDir]
209
+ : []),
210
+ ...(args.target ? ['--target', args.target] : []),
211
+ ...(args.profile ? ['--profile', args.profile] : []),
212
+ ...(args.select ? ['--select', args.select.join(' ')] : []),
213
+ ...(args.exclude ? ['--exclude', args.exclude.join(' ')] : []),
214
+ ...(args.vars ? ['--vars', args.vars] : []),
215
+ '--resource-type=model',
216
+ '--output=json',
217
+ ]);
218
+ const filteredModelIds = stdout
219
+ .split('\n')
220
+ .map((l) => l.trim())
221
+ .filter((l) => l.length > 0)
222
+ .map((l) => {
223
+ try {
224
+ // remove prefixed time in dbt cloud cli output
225
+ const lineWithoutPrefixedTime = l.replace(/^\d{2}:\d{2}:\d{2}\s*/, '');
226
+ return JSON.parse(lineWithoutPrefixedTime);
227
+ }
228
+ catch {
229
+ return null;
230
+ }
231
+ })
232
+ .filter((l) => l !== null)
233
+ .filter((model) => model.resource_type === 'model')
234
+ .map((model) => model.unique_id);
235
+ allModelIds = allModelIds.filter((modelId) => filteredModelIds.includes(modelId));
236
+ }
237
+ catch (e) {
238
+ console.error(styles.error(`Failed to filter models: ${e}`));
239
+ throw e;
240
+ }
241
+ finally {
242
+ spinner.stop();
243
+ }
244
+ }
245
+ const modelLookup = models.reduce((acc, model) => ({ ...acc, [model.unique_id]: model }), {});
246
+ return allModelIds.map((modelId) => ({
247
+ name: modelLookup[modelId].name,
248
+ schema: modelLookup[modelId].schema,
249
+ database: modelLookup[modelId].database,
250
+ originalFilePath: modelLookup[modelId].original_file_path,
251
+ patchPath: modelLookup[modelId].patch_path,
252
+ alias: modelLookup[modelId].alias,
253
+ packageName: modelLookup[modelId].package_name,
254
+ }));
255
+ };
256
+ exports.getCompiledModels = getCompiledModels;
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=models.test.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"models.test.d.ts","sourceRoot":"","sources":["../../src/dbt/models.test.ts"],"names":[],"mappings":""}
@@ -0,0 +1,19 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const models_1 = require("./models");
4
+ describe('Models', () => {
5
+ describe('isDocBlock', () => {
6
+ test('should match all doc block variations', () => {
7
+ expect((0, models_1.isDocBlock)("{{doc('user_id')}}")).toBe(true); // single quote
8
+ expect((0, models_1.isDocBlock)('{{doc("user_id")}}')).toBe(true); // double quote
9
+ expect((0, models_1.isDocBlock)("{{ doc('user_id') }}")).toBe(true); // white spaces
10
+ expect((0, models_1.isDocBlock)("{{ doc('user_id')}}")).toBe(true); // inconsistent white space
11
+ });
12
+ test('should return false when value doesnt match doc block', () => {
13
+ expect((0, models_1.isDocBlock)()).toBe(false);
14
+ expect((0, models_1.isDocBlock)('{{ref("user_id")}}')).toBe(false);
15
+ expect((0, models_1.isDocBlock)("doc('user_id')")).toBe(false);
16
+ expect((0, models_1.isDocBlock)('my description')).toBe(false);
17
+ });
18
+ });
19
+ });
@@ -0,0 +1,9 @@
1
+ import { CreateWarehouseCredentials } from '@lightdash/common';
2
+ import { LoadProfileArgs, Target } from './types';
3
+ export declare const loadDbtTarget: ({ profilesDir, profileName, targetName, }: LoadProfileArgs) => Promise<{
4
+ name: string;
5
+ target: Target;
6
+ }>;
7
+ export declare const warehouseCredentialsFromDbtTarget: (target: Target) => Promise<CreateWarehouseCredentials>;
8
+ export declare const findDbtDefaultProfile: () => string;
9
+ //# sourceMappingURL=profile.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"profile.d.ts","sourceRoot":"","sources":["../../src/dbt/profile.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,0BAA0B,EAG7B,MAAM,mBAAmB,CAAC;AAe3B,OAAO,EAAE,eAAe,EAAY,MAAM,EAAE,MAAM,SAAS,CAAC;AAE5D,eAAO,MAAM,aAAa,GAAU,2CAIjC,eAAe,KAAG,OAAO,CAAC;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAE,CA8B5D,CAAC;AAEF,eAAO,MAAM,iCAAiC,GAC1C,QAAQ,MAAM,KACf,OAAO,CAAC,0BAA0B,CAyBpC,CAAC;AAEF,eAAO,MAAM,qBAAqB,QAAO,MAYxC,CAAC"}
@@ -0,0 +1,86 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.findDbtDefaultProfile = exports.warehouseCredentialsFromDbtTarget = exports.loadDbtTarget = void 0;
4
+ const tslib_1 = require("tslib");
5
+ const common_1 = require("@lightdash/common");
6
+ const fs_1 = require("fs");
7
+ const yaml = tslib_1.__importStar(require("js-yaml"));
8
+ const os_1 = require("os");
9
+ const path = tslib_1.__importStar(require("path"));
10
+ const athena_1 = require("./targets/athena");
11
+ const Bigquery_1 = require("./targets/Bigquery");
12
+ const clickhouse_1 = require("./targets/clickhouse");
13
+ const databricks_1 = require("./targets/databricks");
14
+ const duckdb_1 = require("./targets/duckdb");
15
+ const postgres_1 = require("./targets/postgres");
16
+ const redshift_1 = require("./targets/redshift");
17
+ const snowflake_1 = require("./targets/snowflake");
18
+ const trino_1 = require("./targets/trino");
19
+ const templating_1 = require("./templating");
20
+ const loadDbtTarget = async ({ profilesDir, profileName, targetName, }) => {
21
+ const profilePath = path.join(profilesDir, 'profiles.yml');
22
+ let allProfiles;
23
+ try {
24
+ const raw = await fs_1.promises.readFile(profilePath, { encoding: 'utf8' });
25
+ const rendered = (0, templating_1.renderProfilesYml)(raw);
26
+ allProfiles = yaml.load(rendered);
27
+ }
28
+ catch (e) {
29
+ const msg = (0, common_1.getErrorMessage)(e);
30
+ throw new common_1.ParseError(`Could not find a valid profiles.yml file at ${profilePath}:\n ${msg}`);
31
+ }
32
+ const profile = allProfiles[profileName];
33
+ if (!profile) {
34
+ throw new common_1.ParseError(`Profile ${profileName} not found in ${profilePath}`);
35
+ }
36
+ const selectedTargetName = targetName || profile.target;
37
+ const target = profile.outputs[selectedTargetName];
38
+ if (target === undefined) {
39
+ throw new common_1.ParseError(`Couldn't find target "${selectedTargetName}" for profile ${profileName} in profiles.yml at ${profilePath}`);
40
+ }
41
+ return {
42
+ name: selectedTargetName,
43
+ target,
44
+ };
45
+ };
46
+ exports.loadDbtTarget = loadDbtTarget;
47
+ const warehouseCredentialsFromDbtTarget = async (target) => {
48
+ switch (target.type) {
49
+ case 'postgres':
50
+ return (0, postgres_1.convertPostgresSchema)(target);
51
+ case 'snowflake':
52
+ return (0, snowflake_1.convertSnowflakeSchema)(target);
53
+ case 'bigquery':
54
+ return (0, Bigquery_1.convertBigquerySchema)(target);
55
+ case 'redshift':
56
+ return (0, redshift_1.convertRedshiftSchema)(target);
57
+ case 'databricks':
58
+ return (0, databricks_1.convertDatabricksSchema)(target);
59
+ case 'trino':
60
+ return (0, trino_1.convertTrinoSchema)(target);
61
+ case 'clickhouse':
62
+ return (0, clickhouse_1.convertClickhouseSchema)(target);
63
+ case 'athena':
64
+ return (0, athena_1.convertAthenaSchema)(target);
65
+ case 'duckdb':
66
+ return (0, duckdb_1.convertDuckdbSchema)(target);
67
+ default:
68
+ throw new common_1.ParseError(`Sorry! Clary doesn't yet support ${target.type} dbt targets`);
69
+ }
70
+ };
71
+ exports.warehouseCredentialsFromDbtTarget = warehouseCredentialsFromDbtTarget;
72
+ const findDbtDefaultProfile = () => {
73
+ if (process.env.DBT_PROFILES_DIR) {
74
+ return process.env.DBT_PROFILES_DIR;
75
+ }
76
+ // Check in Current Working Directory
77
+ const profilePathFromCwd = path.join(process.cwd(), 'profiles.yml');
78
+ try {
79
+ (0, fs_1.accessSync)(profilePathFromCwd, fs_1.constants.F_OK);
80
+ return process.cwd();
81
+ }
82
+ catch (e) {
83
+ return path.join((0, os_1.homedir)(), '.dbt');
84
+ }
85
+ };
86
+ exports.findDbtDefaultProfile = findDbtDefaultProfile;
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=profiles.test.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"profiles.test.d.ts","sourceRoot":"","sources":["../../src/dbt/profiles.test.ts"],"names":[],"mappings":""}
@@ -0,0 +1,50 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const tslib_1 = require("tslib");
4
+ const fs = tslib_1.__importStar(require("fs"));
5
+ const os = tslib_1.__importStar(require("os"));
6
+ const path = tslib_1.__importStar(require("path"));
7
+ const profile_1 = require("./profile");
8
+ jest.mock('os');
9
+ jest.mock('fs');
10
+ jest.mock('path');
11
+ describe('Profile', () => {
12
+ const { env, cwd } = process;
13
+ const mockAccessSync = fs.accessSync;
14
+ const mockHomedir = os.homedir;
15
+ const mockJoin = path.join;
16
+ beforeEach(() => {
17
+ jest.resetAllMocks();
18
+ jest.resetModules();
19
+ process.cwd = jest.fn(() => '/current/dir');
20
+ mockHomedir.mockReturnValue('/root');
21
+ mockJoin.mockImplementation((...paths) => paths.join('/'));
22
+ process.env = { ...env };
23
+ });
24
+ afterEach(() => {
25
+ process.env = env;
26
+ process.cwd = cwd;
27
+ });
28
+ describe('findDbtDefaultProfile', () => {
29
+ test('should return path from DBT_PROFILE_DIR when set', () => {
30
+ process.env.DBT_PROFILES_DIR = '/path/to/profiles';
31
+ const result = (0, profile_1.findDbtDefaultProfile)();
32
+ expect(result).toBe('/path/to/profiles');
33
+ });
34
+ test('should return cwd when profile.yml exists and DBT_PROFILE_DIR is undefined', () => {
35
+ delete process.env.DBT_PROFILES_DIR;
36
+ mockAccessSync.mockImplementation(() => undefined);
37
+ expect((0, profile_1.findDbtDefaultProfile)()).toBe('/current/dir');
38
+ expect(mockAccessSync).toHaveBeenCalledWith('/current/dir/profiles.yml', fs.constants.F_OK);
39
+ });
40
+ test('should return homedir when profile.yml does not exist in cwd', () => {
41
+ delete process.env.DBT_PROFILE_DIR;
42
+ mockAccessSync.mockImplementation(() => {
43
+ throw new Error('File not found');
44
+ });
45
+ expect((0, profile_1.findDbtDefaultProfile)()).toBe('/root/.dbt');
46
+ expect(mockAccessSync).toHaveBeenCalledWith('/current/dir/profiles.yml', fs.constants.F_OK);
47
+ expect(mockHomedir).toHaveBeenCalled();
48
+ });
49
+ });
50
+ });
@@ -0,0 +1,31 @@
1
+ import { DbtSchemaEditor, DimensionType, SupportedDbtVersions } from '@lightdash/common';
2
+ type YamlColumnMeta = {
3
+ dimension?: {
4
+ type?: DimensionType;
5
+ };
6
+ };
7
+ type YamlColumn = {
8
+ name: string;
9
+ description?: string;
10
+ meta?: YamlColumnMeta;
11
+ };
12
+ export type YamlModel = {
13
+ name: string;
14
+ description?: string;
15
+ columns?: YamlColumn[];
16
+ };
17
+ export type YamlSchema = {
18
+ version?: 2;
19
+ models?: YamlModel[];
20
+ };
21
+ type SearchForModelArgs = {
22
+ modelName: string;
23
+ filenames: string[];
24
+ dbtVersion?: SupportedDbtVersions;
25
+ };
26
+ export declare const searchForModel: ({ modelName, filenames, dbtVersion, }: SearchForModelArgs) => Promise<{
27
+ schemaEditor: DbtSchemaEditor;
28
+ filename: string;
29
+ } | undefined>;
30
+ export {};
31
+ //# sourceMappingURL=schema.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../../src/dbt/schema.ts"],"names":[],"mappings":"AAAA,OAAO,EACH,eAAe,EACf,aAAa,EAEb,oBAAoB,EACvB,MAAM,mBAAmB,CAAC;AAG3B,KAAK,cAAc,GAAG;IAClB,SAAS,CAAC,EAAE;QACR,IAAI,CAAC,EAAE,aAAa,CAAC;KACxB,CAAC;CACL,CAAC;AAEF,KAAK,UAAU,GAAG;IACd,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,cAAc,CAAC;CACzB,CAAC;AAEF,MAAM,MAAM,SAAS,GAAG;IACpB,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,OAAO,CAAC,EAAE,UAAU,EAAE,CAAC;CAC1B,CAAC;AAEF,MAAM,MAAM,UAAU,GAAG;IACrB,OAAO,CAAC,EAAE,CAAC,CAAC;IACZ,MAAM,CAAC,EAAE,SAAS,EAAE,CAAC;CACxB,CAAC;AAgDF,KAAK,kBAAkB,GAAG;IACtB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,EAAE,CAAC;IACpB,UAAU,CAAC,EAAE,oBAAoB,CAAC;CACrC,CAAC;AACF,eAAO,MAAM,cAAc,GAAU,uCAIlC,kBAAkB;;;cAepB,CAAC"}
@@ -0,0 +1,49 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.searchForModel = void 0;
4
+ const common_1 = require("@lightdash/common");
5
+ const fs_1 = require("fs");
6
// Read a yaml schema file from disk and wrap it in a DbtSchemaEditor.
// Parse failures are re-thrown as ParseError with the file path prepended;
// any other error (e.g. ENOENT from readFile) propagates unchanged.
const loadYamlSchema = async (path, dbtVersion) => {
    try {
        const fileContents = await fs_1.promises.readFile(path, 'utf8');
        return new common_1.DbtSchemaEditor(fileContents, path, dbtVersion);
    }
    catch (e) {
        if (!(e instanceof common_1.ParseError)) {
            throw e;
        }
        // Prefix error message with file path
        throw new common_1.ParseError(`Couldn't parse existing yaml file at ${path}\n${e.message}`);
    }
};
18
// Load one yaml file and return its editor only when it declares the given
// model. A missing file (ENOENT) is treated as "model not in this file"
// rather than an error; every other failure propagates to the caller.
const findModelInYaml = async ({ filename, modelName, dbtVersion, }) => {
    try {
        const editor = await loadYamlSchema(filename, dbtVersion);
        return editor.findModelByName(modelName) ? editor : undefined;
    }
    catch (e) {
        const isMissingFile = e instanceof Error && 'code' in e && e.code === 'ENOENT';
        if (isMissingFile) {
            return undefined;
        }
        throw e;
    }
};
33
/**
 * Scan candidate schema files in order and resolve with the first one that
 * defines `modelName`, returning both the loaded editor and the matching
 * filename; resolves `undefined` when no file declares the model.
 */
const searchForModel = async ({ modelName, filenames, dbtVersion, }) => {
    // FIX: was `for await...of`. `filenames` is a plain (synchronous) array
    // of strings, so async iteration only added a needless microtask per
    // element and misleadingly implied an async-iterable input. Files are
    // still checked sequentially so the first match wins deterministically.
    for (const filename of filenames) {
        const schemaEditor = await findModelInYaml({
            filename,
            modelName,
            dbtVersion,
        });
        if (schemaEditor) {
            return {
                schemaEditor,
                filename,
            };
        }
    }
    return undefined;
};
exports.searchForModel = searchForModel;
@@ -0,0 +1,18 @@
1
import { CreateBigqueryCredentials } from '@lightdash/common';
import { JSONSchemaType } from 'ajv';
import { Target } from '../../types';
/**
 * Fields read from a `bigquery:` target in a dbt profiles.yml.
 * `dataset` and `schema` both supply the dataset name — the validation
 * schema requires exactly one of them.
 */
type BigqueryTarget = {
    project?: string;
    dataset: string;
    schema: string;
    priority?: 'interactive' | 'batch';
    retries?: number;
    location?: string;
    maximum_bytes_billed?: number;
    timeout_seconds?: number;
    execution_project?: string;
};
/** ajv schema used to validate a raw profiles.yml bigquery target. */
export declare const bigqueryTargetJsonSchema: JSONSchemaType<BigqueryTarget>;
/**
 * Validate a raw dbt target and convert it into Lightdash BigQuery
 * warehouse credentials. Rejects with ParseError on invalid targets,
 * unsupported auth methods, or a missing project for non-oauth methods.
 */
export declare const convertBigquerySchema: (target: Target) => Promise<CreateBigqueryCredentials>;
export {};
//# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/dbt/targets/Bigquery/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAEH,yBAAyB,EAG5B,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,cAAc,EAAE,MAAM,KAAK,CAAC;AAGrC,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AAOrC,KAAK,cAAc,GAAG;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,aAAa,GAAG,OAAO,CAAC;IACnC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAC9B,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC9B,CAAC;AAEF,eAAO,MAAM,wBAAwB,EAAE,cAAc,CAAC,cAAc,CAgDnE,CAAC;AAEF,eAAO,MAAM,qBAAqB,GAC9B,QAAQ,MAAM,KACf,OAAO,CAAC,yBAAyB,CA8DnC,CAAC"}
@@ -0,0 +1,105 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.convertBigquerySchema = exports.bigqueryTargetJsonSchema = void 0;
4
+ const tslib_1 = require("tslib");
5
+ const common_1 = require("@lightdash/common");
6
+ const better_ajv_errors_1 = tslib_1.__importDefault(require("better-ajv-errors"));
7
+ const ajv_1 = require("../../../ajv");
8
+ const oauth_1 = require("./oauth");
9
+ const serviceAccount_1 = require("./serviceAccount");
10
// JSON schema for the `bigquery:` target block of a dbt profiles.yml.
// Compiled with ajv by convertBigquerySchema before conversion.
exports.bigqueryTargetJsonSchema = {
    type: 'object',
    properties: {
        // GCP project id; optional at validation time, but required at
        // runtime for non-oauth methods (enforced in convertBigquerySchema).
        project: {
            type: 'string',
            nullable: true,
        },
        dataset: {
            type: 'string',
        },
        // `schema` is accepted as an alternative spelling of `dataset`;
        // the converter reads `target.dataset || target.schema`.
        schema: {
            type: 'string',
        },
        priority: {
            type: 'string',
            enum: ['interactive', 'batch'],
            nullable: true,
        },
        retries: {
            type: 'integer',
            nullable: true,
        },
        location: {
            type: 'string',
            nullable: true,
        },
        maximum_bytes_billed: {
            type: 'integer',
            nullable: true,
        },
        timeout_seconds: {
            type: 'integer',
            nullable: true,
        },
        execution_project: {
            type: 'string',
            nullable: true,
        },
    },
    required: [],
    // Exactly one of `dataset` / `schema` must be present: ajv `oneOf`
    // fails validation when both or neither are given.
    oneOf: [
        {
            required: ['dataset'],
        },
        {
            required: ['schema'],
        },
    ],
};
59
/**
 * Validate a dbt `bigquery` target against bigqueryTargetJsonSchema and
 * convert it into Lightdash warehouse credentials.
 *
 * Supported auth methods: 'oauth', 'service-account', 'service-account-json'.
 * Throws ParseError on schema violations, an unsupported method, or a
 * missing `project` for a non-oauth method.
 */
const convertBigquerySchema = async (target) => {
    const validate = ajv_1.ajv.compile(exports.bigqueryTargetJsonSchema);
    if (validate(target)) {
        // Pick the credential resolver matching the profile's auth method.
        let getBigqueryCredentials;
        switch (target.method) {
            case 'oauth':
                getBigqueryCredentials = oauth_1.getBigqueryCredentialsFromOauth;
                break;
            case 'service-account':
                getBigqueryCredentials =
                    serviceAccount_1.getBigqueryCredentialsFromServiceAccount;
                break;
            case 'service-account-json':
                getBigqueryCredentials =
                    serviceAccount_1.getBigqueryCredentialsFromServiceAccountJson;
                break;
            default:
                throw new common_1.ParseError(`BigQuery method ${target.method} is not yet supported`);
        }
        // Only the oauth method is exempt from requiring an explicit project.
        if (target.project === undefined && target.method !== 'oauth')
            throw new common_1.ParseError(`BigQuery project is required for ${target.method} authentication method`);
        const result = {
            type: common_1.WarehouseTypes.BIGQUERY,
            project: target.project || '',
            // `dataset` and `schema` are interchangeable in profiles.yml.
            dataset: target.dataset || target.schema,
            timeoutSeconds: target.timeout_seconds,
            priority: target.priority,
            keyfileContents: {},
            retries: target.retries,
            maximumBytesBilled: target.maximum_bytes_billed,
            location: target.location,
            executionProject: target.execution_project,
        };
        const oauthOrKeyResult = await getBigqueryCredentials(target);
        // ADC results carry only an authenticationType marker; any other
        // result is treated as the keyfile contents themselves.
        if (oauthOrKeyResult.authenticationType ===
            common_1.BigqueryAuthenticationType.ADC) {
            result.authenticationType = common_1.BigqueryAuthenticationType.ADC;
        }
        else {
            result.keyfileContents = oauthOrKeyResult;
        }
        return result;
    }
    // Validation failed: render ajv errors into a readable message.
    const errs = (0, better_ajv_errors_1.default)(exports.bigqueryTargetJsonSchema, target, validate.errors || []);
    throw new common_1.ParseError(`Couldn't read profiles.yml file for ${target.type}:\n${errs}`);
};
exports.convertBigquerySchema = convertBigquerySchema;
@@ -0,0 +1,2 @@
1
/**
 * Resolve BigQuery credentials for the `oauth` profile method.
 * NOTE(review): implementation is not visible here — the caller
 * (convertBigquerySchema) treats the result either as an ADC marker or as
 * keyfile contents; confirm the exact shape against oauth.js.
 */
export declare const getBigqueryCredentialsFromOauth: () => Promise<Record<string, string>>;
//# sourceMappingURL=oauth.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"oauth.d.ts","sourceRoot":"","sources":["../../../../src/dbt/targets/Bigquery/oauth.ts"],"names":[],"mappings":"AAQA,eAAO,MAAM,+BAA+B,QAAa,OAAO,CAC5D,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CA8CzB,CAAC"}