@databricks/appkit 0.25.1 → 0.26.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/appkit/package.js +1 -1
- package/dist/cli/commands/codemod/index.js +16 -0
- package/dist/cli/commands/codemod/index.js.map +1 -0
- package/dist/cli/commands/codemod/on-plugins-ready.js +364 -0
- package/dist/cli/commands/codemod/on-plugins-ready.js.map +1 -0
- package/dist/cli/index.js +2 -0
- package/dist/cli/index.js.map +1 -1
- package/dist/connectors/lakebase-v1/client.js.map +1 -1
- package/dist/connectors/sql-warehouse/client.js +1 -0
- package/dist/connectors/sql-warehouse/client.js.map +1 -1
- package/dist/core/appkit.d.ts +12 -8
- package/dist/core/appkit.d.ts.map +1 -1
- package/dist/core/appkit.js +22 -9
- package/dist/core/appkit.js.map +1 -1
- package/dist/errors/server.d.ts +0 -5
- package/dist/errors/server.d.ts.map +1 -1
- package/dist/errors/server.js +0 -6
- package/dist/errors/server.js.map +1 -1
- package/dist/plugins/server/index.d.ts +16 -13
- package/dist/plugins/server/index.d.ts.map +1 -1
- package/dist/plugins/server/index.js +19 -17
- package/dist/plugins/server/index.js.map +1 -1
- package/dist/plugins/server/manifest.js +0 -5
- package/dist/plugins/server/types.d.ts +0 -1
- package/dist/plugins/server/types.d.ts.map +1 -1
- package/dist/registry/manifest-loader.d.ts +2 -2
- package/dist/registry/manifest-loader.d.ts.map +1 -1
- package/dist/stream/stream-manager.d.ts.map +1 -1
- package/dist/stream/stream-manager.js +4 -0
- package/dist/stream/stream-manager.js.map +1 -1
- package/docs/api/appkit/Class.ServerError.md +4 -26
- package/docs/api/appkit/Function.createApp.md +17 -15
- package/docs/plugins/server.md +25 -9
- package/package.json +1 -1
- package/sbom.cdx.json +1 -1
package/dist/appkit/package.js
CHANGED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { onPluginsReadyCommand } from "./on-plugins-ready.js";
|
|
2
|
+
import { Command } from "commander";
|
|
3
|
+
|
|
4
|
+
//#region src/cli/commands/codemod/index.ts
|
|
5
|
+
/**
|
|
6
|
+
* Parent command for codemod operations.
|
|
7
|
+
* Subcommands:
|
|
8
|
+
* - on-plugins-ready: Migrate from autoStart/extend/start to onPluginsReady callback
|
|
9
|
+
*/
|
|
10
|
+
// Parent `appkit codemod` command; hosts the on-plugins-ready subcommand
// that migrates projects to newer AppKit APIs.
const codemodCommand = new Command("codemod")
  .description("Run codemods to migrate to newer AppKit APIs")
  .addCommand(onPluginsReadyCommand)
  .addHelpText("after", `
Examples:
  $ appkit codemod on-plugins-ready --write`);
|
|
13
|
+
|
|
14
|
+
//#endregion
|
|
15
|
+
export { codemodCommand };
|
|
16
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","names":[],"sources":["../../../../src/cli/commands/codemod/index.ts"],"sourcesContent":["import { Command } from \"commander\";\nimport { onPluginsReadyCommand } from \"./on-plugins-ready\";\n\n/**\n * Parent command for codemod operations.\n * Subcommands:\n * - on-plugins-ready: Migrate from autoStart/extend/start to onPluginsReady callback\n */\nexport const codemodCommand = new Command(\"codemod\")\n .description(\"Run codemods to migrate to newer AppKit APIs\")\n .addCommand(onPluginsReadyCommand)\n .addHelpText(\n \"after\",\n `\nExamples:\n $ appkit codemod on-plugins-ready --write`,\n );\n"],"mappings":";;;;;;;;;AAQA,MAAa,iBAAiB,IAAI,QAAQ,UAAU,CACjD,YAAY,+CAA+C,CAC3D,WAAW,sBAAsB,CACjC,YACC,SACA;;6CAGD"}
|
|
@@ -0,0 +1,364 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { Command } from "commander";
|
|
4
|
+
import { Lang, parse } from "@ast-grep/napi";
|
|
5
|
+
|
|
6
|
+
//#region src/cli/commands/codemod/on-plugins-ready.ts
|
|
7
|
+
const SEARCH_DIRS = ["server", "src", "."];
const CANDIDATE_NAMES = ["server.ts", "index.ts"];
const SKIP_DIRS = new Set(["node_modules", "dist", "build", ".git"]);

/**
 * Locate candidate server entry files under rootDir.
 * Scans SEARCH_DIRS; for "." only CANDIDATE_NAMES are considered, while for
 * "server"/"src" every .ts file in the tree is read. A file qualifies when
 * its text mentions both "createApp" and "@databricks/appkit".
 * Returns a de-duplicated list of absolute paths.
 */
function findServerEntryFiles(rootDir) {
  const matches = [];
  for (const searchDir of SEARCH_DIRS) {
    const absDir = path.resolve(rootDir, searchDir);
    if (!fs.existsSync(absDir)) continue;
    let candidates;
    if (searchDir === ".") {
      candidates = CANDIDATE_NAMES.map((name) => path.join(absDir, name)).filter(fs.existsSync);
    } else {
      candidates = findTsFiles(absDir);
    }
    for (const candidate of candidates) {
      const text = fs.readFileSync(candidate, "utf-8");
      const usesAppKit = text.includes("createApp") && text.includes("@databricks/appkit");
      if (usesAppKit) matches.push(candidate);
    }
  }
  return [...new Set(matches)];
}

/**
 * Recursively collect .ts files under dir, pruning SKIP_DIRS subtrees.
 * Unreadable directories are silently ignored. The accumulator array is
 * shared across recursive calls and returned.
 */
function findTsFiles(dir, files = []) {
  let dirEntries;
  try {
    dirEntries = fs.readdirSync(dir, { withFileTypes: true });
  } catch {
    return files;
  }
  for (const dirEntry of dirEntries) {
    const entryPath = path.join(dir, dirEntry.name);
    if (dirEntry.isDirectory()) {
      if (!SKIP_DIRS.has(dirEntry.name)) findTsFiles(entryPath, files);
    } else if (dirEntry.isFile() && dirEntry.name.endsWith(".ts")) {
      files.push(entryPath);
    }
  }
  return files;
}
|
|
48
|
+
/**
 * True when some createApp({...}) call in the file already carries an
 * onPluginsReady property or method. Candidate calls are found with an
 * ast-grep pattern; the property itself is detected with a regex over the
 * matched call text.
 */
function isAlreadyMigrated(content) {
  const root = parse(Lang.TypeScript, content).root();
  const calls = root.findAll("createApp({ $$$PROPS })");
  return calls.some((call) => /\bonPluginsReady\s*[(:]/.test(call.text()));
}
|
|
54
|
+
/**
 * Find the index of the matching closing delimiter for an opening one.
 * Supports (), {}, and [].
 * String literals (", ', `) are skipped so delimiters inside them are not
 * counted. Returns -1 for an unsupported opener or when no balanced close
 * exists before the end of content.
 * NOTE(review): comments and regex literals are not skipped — assumed
 * acceptable for the sources this codemod targets; confirm if extended.
 */
function findMatchingClose(content, openIdx) {
  const PAIRS = { "(": ")", "{": "}", "[": "]" };
  const opener = content[openIdx];
  const closer = PAIRS[opener];
  if (!closer) return -1;
  let depth = 1;
  let pos = openIdx + 1;
  while (pos < content.length && depth > 0) {
    const ch = content[pos];
    if (ch === opener) {
      depth += 1;
    } else if (ch === closer) {
      depth -= 1;
    }
    // Jump over string literals so their contents never affect depth.
    if (ch === "\"" || ch === "'" || ch === "`") {
      pos = skipString(content, pos);
    } else {
      pos += 1;
    }
  }
  return depth === 0 ? pos - 1 : -1;
}

/**
 * Given the index of an opening quote, return the index just past the
 * matching closing quote, honoring backslash escapes. Returns
 * content.length for an unterminated literal.
 */
function skipString(content, startIdx) {
  const quote = content[startIdx];
  let pos = startIdx + 1;
  while (pos < content.length) {
    const ch = content[pos];
    if (ch === "\\") {
      pos += 2;
    } else if (ch === quote) {
      return pos + 1;
    } else {
      pos += 1;
    }
  }
  return pos;
}
|
|
93
|
+
/**
 * Remove any `autoStart: true|false` property from server({...}) calls.
 * When autoStart was the only property, the call collapses to server().
 * NOTE(review): the match is brace-naive ([^}]*), so server() configs
 * containing nested object literals are left untouched — presumably fine
 * for the simple configs this codemod targets; confirm if that changes.
 */
function stripAutoStartFromServerCalls(content) {
  const rewriteCall = (_whole, propsStr) => {
    const remaining = propsStr
      .replace(/autoStart\s*:\s*(true|false)\s*,?\s*/g, "")
      .replace(/,\s*$/, "")
      .trim();
    return remaining ? `server({ ${remaining} })` : "server()";
  };
  return content.replace(/server\(\{([^}]*)\}\)/g, rewriteCall);
}
|
|
100
|
+
/**
 * Pattern A: migrate `createApp({...}).then((app) => { ... })` — optionally
 * followed by `.catch(...)` — into a single createApp() call whose config
 * gains an `onPluginsReady(app) { ... }` property containing the old
 * callback body. `.start()` calls inside the body are removed, an existing
 * `.catch(...)` handler is re-attached, and `.catch(console.error)` is
 * appended when none was present.
 * Returns { migrated, content, warnings }; whenever the source does not
 * match the expected shape it bails with migrated: false and content
 * unchanged.
 */
function migratePatternA(content) {
  const warnings = [];
  // Find createApp(...).then( — plain indexOf, so the first occurrence
  // anywhere is used (assumed not to sit inside a comment or string).
  const createAppIdx = content.indexOf("createApp(");
  if (createAppIdx === -1) return {
    migrated: false,
    content,
    warnings
  };
  // Balanced closing paren of the createApp(...) argument list.
  const configOpenParen = content.indexOf("(", createAppIdx);
  const configCloseParen = findMatchingClose(content, configOpenParen);
  if (configCloseParen === -1) return {
    migrated: false,
    content,
    warnings
  };
  // Require an immediate .then( after the closing paren.
  const afterCreateApp = content.slice(configCloseParen + 1);
  if (!afterCreateApp.match(/^\s*\.then\s*\(/)) return {
    migrated: false,
    content,
    warnings
  };
  const thenStart = configCloseParen + 1 + afterCreateApp.indexOf(".then");
  const thenOpenParen = content.indexOf("(", thenStart + 4);
  const thenCloseParen = findMatchingClose(content, thenOpenParen);
  if (thenCloseParen === -1) return {
    migrated: false,
    content,
    warnings
  };
  // Extract the callback inside .then(...): must look like
  // `(param) => {` or `async (param) => {`.
  const thenRaw = content.slice(thenOpenParen + 1, thenCloseParen);
  const thenInner = thenRaw.trim();
  const callbackMatch = thenInner.match(/^(?:async\s+)?\(\s*(\w+)\s*\)\s*=>\s*\{/);
  if (!callbackMatch) return {
    migrated: false,
    content,
    warnings
  };
  const paramName = callbackMatch[1];
  const bodyOpenBrace = thenOpenParen + 1 + thenRaw.indexOf("{");
  const bodyCloseBrace = findMatchingClose(content, bodyOpenBrace);
  if (bodyCloseBrace === -1) return {
    migrated: false,
    content,
    warnings
  };
  let callbackBody = content.slice(bodyOpenBrace + 1, bodyCloseBrace).trim();
  // Remove entire statements that are just .start() calls (e.g.
  // `await appkit.server.start();`), dangling `.start()` chain links, and
  // collapse the triple blank lines left behind.
  callbackBody = callbackBody.replace(/^\s*(?:await\s+)?\w+\.server\s*\.\s*start\(\s*\)\s*;?\s*$/gm, "").replace(/\n\s*\.start\(\s*\)\s*;?/g, ";").replace(/\.start\(\s*\)/g, "").replace(/\n\s*\n\s*\n/g, "\n\n").trim();
  // Ensure the body ends with a statement terminator unless it already
  // ends with a block close.
  if (callbackBody.endsWith(";")) {} else if (!callbackBody.endsWith("}")) callbackBody += ";";
  // Preserve async-ness of the original .then callback.
  const isAsync = /^async\s/.test(thenInner.trim());
  // Look for a .catch(...) directly after .then(...), tolerating one
  // intervening `)`.
  const catchPatternMatch = content.slice(thenCloseParen + 1).match(/^\s*(?:\)\s*)?\.catch\s*\(/);
  let catchSuffix;
  let consumeAfterThen;
  if (catchPatternMatch) {
    const catchOpenParen = thenCloseParen + 1 + catchPatternMatch[0].length - 1;
    const catchCloseParen = findMatchingClose(content, catchOpenParen);
    if (catchCloseParen !== -1) {
      // Re-attach the user's original catch handler verbatim.
      catchSuffix = `.catch(${content.slice(catchOpenParen + 1, catchCloseParen).trim()})`;
      consumeAfterThen = catchCloseParen + 1 - (thenCloseParen + 1);
    } else {
      catchSuffix = ".catch(console.error)";
      consumeAfterThen = 0;
    }
  } else {
    catchSuffix = ".catch(console.error)";
    consumeAfterThen = 0;
  }
  // Splice the onPluginsReady property in before the config's final `}`.
  const configStr = content.slice(configOpenParen + 1, configCloseParen);
  const lastBraceIdx = configStr.lastIndexOf("}");
  if (lastBraceIdx === -1) return {
    migrated: false,
    content,
    warnings
  };
  const beforeLastBrace = configStr.slice(0, lastBraceIdx).trimEnd();
  const needsComma = beforeLastBrace.endsWith(",") ? "" : ",";
  // Re-indent the callback body one level inside the new method.
  const indentedBody = callbackBody.split("\n").map((line) => `    ${line.trimStart()}`).join("\n");
  const newConfig = `${beforeLastBrace}${`${needsComma}\n  ${isAsync ? "async " : ""}onPluginsReady(${paramName}) {\n${indentedBody}\n  },`}\n}`;
  // Consume any trailing `)` / `;` / whitespace left from the old chain.
  let finalEnd = thenCloseParen + 1 + consumeAfterThen;
  const trailing = content.slice(finalEnd).match(/^\s*\)?\s*;?\s*/);
  if (trailing) finalEnd += trailing[0].length;
  return {
    migrated: true,
    content: content.slice(0, createAppIdx) + `createApp(${newConfig})${catchSuffix};` + content.slice(finalEnd),
    warnings
  };
}
|
|
186
|
+
/**
 * Pattern B: migrate
 *   `const app = await createApp({...});`
 *   `app.server.extend(...);`   // optional, at most one
 *   `await app.server.start();` // required
 * into a single `await createApp({...});` whose config gains an
 * `onPluginsReady(app) { app.server.extend(...); }` method (only when an
 * extend call existed). The variable binding is dropped because all of its
 * uses (extend/start) are removed.
 * Bails with migrated: false — adding a warning — when the handle is used
 * for anything other than `.server`, or when multiple extend calls exist.
 */
function migratePatternB(content) {
  const warnings = [];
  // Match: const/let <var> = await createApp(
  const match = content.match(/(?:const|let)\s+(\w+)\s*=\s*await\s+createApp\s*\(/);
  if (!match) return {
    migrated: false,
    content,
    warnings
  };
  const varName = match[1];
  // First occurrence of the matched text coincides with the match position.
  const matchIdx = content.indexOf(match[0]);
  // Balanced close of the createApp(...) argument list.
  const configOpenParen = matchIdx + match[0].length - 1;
  const configCloseParen = findMatchingClose(content, configOpenParen);
  if (configCloseParen === -1) return {
    migrated: false,
    content,
    warnings
  };
  // End of the declaration statement, including a trailing semicolon.
  const semiMatch = content.slice(configCloseParen + 1).match(/^\s*;/);
  const createAppEnd = configCloseParen + 1 + (semiMatch ? semiMatch[0].length : 0);
  const afterCreateApp = content.slice(createAppEnd);
  // Collect every `<var>.<prop>` usage after the declaration.
  const varUsagePattern = new RegExp(`\\b${varName}\\.(\\w+)`, "g");
  const usages = [];
  for (const usageMatch of afterCreateApp.matchAll(varUsagePattern)) usages.push({
    plugin: usageMatch[1],
    index: usageMatch.index
  });
  // Any non-.server usage means the handle is still needed — punt.
  if (usages.filter((u) => u.plugin !== "server").length > 0) {
    warnings.push(`Found additional usage of '${varName}' handle outside server.extend/start. Please migrate manually.`);
    return {
      migrated: false,
      content,
      warnings
    };
  }
  const extendPattern = new RegExp(`\\b${varName}\\.server\\.extend\\s*\\(`, "g");
  const startPattern = new RegExp(`(?:await\\s+)?${varName}\\.server\\.start\\s*\\(\\s*\\)\\s*;`);
  const extendMatches = [...afterCreateApp.matchAll(extendPattern)];
  if (extendMatches.length > 1) {
    warnings.push(`Found ${extendMatches.length} server.extend() calls. Please migrate manually.`);
    return {
      migrated: false,
      content,
      warnings
    };
  }
  const extendExec = extendMatches[0] ?? null;
  // A start() statement is required for this pattern to apply.
  const startExec = startPattern.exec(afterCreateApp);
  if (!startExec) return {
    migrated: false,
    content,
    warnings
  };
  // Capture the extend argument text and the full extend statement so the
  // statement can be deleted once the argument moves into onPluginsReady.
  let extendArg = "";
  let extendFullStatement = "";
  if (extendExec) {
    const extendOpenParen = createAppEnd + extendExec.index + extendExec[0].length - 1;
    const extendCloseParen = findMatchingClose(content, extendOpenParen);
    if (extendCloseParen !== -1) {
      extendArg = content.slice(extendOpenParen + 1, extendCloseParen).trim();
      const stmtStart = createAppEnd + extendExec.index;
      let stmtEnd = extendCloseParen + 1;
      const trailingSemi = content.slice(stmtEnd).match(/^\s*;/);
      if (trailingSemi) stmtEnd += trailingSemi[0].length;
      extendFullStatement = content.slice(stmtStart, stmtEnd);
    }
  }
  const startFullStatement = startExec[0];
  // Splice the onPluginsReady method in before the config's final `}`.
  const configStr = content.slice(configOpenParen + 1, configCloseParen);
  const lastBraceIdx = configStr.lastIndexOf("}");
  if (lastBraceIdx === -1) return {
    migrated: false,
    content,
    warnings
  };
  const beforeLastBrace = configStr.slice(0, lastBraceIdx).trimEnd();
  const needsComma = beforeLastBrace.endsWith(",") ? "" : ",";
  let onPluginsReadyProp;
  if (extendArg) onPluginsReadyProp = `${needsComma}\n  onPluginsReady(${varName}) {\n    ${varName}.server.extend(${extendArg});\n  },`;
  else onPluginsReadyProp = "";
  // Note: the `const <var> = ` binding is intentionally dropped here.
  const newCreateApp = `await createApp(${`${beforeLastBrace}${onPluginsReadyProp}\n}`});`;
  let result = content.slice(0, matchIdx) + newCreateApp;
  // Remove the now-redundant extend/start statements and collapse the
  // blank-line runs they leave behind.
  let remaining = afterCreateApp;
  if (extendFullStatement) remaining = remaining.replace(extendFullStatement, "");
  remaining = remaining.replace(startFullStatement, "");
  remaining = remaining.replace(/\n\s*\n\s*\n/g, "\n\n");
  result += remaining;
  return {
    migrated: true,
    content: result,
    warnings
  };
}
|
|
278
|
+
/**
 * Run the full migration pipeline on one file and return the outcome
 * without writing anything to disk.
 * Order: short-circuit when already migrated; strip autoStart from
 * server() calls; try pattern A (.then chain) then pattern B
 * (await + extend/start); finally report an autoStart-only change when
 * that was all that differed.
 */
function migrateFile(filePath) {
  const original = fs.readFileSync(filePath, "utf-8");
  if (isAlreadyMigrated(original)) {
    return {
      migrated: false,
      content: original,
      warnings: ["Already migrated -- no changes needed."]
    };
  }
  const stripped = stripAutoStartFromServerCalls(original);
  const collected = [];
  // Pattern A takes precedence; warnings from every attempted pattern
  // accumulate regardless of which one succeeds.
  for (const attempt of [migratePatternA, migratePatternB]) {
    const outcome = attempt(stripped);
    collected.push(...outcome.warnings);
    if (outcome.migrated) {
      return {
        migrated: true,
        content: outcome.content,
        warnings: collected
      };
    }
  }
  // Neither pattern applied — the autoStart strip alone may still count
  // as a migration.
  const autoStartOnly = stripped !== original;
  return {
    migrated: autoStartOnly,
    content: autoStartOnly ? stripped : original,
    warnings: collected
  };
}
|
|
318
|
+
/**
 * Entry point for the on-plugins-ready codemod CLI action.
 * Resolves the target file list (explicit --path or auto-detection),
 * migrates each file, and either writes the result (--write) or prints a
 * dry-run preview. Exits 1 for a missing --path file, 0 when nothing to do.
 */
function runCodemod(options) {
  const rootDir = process.cwd();
  const write = options.write ?? false;
  let files;
  if (options.path) {
    const absPath = path.resolve(rootDir, options.path);
    if (!fs.existsSync(absPath)) {
      console.error(`File not found: ${absPath}`);
      process.exit(1);
    }
    files = [absPath];
  } else {
    files = findServerEntryFiles(rootDir);
  }
  if (files.length === 0) {
    console.log("No files found importing createApp from @databricks/appkit.");
    console.log("Use --path to specify a file explicitly.");
    process.exit(0);
  }
  let hasChanges = false;
  for (const file of files) {
    const relPath = path.relative(rootDir, file);
    const result = migrateFile(file);
    for (const warning of result.warnings) {
      console.log(`  ${relPath}: ${warning}`);
    }
    if (!result.migrated) {
      if (result.warnings.length === 0) {
        console.log(`  ${relPath}: No migration needed.`);
      }
      continue;
    }
    hasChanges = true;
    if (!write) {
      // Dry run: show the would-be file contents instead of writing.
      console.log(`\n--- ${relPath} (dry run) ---`);
      console.log(result.content);
      console.log("---");
      continue;
    }
    fs.writeFileSync(file, result.content, "utf-8");
    console.log(`  ${relPath}: Migrated successfully.`);
  }
  if (hasChanges && !write) {
    console.log("\nDry run complete. Run with --write to apply changes.");
  }
}
|
|
356
|
+
const onPluginsReadyCommand = new Command("on-plugins-ready").description("Migrate createApp usage from autoStart/extend/start pattern to onPluginsReady callback").option("--path <file>", "Path to the server entry file to migrate").option("--write", "Apply changes (default: dry-run)", false).addHelpText("after", `
|
|
357
|
+
Examples:
|
|
358
|
+
$ appkit codemod on-plugins-ready # dry-run, auto-detect files
|
|
359
|
+
$ appkit codemod on-plugins-ready --write # apply changes
|
|
360
|
+
$ appkit codemod on-plugins-ready --path server.ts # migrate a specific file`).action(runCodemod);
|
|
361
|
+
|
|
362
|
+
//#endregion
|
|
363
|
+
export { onPluginsReadyCommand };
|
|
364
|
+
//# sourceMappingURL=on-plugins-ready.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"on-plugins-ready.js","names":[],"sources":["../../../../src/cli/commands/codemod/on-plugins-ready.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { Lang, parse } from \"@ast-grep/napi\";\nimport { Command } from \"commander\";\n\nconst SEARCH_DIRS = [\"server\", \"src\", \".\"];\nconst CANDIDATE_NAMES = [\"server.ts\", \"index.ts\"];\nconst SKIP_DIRS = new Set([\"node_modules\", \"dist\", \"build\", \".git\"]);\n\nfunction findServerEntryFiles(rootDir: string): string[] {\n const results: string[] = [];\n\n for (const dir of SEARCH_DIRS) {\n const absDir = path.resolve(rootDir, dir);\n if (!fs.existsSync(absDir)) continue;\n\n const files =\n dir === \".\"\n ? CANDIDATE_NAMES.map((n) => path.join(absDir, n)).filter(fs.existsSync)\n : findTsFiles(absDir);\n\n for (const file of files) {\n const content = fs.readFileSync(file, \"utf-8\");\n if (\n content.includes(\"createApp\") &&\n content.includes(\"@databricks/appkit\")\n ) {\n results.push(file);\n }\n }\n }\n\n return [...new Set(results)];\n}\n\nfunction findTsFiles(dir: string, files: string[] = []): string[] {\n let entries: fs.Dirent[];\n try {\n entries = fs.readdirSync(dir, { withFileTypes: true });\n } catch {\n return files;\n }\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n if (entry.isDirectory()) {\n if (SKIP_DIRS.has(entry.name)) continue;\n findTsFiles(fullPath, files);\n } else if (entry.isFile() && entry.name.endsWith(\".ts\")) {\n files.push(fullPath);\n }\n }\n\n return files;\n}\n\nfunction isAlreadyMigrated(content: string): boolean {\n const ast = parse(Lang.TypeScript, content);\n const root = ast.root();\n return root.findAll(\"createApp({ $$$PROPS })\").some((match) => {\n const text = match.text();\n return /\\bonPluginsReady\\s*[(:]/.test(text);\n });\n}\n\n/**\n * Find the index of the matching closing delimiter for an opening one.\n * Supports (), {}, and [].\n */\nfunction 
findMatchingClose(content: string, openIdx: number): number {\n const open = content[openIdx];\n const closeMap: Record<string, string> = {\n \"(\": \")\",\n \"{\": \"}\",\n \"[\": \"]\",\n };\n const close = closeMap[open];\n if (!close) return -1;\n\n let depth = 1;\n let i = openIdx + 1;\n while (i < content.length && depth > 0) {\n const ch = content[i];\n if (ch === open) depth++;\n else if (ch === close) depth--;\n\n // skip string literals\n if (ch === '\"' || ch === \"'\" || ch === \"`\") {\n i = skipString(content, i);\n continue;\n }\n i++;\n }\n return depth === 0 ? i - 1 : -1;\n}\n\nfunction skipString(content: string, startIdx: number): number {\n const quote = content[startIdx];\n let i = startIdx + 1;\n while (i < content.length) {\n if (content[i] === \"\\\\\") {\n i += 2;\n continue;\n }\n if (content[i] === quote) return i + 1;\n i++;\n }\n return i;\n}\n\nfunction stripAutoStartFromServerCalls(content: string): string {\n return content.replace(\n /server\\(\\{([^}]*)\\}\\)/g,\n (_fullMatch, propsStr: string) => {\n const cleaned = propsStr\n .replace(/autoStart\\s*:\\s*(true|false)\\s*,?\\s*/g, \"\")\n .replace(/,\\s*$/, \"\")\n .trim();\n if (!cleaned) return \"server()\";\n return `server({ ${cleaned} })`;\n },\n );\n}\n\ninterface MigrationResult {\n migrated: boolean;\n content: string;\n warnings: string[];\n}\n\nfunction migratePatternA(content: string): MigrationResult {\n const warnings: string[] = [];\n\n // Find createApp(...).then(\n const createAppIdx = content.indexOf(\"createApp(\");\n if (createAppIdx === -1) return { migrated: false, content, warnings };\n\n // Find the opening paren of createApp(\n const configOpenParen = content.indexOf(\"(\", createAppIdx);\n const configCloseParen = findMatchingClose(content, configOpenParen);\n if (configCloseParen === -1) return { migrated: false, content, warnings };\n\n // Check for .then( after the closing paren\n const afterCreateApp = content.slice(configCloseParen + 1);\n const 
thenMatch = afterCreateApp.match(/^\\s*\\.then\\s*\\(/);\n if (!thenMatch) return { migrated: false, content, warnings };\n\n const thenStart = configCloseParen + 1 + afterCreateApp.indexOf(\".then\");\n const thenOpenParen = content.indexOf(\"(\", thenStart + 4);\n const thenCloseParen = findMatchingClose(content, thenOpenParen);\n if (thenCloseParen === -1) return { migrated: false, content, warnings };\n\n // Extract the callback inside .then(...)\n const thenRaw = content.slice(thenOpenParen + 1, thenCloseParen);\n const thenInner = thenRaw.trim();\n\n // Parse callback: (param) => { body } or async (param) => { body }\n const callbackMatch = thenInner.match(\n /^(?:async\\s+)?\\(\\s*(\\w+)\\s*\\)\\s*=>\\s*\\{/,\n );\n if (!callbackMatch) return { migrated: false, content, warnings };\n\n const paramName = callbackMatch[1];\n const bodyOpenBrace = thenOpenParen + 1 + thenRaw.indexOf(\"{\");\n const bodyCloseBrace = findMatchingClose(content, bodyOpenBrace);\n if (bodyCloseBrace === -1) return { migrated: false, content, warnings };\n\n let callbackBody = content.slice(bodyOpenBrace + 1, bodyCloseBrace).trim();\n\n // Remove entire statements that are just .start() calls (e.g. `await appkit.server.start();`)\n callbackBody = callbackBody\n .replace(/^\\s*(?:await\\s+)?\\w+\\.server\\s*\\.\\s*start\\(\\s*\\)\\s*;?\\s*$/gm, \"\")\n .replace(/\\n\\s*\\.start\\(\\s*\\)\\s*;?/g, \";\")\n .replace(/\\.start\\(\\s*\\)/g, \"\")\n .replace(/\\n\\s*\\n\\s*\\n/g, \"\\n\\n\")\n .trim();\n\n // Clean up trailing semicolons\n if (callbackBody.endsWith(\";\")) {\n // fine\n } else if (!callbackBody.endsWith(\"}\")) {\n callbackBody += \";\";\n }\n\n // Detect if the callback was async\n const isAsync = /^async\\s/.test(thenInner.trim());\n\n // Check for .catch() after .then(...) 
using brace-aware parsing\n const afterThenClose = content.slice(thenCloseParen + 1);\n const catchPatternMatch = afterThenClose.match(/^\\s*(?:\\)\\s*)?\\.catch\\s*\\(/);\n\n let catchSuffix: string;\n let consumeAfterThen: number;\n\n if (catchPatternMatch) {\n const catchOpenParen = thenCloseParen + 1 + catchPatternMatch[0].length - 1;\n const catchCloseParen = findMatchingClose(content, catchOpenParen);\n if (catchCloseParen !== -1) {\n const catchArg = content\n .slice(catchOpenParen + 1, catchCloseParen)\n .trim();\n catchSuffix = `.catch(${catchArg})`;\n consumeAfterThen = catchCloseParen + 1 - (thenCloseParen + 1);\n } else {\n catchSuffix = \".catch(console.error)\";\n consumeAfterThen = 0;\n }\n } else {\n catchSuffix = \".catch(console.error)\";\n consumeAfterThen = 0;\n }\n\n // Build the onPluginsReady property\n const configStr = content.slice(configOpenParen + 1, configCloseParen);\n const lastBraceIdx = configStr.lastIndexOf(\"}\");\n if (lastBraceIdx === -1) return { migrated: false, content, warnings };\n\n const beforeLastBrace = configStr.slice(0, lastBraceIdx).trimEnd();\n const needsComma = beforeLastBrace.endsWith(\",\") ? \"\" : \",\";\n\n // Indent the body properly\n const bodyLines = callbackBody.split(\"\\n\");\n const indentedBody = bodyLines\n .map((line) => ` ${line.trimStart()}`)\n .join(\"\\n\");\n\n const asyncPrefix = isAsync ? 
\"async \" : \"\";\n const onPluginsReadyProp = `${needsComma}\\n ${asyncPrefix}onPluginsReady(${paramName}) {\\n${indentedBody}\\n },`;\n const newConfig = `${beforeLastBrace}${onPluginsReadyProp}\\n}`;\n\n // Build the replacement\n const endIdx = thenCloseParen + 1 + consumeAfterThen;\n // Consume trailing ) ; and whitespace\n let finalEnd = endIdx;\n const trailing = content.slice(finalEnd).match(/^\\s*\\)?\\s*;?\\s*/);\n if (trailing) finalEnd += trailing[0].length;\n\n const newContent =\n content.slice(0, createAppIdx) +\n `createApp(${newConfig})${catchSuffix};` +\n content.slice(finalEnd);\n\n return { migrated: true, content: newContent, warnings };\n}\n\nfunction migratePatternB(content: string): MigrationResult {\n const warnings: string[] = [];\n\n // Match: const/let varName = await createApp({...});\n const awaitPattern = /(?:const|let)\\s+(\\w+)\\s*=\\s*await\\s+createApp\\s*\\(/;\n\n const match = content.match(awaitPattern);\n if (!match) return { migrated: false, content, warnings };\n\n const varName = match[1];\n const matchIdx = content.indexOf(match[0]);\n\n // Find the createApp(...) closing paren\n const configOpenParen = matchIdx + match[0].length - 1;\n const configCloseParen = findMatchingClose(content, configOpenParen);\n if (configCloseParen === -1) return { migrated: false, content, warnings };\n\n // Find the semicolon after the createApp call\n const afterCall = content.slice(configCloseParen + 1);\n const semiMatch = afterCall.match(/^\\s*;/);\n const createAppEnd =\n configCloseParen + 1 + (semiMatch ? 
semiMatch[0].length : 0);\n\n // Find all uses of varName after the createApp call\n const afterCreateApp = content.slice(createAppEnd);\n const varUsagePattern = new RegExp(`\\\\b${varName}\\\\.(\\\\w+)`, \"g\");\n\n const usages: { plugin: string; index: number }[] = [];\n for (const usageMatch of afterCreateApp.matchAll(varUsagePattern)) {\n usages.push({ plugin: usageMatch[1], index: usageMatch.index });\n }\n\n // Check for non-server usage\n const nonServerUsage = usages.filter((u) => u.plugin !== \"server\");\n if (nonServerUsage.length > 0) {\n warnings.push(\n `Found additional usage of '${varName}' handle outside server.extend/start. Please migrate manually.`,\n );\n return { migrated: false, content, warnings };\n }\n\n // Find the extend call(s) and start call in the after-createApp region\n const extendPattern = new RegExp(\n `\\\\b${varName}\\\\.server\\\\.extend\\\\s*\\\\(`,\n \"g\",\n );\n const startPattern = new RegExp(\n `(?:await\\\\s+)?${varName}\\\\.server\\\\.start\\\\s*\\\\(\\\\s*\\\\)\\\\s*;`,\n );\n\n const extendMatches = [...afterCreateApp.matchAll(extendPattern)];\n if (extendMatches.length > 1) {\n warnings.push(\n `Found ${extendMatches.length} server.extend() calls. Please migrate manually.`,\n );\n return { migrated: false, content, warnings };\n }\n\n const extendExec = extendMatches[0] ?? 
null;\n const startExec = startPattern.exec(afterCreateApp);\n\n if (!startExec) return { migrated: false, content, warnings };\n\n // Extract the extend call's argument\n let extendArg = \"\";\n let extendFullStatement = \"\";\n if (extendExec) {\n const extendOpenParen =\n createAppEnd + extendExec.index + extendExec[0].length - 1;\n const extendCloseParen = findMatchingClose(content, extendOpenParen);\n if (extendCloseParen !== -1) {\n extendArg = content.slice(extendOpenParen + 1, extendCloseParen).trim();\n // Find the full statement including trailing semicolon\n const stmtStart = createAppEnd + extendExec.index;\n let stmtEnd = extendCloseParen + 1;\n const afterExtend = content.slice(stmtEnd);\n const trailingSemi = afterExtend.match(/^\\s*;/);\n if (trailingSemi) stmtEnd += trailingSemi[0].length;\n extendFullStatement = content.slice(stmtStart, stmtEnd);\n }\n }\n\n const startFullStatement = startExec[0];\n\n // Build the onPluginsReady callback\n const configStr = content.slice(configOpenParen + 1, configCloseParen);\n const lastBraceIdx = configStr.lastIndexOf(\"}\");\n if (lastBraceIdx === -1) return { migrated: false, content, warnings };\n\n const beforeLastBrace = configStr.slice(0, lastBraceIdx).trimEnd();\n const needsComma = beforeLastBrace.endsWith(\",\") ? 
\"\" : \",\";\n\n let onPluginsReadyProp: string;\n if (extendArg) {\n onPluginsReadyProp =\n `${needsComma}\\n onPluginsReady(${varName}) {\\n` +\n ` ${varName}.server.extend(${extendArg});\\n` +\n \" },\";\n } else {\n onPluginsReadyProp = \"\";\n }\n\n const newConfig = `${beforeLastBrace}${onPluginsReadyProp}\\n}`;\n const newCreateApp = `await createApp(${newConfig});`;\n\n // Replace: remove const declaration, replace with plain await, remove extend + start\n let result = content.slice(0, matchIdx) + newCreateApp;\n let remaining = afterCreateApp;\n\n if (extendFullStatement) {\n remaining = remaining.replace(extendFullStatement, \"\");\n }\n remaining = remaining.replace(startFullStatement, \"\");\n\n // Clean up consecutive blank lines\n remaining = remaining.replace(/\\n\\s*\\n\\s*\\n/g, \"\\n\\n\");\n\n result += remaining;\n\n return { migrated: true, content: result, warnings };\n}\n\nexport function migrateFile(filePath: string): MigrationResult {\n const original = fs.readFileSync(filePath, \"utf-8\");\n\n if (isAlreadyMigrated(original)) {\n return {\n migrated: false,\n content: original,\n warnings: [\"Already migrated -- no changes needed.\"],\n };\n }\n\n const content = stripAutoStartFromServerCalls(original);\n const allWarnings: string[] = [];\n\n // Try Pattern A first\n const patternA = migratePatternA(content);\n if (patternA.migrated) {\n allWarnings.push(...patternA.warnings);\n return {\n migrated: true,\n content: patternA.content,\n warnings: allWarnings,\n };\n }\n allWarnings.push(...patternA.warnings);\n\n // Try Pattern B\n const patternB = migratePatternB(content);\n if (patternB.migrated) {\n allWarnings.push(...patternB.warnings);\n return {\n migrated: true,\n content: patternB.content,\n warnings: allWarnings,\n };\n }\n allWarnings.push(...patternB.warnings);\n\n // Check if autoStart was stripped (content changed but no pattern matched)\n if (content !== original) {\n return { migrated: true, content, warnings: allWarnings 
};\n }\n\n return { migrated: false, content: original, warnings: allWarnings };\n}\n\nfunction runCodemod(options: { path?: string; write?: boolean }) {\n const rootDir = process.cwd();\n const write = options.write ?? false;\n\n let files: string[];\n if (options.path) {\n const absPath = path.resolve(rootDir, options.path);\n if (!fs.existsSync(absPath)) {\n console.error(`File not found: ${absPath}`);\n process.exit(1);\n }\n files = [absPath];\n } else {\n files = findServerEntryFiles(rootDir);\n }\n\n if (files.length === 0) {\n console.log(\"No files found importing createApp from @databricks/appkit.\");\n console.log(\"Use --path to specify a file explicitly.\");\n process.exit(0);\n }\n\n let hasChanges = false;\n\n for (const file of files) {\n const relPath = path.relative(rootDir, file);\n const result = migrateFile(file);\n\n for (const warning of result.warnings) {\n console.log(` ${relPath}: ${warning}`);\n }\n\n if (!result.migrated) {\n if (result.warnings.length === 0) {\n console.log(` ${relPath}: No migration needed.`);\n }\n continue;\n }\n\n hasChanges = true;\n\n if (write) {\n fs.writeFileSync(file, result.content, \"utf-8\");\n console.log(` ${relPath}: Migrated successfully.`);\n } else {\n console.log(`\\n--- ${relPath} (dry run) ---`);\n console.log(result.content);\n console.log(\"---\");\n }\n }\n\n if (hasChanges && !write) {\n console.log(\"\\nDry run complete. 
Run with --write to apply changes.\");\n }\n}\n\nexport const onPluginsReadyCommand = new Command(\"on-plugins-ready\")\n .description(\n \"Migrate createApp usage from autoStart/extend/start pattern to onPluginsReady callback\",\n )\n .option(\"--path <file>\", \"Path to the server entry file to migrate\")\n .option(\"--write\", \"Apply changes (default: dry-run)\", false)\n .addHelpText(\n \"after\",\n `\nExamples:\n $ appkit codemod on-plugins-ready # dry-run, auto-detect files\n $ appkit codemod on-plugins-ready --write # apply changes\n $ appkit codemod on-plugins-ready --path server.ts # migrate a specific file`,\n )\n .action(runCodemod);\n"],"mappings":";;;;;;AAKA,MAAM,cAAc;CAAC;CAAU;CAAO;CAAI;AAC1C,MAAM,kBAAkB,CAAC,aAAa,WAAW;AACjD,MAAM,YAAY,IAAI,IAAI;CAAC;CAAgB;CAAQ;CAAS;CAAO,CAAC;AAEpE,SAAS,qBAAqB,SAA2B;CACvD,MAAM,UAAoB,EAAE;AAE5B,MAAK,MAAM,OAAO,aAAa;EAC7B,MAAM,SAAS,KAAK,QAAQ,SAAS,IAAI;AACzC,MAAI,CAAC,GAAG,WAAW,OAAO,CAAE;EAE5B,MAAM,QACJ,QAAQ,MACJ,gBAAgB,KAAK,MAAM,KAAK,KAAK,QAAQ,EAAE,CAAC,CAAC,OAAO,GAAG,WAAW,GACtE,YAAY,OAAO;AAEzB,OAAK,MAAM,QAAQ,OAAO;GACxB,MAAM,UAAU,GAAG,aAAa,MAAM,QAAQ;AAC9C,OACE,QAAQ,SAAS,YAAY,IAC7B,QAAQ,SAAS,qBAAqB,CAEtC,SAAQ,KAAK,KAAK;;;AAKxB,QAAO,CAAC,GAAG,IAAI,IAAI,QAAQ,CAAC;;AAG9B,SAAS,YAAY,KAAa,QAAkB,EAAE,EAAY;CAChE,IAAI;AACJ,KAAI;AACF,YAAU,GAAG,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC;SAChD;AACN,SAAO;;AAGT,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,WAAW,KAAK,KAAK,KAAK,MAAM,KAAK;AAC3C,MAAI,MAAM,aAAa,EAAE;AACvB,OAAI,UAAU,IAAI,MAAM,KAAK,CAAE;AAC/B,eAAY,UAAU,MAAM;aACnB,MAAM,QAAQ,IAAI,MAAM,KAAK,SAAS,MAAM,CACrD,OAAM,KAAK,SAAS;;AAIxB,QAAO;;AAGT,SAAS,kBAAkB,SAA0B;AAGnD,QAFY,MAAM,KAAK,YAAY,QAAQ,CAC1B,MAAM,CACX,QAAQ,0BAA0B,CAAC,MAAM,UAAU;EAC7D,MAAM,OAAO,MAAM,MAAM;AACzB,SAAO,0BAA0B,KAAK,KAAK;GAC3C;;;;;;AAOJ,SAAS,kBAAkB,SAAiB,SAAyB;CACnE,MAAM,OAAO,QAAQ;CAMrB,MAAM,QALmC;EACvC,KAAK;EACL,KAAK;EACL,KAAK;EACN,CACsB;AACvB,KAAI,CAAC,MAAO,QAAO;CAEnB,IAAI,QAAQ;CACZ,IAAI,IAAI,UAAU;AAClB,QAAO,IAAI,QAAQ,UAAU,QAAQ,GAAG;EACtC,MAAM,KAAK,QAAQ;AACnB,MAAI,OAAO,KAAM;WACR,OAAO,MAAO;AAGvB
,MAAI,OAAO,QAAO,OAAO,OAAO,OAAO,KAAK;AAC1C,OAAI,WAAW,SAAS,EAAE;AAC1B;;AAEF;;AAEF,QAAO,UAAU,IAAI,IAAI,IAAI;;AAG/B,SAAS,WAAW,SAAiB,UAA0B;CAC7D,MAAM,QAAQ,QAAQ;CACtB,IAAI,IAAI,WAAW;AACnB,QAAO,IAAI,QAAQ,QAAQ;AACzB,MAAI,QAAQ,OAAO,MAAM;AACvB,QAAK;AACL;;AAEF,MAAI,QAAQ,OAAO,MAAO,QAAO,IAAI;AACrC;;AAEF,QAAO;;AAGT,SAAS,8BAA8B,SAAyB;AAC9D,QAAO,QAAQ,QACb,2BACC,YAAY,aAAqB;EAChC,MAAM,UAAU,SACb,QAAQ,yCAAyC,GAAG,CACpD,QAAQ,SAAS,GAAG,CACpB,MAAM;AACT,MAAI,CAAC,QAAS,QAAO;AACrB,SAAO,YAAY,QAAQ;GAE9B;;AASH,SAAS,gBAAgB,SAAkC;CACzD,MAAM,WAAqB,EAAE;CAG7B,MAAM,eAAe,QAAQ,QAAQ,aAAa;AAClD,KAAI,iBAAiB,GAAI,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAGtE,MAAM,kBAAkB,QAAQ,QAAQ,KAAK,aAAa;CAC1D,MAAM,mBAAmB,kBAAkB,SAAS,gBAAgB;AACpE,KAAI,qBAAqB,GAAI,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAG1E,MAAM,iBAAiB,QAAQ,MAAM,mBAAmB,EAAE;AAE1D,KAAI,CADc,eAAe,MAAM,kBAAkB,CACzC,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAE7D,MAAM,YAAY,mBAAmB,IAAI,eAAe,QAAQ,QAAQ;CACxE,MAAM,gBAAgB,QAAQ,QAAQ,KAAK,YAAY,EAAE;CACzD,MAAM,iBAAiB,kBAAkB,SAAS,cAAc;AAChE,KAAI,mBAAmB,GAAI,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAGxE,MAAM,UAAU,QAAQ,MAAM,gBAAgB,GAAG,eAAe;CAChE,MAAM,YAAY,QAAQ,MAAM;CAGhC,MAAM,gBAAgB,UAAU,MAC9B,0CACD;AACD,KAAI,CAAC,cAAe,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAEjE,MAAM,YAAY,cAAc;CAChC,MAAM,gBAAgB,gBAAgB,IAAI,QAAQ,QAAQ,IAAI;CAC9D,MAAM,iBAAiB,kBAAkB,SAAS,cAAc;AAChE,KAAI,mBAAmB,GAAI,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAExE,IAAI,eAAe,QAAQ,MAAM,gBAAgB,GAAG,eAAe,CAAC,MAAM;AAG1E,gBAAe,aACZ,QAAQ,+DAA+D,GAAG,CAC1E,QAAQ,6BAA6B,IAAI,CACzC,QAAQ,mBAAmB,GAAG,CAC9B,QAAQ,iBAAiB,OAAO,CAChC,MAAM;AAGT,KAAI,aAAa,SAAS,IAAI,EAAE,YAErB,CAAC,aAAa,SAAS,IAAI,CACpC,iBAAgB;CAIlB,MAAM,UAAU,WAAW,KAAK,UAAU,MAAM,CAAC;CAIjD,MAAM,oBADiB,QAAQ,MAAM,iBAAiB,EAAE,CACf,MAAM,6BAA6B;CAE5E,IAAI;CACJ,IAAI;AAEJ,KAAI,mBAAmB;EACrB,MAAM,iBAAiB,iBAAiB,IAAI,kBAAkB,GAAG,SAAS;EAC1E,MAAM,kBAAkB,kBAAkB,SAAS,eAAe;AAClE,MAAI,oBAAoB,IAAI;AAI1B,iBAAc,UAHG,QACd,MAAM,iBAAiB,GAAG,gBAAgB,CAC1C,MAAM,CACwB;AACjC,sBAAmB,kBAAkB,KAAK,iBAAiB;SACtD;AACL,iBAAc;AACd,sBAAmB;;QAEhB;AACL,gBAAc;AACd,qBAAmB;;CAIrB,MAAM,YAAY,QAAQ,MAAM,kBAAkB,GAAG,
iBAAiB;CACtE,MAAM,eAAe,UAAU,YAAY,IAAI;AAC/C,KAAI,iBAAiB,GAAI,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAEtE,MAAM,kBAAkB,UAAU,MAAM,GAAG,aAAa,CAAC,SAAS;CAClE,MAAM,aAAa,gBAAgB,SAAS,IAAI,GAAG,KAAK;CAIxD,MAAM,eADY,aAAa,MAAM,KAAK,CAEvC,KAAK,SAAS,OAAO,KAAK,WAAW,GAAG,CACxC,KAAK,KAAK;CAIb,MAAM,YAAY,GAAG,kBADM,GAAG,WAAW,MADrB,UAAU,WAAW,GACkB,iBAAiB,UAAU,OAAO,aAAa,QAChD;CAK1D,IAAI,WAFW,iBAAiB,IAAI;CAGpC,MAAM,WAAW,QAAQ,MAAM,SAAS,CAAC,MAAM,kBAAkB;AACjE,KAAI,SAAU,aAAY,SAAS,GAAG;AAOtC,QAAO;EAAE,UAAU;EAAM,SAJvB,QAAQ,MAAM,GAAG,aAAa,GAC9B,aAAa,UAAU,GAAG,YAAY,KACtC,QAAQ,MAAM,SAAS;EAEqB;EAAU;;AAG1D,SAAS,gBAAgB,SAAkC;CACzD,MAAM,WAAqB,EAAE;CAK7B,MAAM,QAAQ,QAAQ,MAFD,qDAEoB;AACzC,KAAI,CAAC,MAAO,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAEzD,MAAM,UAAU,MAAM;CACtB,MAAM,WAAW,QAAQ,QAAQ,MAAM,GAAG;CAG1C,MAAM,kBAAkB,WAAW,MAAM,GAAG,SAAS;CACrD,MAAM,mBAAmB,kBAAkB,SAAS,gBAAgB;AACpE,KAAI,qBAAqB,GAAI,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAI1E,MAAM,YADY,QAAQ,MAAM,mBAAmB,EAAE,CACzB,MAAM,QAAQ;CAC1C,MAAM,eACJ,mBAAmB,KAAK,YAAY,UAAU,GAAG,SAAS;CAG5D,MAAM,iBAAiB,QAAQ,MAAM,aAAa;CAClD,MAAM,kBAAkB,IAAI,OAAO,MAAM,QAAQ,YAAY,IAAI;CAEjE,MAAM,SAA8C,EAAE;AACtD,MAAK,MAAM,cAAc,eAAe,SAAS,gBAAgB,CAC/D,QAAO,KAAK;EAAE,QAAQ,WAAW;EAAI,OAAO,WAAW;EAAO,CAAC;AAKjE,KADuB,OAAO,QAAQ,MAAM,EAAE,WAAW,SAAS,CAC/C,SAAS,GAAG;AAC7B,WAAS,KACP,8BAA8B,QAAQ,gEACvC;AACD,SAAO;GAAE,UAAU;GAAO;GAAS;GAAU;;CAI/C,MAAM,gBAAgB,IAAI,OACxB,MAAM,QAAQ,4BACd,IACD;CACD,MAAM,eAAe,IAAI,OACvB,iBAAiB,QAAQ,sCAC1B;CAED,MAAM,gBAAgB,CAAC,GAAG,eAAe,SAAS,cAAc,CAAC;AACjE,KAAI,cAAc,SAAS,GAAG;AAC5B,WAAS,KACP,SAAS,cAAc,OAAO,kDAC/B;AACD,SAAO;GAAE,UAAU;GAAO;GAAS;GAAU;;CAG/C,MAAM,aAAa,cAAc,MAAM;CACvC,MAAM,YAAY,aAAa,KAAK,eAAe;AAEnD,KAAI,CAAC,UAAW,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAG7D,IAAI,YAAY;CAChB,IAAI,sBAAsB;AAC1B,KAAI,YAAY;EACd,MAAM,kBACJ,eAAe,WAAW,QAAQ,WAAW,GAAG,SAAS;EAC3D,MAAM,mBAAmB,kBAAkB,SAAS,gBAAgB;AACpE,MAAI,qBAAqB,IAAI;AAC3B,eAAY,QAAQ,MAAM,kBAAkB,GAAG,iBAAiB,CAAC,MAAM;GAEvE,MAAM,YAAY,eAAe,WAAW;GAC5C,IAAI,UAAU,mBAAmB;GAEjC,MAAM,eADc,QAAQ,MAAM,QAAQ,CACT,MAAM,QAAQ;AAC/C,OAAI,aAAc,YAAW,aAAa,GAAG;AAC7C,
yBAAsB,QAAQ,MAAM,WAAW,QAAQ;;;CAI3D,MAAM,qBAAqB,UAAU;CAGrC,MAAM,YAAY,QAAQ,MAAM,kBAAkB,GAAG,iBAAiB;CACtE,MAAM,eAAe,UAAU,YAAY,IAAI;AAC/C,KAAI,iBAAiB,GAAI,QAAO;EAAE,UAAU;EAAO;EAAS;EAAU;CAEtE,MAAM,kBAAkB,UAAU,MAAM,GAAG,aAAa,CAAC,SAAS;CAClE,MAAM,aAAa,gBAAgB,SAAS,IAAI,GAAG,KAAK;CAExD,IAAI;AACJ,KAAI,UACF,sBACE,GAAG,WAAW,qBAAqB,QAAQ,WACpC,QAAQ,iBAAiB,UAAU;KAG5C,sBAAqB;CAIvB,MAAM,eAAe,mBADH,GAAG,kBAAkB,mBAAmB,KACR;CAGlD,IAAI,SAAS,QAAQ,MAAM,GAAG,SAAS,GAAG;CAC1C,IAAI,YAAY;AAEhB,KAAI,oBACF,aAAY,UAAU,QAAQ,qBAAqB,GAAG;AAExD,aAAY,UAAU,QAAQ,oBAAoB,GAAG;AAGrD,aAAY,UAAU,QAAQ,iBAAiB,OAAO;AAEtD,WAAU;AAEV,QAAO;EAAE,UAAU;EAAM,SAAS;EAAQ;EAAU;;AAGtD,SAAgB,YAAY,UAAmC;CAC7D,MAAM,WAAW,GAAG,aAAa,UAAU,QAAQ;AAEnD,KAAI,kBAAkB,SAAS,CAC7B,QAAO;EACL,UAAU;EACV,SAAS;EACT,UAAU,CAAC,yCAAyC;EACrD;CAGH,MAAM,UAAU,8BAA8B,SAAS;CACvD,MAAM,cAAwB,EAAE;CAGhC,MAAM,WAAW,gBAAgB,QAAQ;AACzC,KAAI,SAAS,UAAU;AACrB,cAAY,KAAK,GAAG,SAAS,SAAS;AACtC,SAAO;GACL,UAAU;GACV,SAAS,SAAS;GAClB,UAAU;GACX;;AAEH,aAAY,KAAK,GAAG,SAAS,SAAS;CAGtC,MAAM,WAAW,gBAAgB,QAAQ;AACzC,KAAI,SAAS,UAAU;AACrB,cAAY,KAAK,GAAG,SAAS,SAAS;AACtC,SAAO;GACL,UAAU;GACV,SAAS,SAAS;GAClB,UAAU;GACX;;AAEH,aAAY,KAAK,GAAG,SAAS,SAAS;AAGtC,KAAI,YAAY,SACd,QAAO;EAAE,UAAU;EAAM;EAAS,UAAU;EAAa;AAG3D,QAAO;EAAE,UAAU;EAAO,SAAS;EAAU,UAAU;EAAa;;AAGtE,SAAS,WAAW,SAA6C;CAC/D,MAAM,UAAU,QAAQ,KAAK;CAC7B,MAAM,QAAQ,QAAQ,SAAS;CAE/B,IAAI;AACJ,KAAI,QAAQ,MAAM;EAChB,MAAM,UAAU,KAAK,QAAQ,SAAS,QAAQ,KAAK;AACnD,MAAI,CAAC,GAAG,WAAW,QAAQ,EAAE;AAC3B,WAAQ,MAAM,mBAAmB,UAAU;AAC3C,WAAQ,KAAK,EAAE;;AAEjB,UAAQ,CAAC,QAAQ;OAEjB,SAAQ,qBAAqB,QAAQ;AAGvC,KAAI,MAAM,WAAW,GAAG;AACtB,UAAQ,IAAI,8DAA8D;AAC1E,UAAQ,IAAI,2CAA2C;AACvD,UAAQ,KAAK,EAAE;;CAGjB,IAAI,aAAa;AAEjB,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,UAAU,KAAK,SAAS,SAAS,KAAK;EAC5C,MAAM,SAAS,YAAY,KAAK;AAEhC,OAAK,MAAM,WAAW,OAAO,SAC3B,SAAQ,IAAI,KAAK,QAAQ,IAAI,UAAU;AAGzC,MAAI,CAAC,OAAO,UAAU;AACpB,OAAI,OAAO,SAAS,WAAW,EAC7B,SAAQ,IAAI,KAAK,QAAQ,wBAAwB;AAEnD;;AAGF,eAAa;AAEb,MAAI,OAAO;AACT,MAAG,cAAc,MAAM,OAAO,SAAS,QAAQ;AAC/C,WAAQ,IAAI,KAAK,QAAQ,0BAA0B;SAC9C;AACL,WAAQ,IAAI,SAAS,QAAQ,gB
AAgB;AAC7C,WAAQ,IAAI,OAAO,QAAQ;AAC3B,WAAQ,IAAI,MAAM;;;AAItB,KAAI,cAAc,CAAC,MACjB,SAAQ,IAAI,yDAAyD;;AAIzE,MAAa,wBAAwB,IAAI,QAAQ,mBAAmB,CACjE,YACC,yFACD,CACA,OAAO,iBAAiB,2CAA2C,CACnE,OAAO,WAAW,oCAAoC,MAAM,CAC5D,YACC,SACA;;;;kFAKD,CACA,OAAO,WAAW"}
|
package/dist/cli/index.js
CHANGED
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
+
import { codemodCommand } from "./commands/codemod/index.js";
|
|
2
3
|
import { docsCommand } from "./commands/docs.js";
|
|
3
4
|
import { generateTypesCommand } from "./commands/generate-types.js";
|
|
4
5
|
import { lintCommand } from "./commands/lint.js";
|
|
@@ -20,6 +21,7 @@ cmd.addCommand(generateTypesCommand);
|
|
|
20
21
|
cmd.addCommand(lintCommand);
|
|
21
22
|
cmd.addCommand(docsCommand);
|
|
22
23
|
cmd.addCommand(pluginCommand);
|
|
24
|
+
cmd.addCommand(codemodCommand);
|
|
23
25
|
cmd.parse();
|
|
24
26
|
|
|
25
27
|
//#endregion
|
package/dist/cli/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","names":[],"sources":["../../src/cli/index.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { readFileSync } from \"node:fs\";\nimport { dirname, join } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport { Command } from \"commander\";\nimport { docsCommand } from \"./commands/docs.js\";\nimport { generateTypesCommand } from \"./commands/generate-types.js\";\nimport { lintCommand } from \"./commands/lint.js\";\nimport { pluginCommand } from \"./commands/plugin/index.js\";\nimport { setupCommand } from \"./commands/setup.js\";\n\nconst __dirname = dirname(fileURLToPath(import.meta.url));\nconst pkgPath = join(__dirname, \"../../package.json\");\nconst pkg = JSON.parse(readFileSync(pkgPath, \"utf-8\"));\n\nconst cmd = new Command();\n\ncmd\n .name(\"appkit\")\n .description(\"CLI tools for Databricks AppKit\")\n .version(pkg.version);\n\ncmd.addCommand(setupCommand);\ncmd.addCommand(generateTypesCommand);\ncmd.addCommand(lintCommand);\ncmd.addCommand(docsCommand);\ncmd.addCommand(pluginCommand);\n\ncmd.parse();\n"],"mappings":"
|
|
1
|
+
{"version":3,"file":"index.js","names":[],"sources":["../../src/cli/index.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { readFileSync } from \"node:fs\";\nimport { dirname, join } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport { Command } from \"commander\";\nimport { codemodCommand } from \"./commands/codemod/index.js\";\nimport { docsCommand } from \"./commands/docs.js\";\nimport { generateTypesCommand } from \"./commands/generate-types.js\";\nimport { lintCommand } from \"./commands/lint.js\";\nimport { pluginCommand } from \"./commands/plugin/index.js\";\nimport { setupCommand } from \"./commands/setup.js\";\n\nconst __dirname = dirname(fileURLToPath(import.meta.url));\nconst pkgPath = join(__dirname, \"../../package.json\");\nconst pkg = JSON.parse(readFileSync(pkgPath, \"utf-8\"));\n\nconst cmd = new Command();\n\ncmd\n .name(\"appkit\")\n .description(\"CLI tools for Databricks AppKit\")\n .version(pkg.version);\n\ncmd.addCommand(setupCommand);\ncmd.addCommand(generateTypesCommand);\ncmd.addCommand(lintCommand);\ncmd.addCommand(docsCommand);\ncmd.addCommand(pluginCommand);\ncmd.addCommand(codemodCommand);\n\ncmd.parse();\n"],"mappings":";;;;;;;;;;;;;;AAcA,MAAM,UAAU,KADE,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC,EACzB,qBAAqB;AACrD,MAAM,MAAM,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC;AAEtD,MAAM,MAAM,IAAI,SAAS;AAEzB,IACG,KAAK,SAAS,CACd,YAAY,kCAAkC,CAC9C,QAAQ,IAAI,QAAQ;AAEvB,IAAI,WAAW,aAAa;AAC5B,IAAI,WAAW,qBAAqB;AACpC,IAAI,WAAW,YAAY;AAC3B,IAAI,WAAW,YAAY;AAC3B,IAAI,WAAW,cAAc;AAC7B,IAAI,WAAW,eAAe;AAE9B,IAAI,OAAO"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/lakebase-v1/client.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { ApiClient, Config } from \"@databricks/sdk-experimental\";\nimport pg from \"pg\";\nimport {\n AppKitError,\n AuthenticationError,\n ConfigurationError,\n ConnectionError,\n ValidationError,\n} from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport {\n type Counter,\n type Histogram,\n SpanStatusCode,\n TelemetryManager,\n type TelemetryProvider,\n} from \"../../telemetry\";\nimport { deepMerge } from \"../../utils\";\nimport { lakebaseV1Defaults } from \"./defaults\";\nimport type {\n LakebaseV1Config,\n LakebaseV1ConnectionConfig,\n LakebaseV1Credentials,\n} from \"./types\";\n\nconst logger = createLogger(\"connectors:lakebase-v1\");\n\n/**\n * Enterprise-grade connector for Databricks Lakebase Provisioned\n *\n * @deprecated This connector is for Lakebase Provisioned only.\n * For new projects, use Lakebase Autoscaling instead: https://docs.databricks.com/aws/en/oltp/projects/\n *\n * This connector is compatible with Lakebase Provisioned: https://docs.databricks.com/aws/en/oltp/instances/\n *\n * Lakebase Autoscaling offers:\n * - Automatic compute scaling\n * - Scale-to-zero for cost optimization\n * - Database branching for development\n * - Instant restore capabilities\n *\n * Use the new LakebaseConnector (coming in a future release) for Lakebase Autoscaling support.\n *\n * @example Simplest - everything from env/context\n * ```typescript\n * const connector = new LakebaseV1Connector();\n * await connector.query('SELECT * FROM users');\n * ```\n *\n * @example With explicit connection string\n * ```typescript\n * const connector = new LakebaseV1Connector({\n * connectionString: 'postgresql://...'\n * });\n * ```\n */\nexport class LakebaseV1Connector {\n private readonly name: 
string = \"lakebase-v1\";\n private readonly CACHE_BUFFER_MS = 2 * 60 * 1000;\n private readonly config: LakebaseV1Config;\n private readonly connectionConfig: LakebaseV1ConnectionConfig;\n private pool: pg.Pool | null = null;\n private credentials: LakebaseV1Credentials | null = null;\n\n // telemetry\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n queryCount: Counter;\n queryDuration: Histogram;\n };\n\n constructor(userConfig?: Partial<LakebaseV1Config>) {\n this.config = deepMerge(lakebaseV1Defaults, userConfig);\n this.connectionConfig = this.parseConnectionConfig();\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n queryCount: this.telemetry\n .getMeter()\n .createCounter(\"lakebase.v1.query.count\", {\n description: \"Total number of queries executed\",\n unit: \"1\",\n }),\n queryDuration: this.telemetry\n .getMeter()\n .createHistogram(\"lakebase.v1.query.duration\", {\n description: \"Duration of queries executed\",\n unit: \"ms\",\n }),\n };\n\n // validate configuration\n if (this.config.maxPoolSize < 1) {\n throw ValidationError.invalidValue(\n \"maxPoolSize\",\n this.config.maxPoolSize,\n \"at least 1\",\n );\n }\n }\n\n /**\n * Execute a SQL query\n *\n * @example\n * ```typescript\n * const users = await connector.query('SELECT * FROM users');\n * const user = await connector.query('SELECT * FROM users WHERE id = $1', [123]);\n * ```\n */\n async query<T extends pg.QueryResultRow>(\n sql: string,\n params?: any[],\n retryCount: number = 0,\n ): Promise<pg.QueryResult<T>> {\n const startTime = Date.now();\n\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.query\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.statement\": sql.substring(0, 500),\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n try {\n const pool = await this.getPool();\n const result = await pool.query<T>(sql, params);\n 
span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const result = await newPool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transient_error_retry\");\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.query<T>(sql, params, retryCount + 1);\n }\n\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.queryFailed(error as Error);\n } finally {\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /**\n * Execute a transaction\n *\n * COMMIT and ROLLBACK are automatically managed by the transaction function.\n *\n * @param callback - Callback function to execute within the transaction context\n * @example\n * ```typescript\n * await connector.transaction(async (client) => {\n * await client.query('INSERT INTO accounts (name) VALUES ($1)', ['Alice']);\n * await client.query('INSERT INTO logs (action) VALUES ($1)', ['Created Alice']);\n * });\n * ```\n */\n async transaction<T>(\n callback: (client: pg.PoolClient) => Promise<T>,\n retryCount: number = 0,\n ): Promise<T> {\n const startTime = Date.now();\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.transaction\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n const pool = 
await this.getPool();\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(client);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n try {\n await client.query(\"ROLLBACK\");\n } catch {}\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n client.release();\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const retryClient = await newPool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(retryClient);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (retryError) {\n try {\n await retryClient.query(\"ROLLBACK\");\n } catch {}\n throw retryError;\n } finally {\n retryClient.release();\n }\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transaction_error_retry\");\n client.release();\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.transaction<T>(callback, retryCount + 1);\n }\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.transactionFailed(error as Error);\n } finally {\n client.release();\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /** Check if database connection is healthy */\n async healthCheck(): Promise<boolean> {\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.healthCheck\",\n {},\n async (span) => {\n try {\n const result = await this.query<{ result: number }>(\n \"SELECT 1 as result\",\n );\n const healthy = result.rows[0]?.result === 1;\n span.setAttribute(\"db.healthy\", healthy);\n 
span.setStatus({ code: SpanStatusCode.OK });\n return healthy;\n } catch {\n span.setAttribute(\"db.healthy\", false);\n span.setStatus({ code: SpanStatusCode.ERROR });\n return false;\n } finally {\n span.end();\n }\n },\n );\n }\n\n /** Close connection pool (call on shutdown) */\n async close(): Promise<void> {\n if (this.pool) {\n await this.pool.end().catch((error: unknown) => {\n logger.error(\"Error closing connection pool: %O\", error);\n });\n this.pool = null;\n }\n this.credentials = null;\n }\n\n /** Setup graceful shutdown to close connection pools */\n shutdown(): void {\n process.on(\"SIGTERM\", () => this.close());\n process.on(\"SIGINT\", () => this.close());\n this.close();\n }\n\n /** Get Databricks workspace client - from config or execution context */\n private getWorkspaceClient(): WorkspaceClient {\n if (this.config.workspaceClient) {\n return this.config.workspaceClient;\n }\n\n try {\n const { getWorkspaceClient: getClient } = require(\"../../context\");\n const client = getClient();\n\n // cache it for subsequent calls\n this.config.workspaceClient = client;\n return client;\n } catch (_error) {\n throw ConnectionError.clientUnavailable(\n \"Databricks workspace client\",\n \"Either pass it in config or ensure ServiceContext is initialized\",\n );\n }\n }\n\n /** Get or create connection pool */\n private async getPool(): Promise<pg.Pool> {\n if (!this.connectionConfig) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config\",\n );\n }\n\n if (!this.pool) {\n const creds = await this.getCredentials();\n this.pool = this.createPool(creds);\n }\n return this.pool;\n }\n\n /** Create PostgreSQL pool */\n private createPool(credentials: {\n username: string;\n password: string;\n }): pg.Pool {\n const { host, database, port, sslMode } = this.connectionConfig;\n\n const pool = new pg.Pool({\n host,\n port,\n database,\n user: 
credentials.username,\n password: credentials.password,\n max: this.config.maxPoolSize,\n idleTimeoutMillis: this.config.idleTimeoutMs,\n connectionTimeoutMillis: this.config.connectionTimeoutMs,\n ssl: sslMode === \"require\" ? { rejectUnauthorized: true } : false,\n });\n\n pool.on(\"error\", (error: Error & { code?: string }) => {\n logger.error(\n \"Connection pool error: %s (code: %s)\",\n error.message,\n error.code,\n );\n });\n\n return pool;\n }\n\n /** Get or fetch credentials with caching */\n private async getCredentials(): Promise<{\n username: string;\n password: string;\n }> {\n const now = Date.now();\n\n // return cached if still valid\n if (\n this.credentials &&\n now < this.credentials.expiresAt - this.CACHE_BUFFER_MS\n ) {\n return this.credentials;\n }\n\n // fetch new credentials\n const username = await this.fetchUsername();\n const { token, expiresAt } = await this.fetchPassword();\n\n this.credentials = {\n username,\n password: token,\n expiresAt,\n };\n\n return { username, password: token };\n }\n\n /** Rotate credentials and recreate pool */\n private async rotateCredentials(): Promise<void> {\n // clear cached credentials\n this.credentials = null;\n\n if (this.pool) {\n const oldPool = this.pool;\n this.pool = null;\n oldPool.end().catch((error: unknown) => {\n logger.error(\n \"Error closing old connection pool during rotation: %O\",\n error,\n );\n });\n }\n }\n\n /** Fetch username from Databricks */\n private async fetchUsername(): Promise<string> {\n const workspaceClient = this.getWorkspaceClient();\n const user = await workspaceClient.currentUser.me();\n if (!user.userName) {\n throw AuthenticationError.userLookupFailed();\n }\n return user.userName;\n }\n\n /** Fetch password (OAuth token) from Databricks */\n private async fetchPassword(): Promise<{ token: string; expiresAt: number }> {\n const workspaceClient = this.getWorkspaceClient();\n const config = new Config({ host: workspaceClient.config.host });\n const apiClient = 
new ApiClient(config);\n\n if (!this.connectionConfig.appName) {\n throw ConfigurationError.resourceNotFound(\"Database app name\");\n }\n\n const credentials = await apiClient.request({\n path: `/api/2.0/database/credentials`,\n method: \"POST\",\n headers: new Headers(),\n raw: false,\n payload: {\n instance_names: [this.connectionConfig.appName],\n request_id: randomUUID(),\n },\n });\n\n if (!this.validateCredentials(credentials)) {\n throw AuthenticationError.credentialsFailed(\n this.connectionConfig.appName,\n );\n }\n\n const expiresAt = new Date(credentials.expiration_time).getTime();\n\n return { token: credentials.token, expiresAt };\n }\n\n /** Check if error is auth failure */\n private isAuthError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n (error as any).code === \"28P01\"\n );\n }\n\n /** Check if error is transient */\n private isTransientError(error: unknown): boolean {\n if (typeof error !== \"object\" || error === null || !(\"code\" in error)) {\n return false;\n }\n\n const code = (error as any).code;\n return (\n code === \"ECONNRESET\" ||\n code === \"ECONNREFUSED\" ||\n code === \"ETIMEDOUT\" ||\n code === \"57P01\" || // admin_shutdown\n code === \"57P03\" || // cannot_connect_now\n code === \"08006\" || // connection_failure\n code === \"08003\" || // connection_does_not_exist\n code === \"08000\" // connection_exception\n );\n }\n\n /** Type guard for credentials */\n private validateCredentials(\n value: unknown,\n ): value is { token: string; expiration_time: string } {\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n const credentials = value as { token: string; expiration_time: string };\n return (\n \"token\" in credentials &&\n typeof credentials.token === \"string\" &&\n \"expiration_time\" in credentials &&\n typeof credentials.expiration_time === \"string\" &&\n new Date(credentials.expiration_time).getTime() > Date.now()\n );\n 
}\n\n /** Parse connection configuration from config or environment */\n private parseConnectionConfig(): LakebaseV1ConnectionConfig {\n if (this.config.connectionString) {\n return this.parseConnectionString(this.config.connectionString);\n }\n\n // get connection from config\n if (this.config.host && this.config.database && this.config.appName) {\n return {\n host: this.config.host,\n database: this.config.database,\n port: this.config.port ?? 5432,\n sslMode: this.config.sslMode ?? \"require\",\n appName: this.config.appName,\n };\n }\n\n // get connection from environment variables\n const pgHost = process.env.PGHOST;\n const pgDatabase = process.env.PGDATABASE;\n const pgAppName = process.env.PGAPPNAME;\n if (!pgHost || !pgDatabase || !pgAppName) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Required env vars: PGHOST, PGDATABASE, PGAPPNAME. Optional: PGPORT (default: 5432), PGSSLMODE (default: require)\",\n );\n }\n const pgPort = process.env.PGPORT;\n const port = pgPort ? parseInt(pgPort, 10) : 5432;\n\n if (Number.isNaN(port)) {\n throw ValidationError.invalidValue(\"port\", pgPort, \"a number\");\n }\n\n const pgSSLMode = process.env.PGSSLMODE;\n const sslMode =\n (pgSSLMode as \"require\" | \"disable\" | \"prefer\") || \"require\";\n\n return {\n host: pgHost,\n database: pgDatabase,\n port,\n sslMode,\n appName: pgAppName,\n };\n }\n\n private parseConnectionString(\n connectionString: string,\n ): LakebaseV1ConnectionConfig {\n const url = new URL(connectionString);\n const appName = url.searchParams.get(\"appName\");\n if (!appName) {\n throw ConfigurationError.missingConnectionParam(\"appName\");\n }\n\n return {\n host: url.hostname,\n database: url.pathname.slice(1), // remove leading slash\n port: url.port ? 
parseInt(url.port, 10) : 5432,\n sslMode:\n (url.searchParams.get(\"sslmode\") as \"require\" | \"disable\" | \"prefer\") ??\n \"require\",\n appName: appName,\n };\n }\n}\n"],"mappings":";;;;;;;;AA2BA,MAAM,SAAS,aAAa,yBAAyB"}
|
|
1
|
+
{"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/lakebase-v1/client.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { ApiClient, Config } from \"@databricks/sdk-experimental\";\nimport pg from \"pg\";\nimport {\n AppKitError,\n AuthenticationError,\n ConfigurationError,\n ConnectionError,\n ValidationError,\n} from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport {\n type Counter,\n type Histogram,\n SpanStatusCode,\n TelemetryManager,\n type TelemetryProvider,\n} from \"../../telemetry\";\nimport { deepMerge } from \"../../utils\";\nimport { lakebaseV1Defaults } from \"./defaults\";\nimport type {\n LakebaseV1Config,\n LakebaseV1ConnectionConfig,\n LakebaseV1Credentials,\n} from \"./types\";\n\nconst logger = createLogger(\"connectors:lakebase-v1\");\n\n/**\n * Enterprise-grade connector for Databricks Lakebase Provisioned\n *\n * @deprecated This connector is for Lakebase Provisioned only.\n * For new projects, use Lakebase Autoscaling instead: https://docs.databricks.com/aws/en/oltp/projects/\n *\n * This connector is compatible with Lakebase Provisioned: https://docs.databricks.com/aws/en/oltp/instances/\n *\n * Lakebase Autoscaling offers:\n * - Automatic compute scaling\n * - Scale-to-zero for cost optimization\n * - Database branching for development\n * - Instant restore capabilities\n *\n * Use the new LakebaseConnector (coming in a future release) for Lakebase Autoscaling support.\n *\n * @example Simplest - everything from env/context\n * ```typescript\n * const connector = new LakebaseV1Connector();\n * await connector.query('SELECT * FROM users');\n * ```\n *\n * @example With explicit connection string\n * ```typescript\n * const connector = new LakebaseV1Connector({\n * connectionString: 'postgresql://...'\n * });\n * ```\n */\nexport class LakebaseV1Connector {\n private readonly name: 
string = \"lakebase-v1\";\n private readonly CACHE_BUFFER_MS = 2 * 60 * 1000;\n private readonly config: LakebaseV1Config;\n private readonly connectionConfig: LakebaseV1ConnectionConfig;\n private pool: pg.Pool | null = null;\n private credentials: LakebaseV1Credentials | null = null;\n\n // telemetry\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n queryCount: Counter;\n queryDuration: Histogram;\n };\n\n constructor(userConfig?: Partial<LakebaseV1Config>) {\n this.config = deepMerge(lakebaseV1Defaults, userConfig);\n this.connectionConfig = this.parseConnectionConfig();\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n queryCount: this.telemetry\n .getMeter()\n .createCounter(\"lakebase.v1.query.count\", {\n description: \"Total number of queries executed\",\n unit: \"1\",\n }),\n queryDuration: this.telemetry\n .getMeter()\n .createHistogram(\"lakebase.v1.query.duration\", {\n description: \"Duration of queries executed\",\n unit: \"ms\",\n }),\n };\n\n // validate configuration\n if (this.config.maxPoolSize < 1) {\n throw ValidationError.invalidValue(\n \"maxPoolSize\",\n this.config.maxPoolSize,\n \"at least 1\",\n );\n }\n }\n\n /**\n * Execute a SQL query\n *\n * @example\n * ```typescript\n * const users = await connector.query('SELECT * FROM users');\n * const user = await connector.query('SELECT * FROM users WHERE id = $1', [123]);\n * ```\n */\n async query<T extends pg.QueryResultRow>(\n sql: string,\n params?: any[],\n retryCount: number = 0,\n ): Promise<pg.QueryResult<T>> {\n const startTime = Date.now();\n\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.query\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.statement\": sql.substring(0, 500),\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n try {\n const pool = await this.getPool();\n const result = await pool.query<T>(sql, params);\n 
span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const result = await newPool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transient_error_retry\");\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.query<T>(sql, params, retryCount + 1);\n }\n\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n logger.error(\n \"Query execution failed: %s (code=%s)\",\n error instanceof Error ? error.message : String(error),\n (error as any)?.code,\n );\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.queryFailed(error as Error);\n } finally {\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /**\n * Execute a transaction\n *\n * COMMIT and ROLLBACK are automatically managed by the transaction function.\n *\n * @param callback - Callback function to execute within the transaction context\n * @example\n * ```typescript\n * await connector.transaction(async (client) => {\n * await client.query('INSERT INTO accounts (name) VALUES ($1)', ['Alice']);\n * await client.query('INSERT INTO logs (action) VALUES ($1)', ['Created Alice']);\n * });\n * ```\n */\n async transaction<T>(\n callback: (client: pg.PoolClient) => Promise<T>,\n retryCount: number = 0,\n ): Promise<T> {\n const startTime = Date.now();\n return this.telemetry.startActiveSpan(\n 
\"lakebase.v1.transaction\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n const pool = await this.getPool();\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(client);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n try {\n await client.query(\"ROLLBACK\");\n } catch {}\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n client.release();\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const retryClient = await newPool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(retryClient);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (retryError) {\n try {\n await retryClient.query(\"ROLLBACK\");\n } catch {}\n throw retryError;\n } finally {\n retryClient.release();\n }\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transaction_error_retry\");\n client.release();\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.transaction<T>(callback, retryCount + 1);\n }\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n logger.error(\n \"Transaction execution failed: %s (code=%s)\",\n error instanceof Error ? 
error.message : String(error),\n (error as any)?.code,\n );\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.transactionFailed(error as Error);\n } finally {\n client.release();\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /** Check if database connection is healthy */\n async healthCheck(): Promise<boolean> {\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.healthCheck\",\n {},\n async (span) => {\n try {\n const result = await this.query<{ result: number }>(\n \"SELECT 1 as result\",\n );\n const healthy = result.rows[0]?.result === 1;\n span.setAttribute(\"db.healthy\", healthy);\n span.setStatus({ code: SpanStatusCode.OK });\n return healthy;\n } catch {\n span.setAttribute(\"db.healthy\", false);\n span.setStatus({ code: SpanStatusCode.ERROR });\n return false;\n } finally {\n span.end();\n }\n },\n );\n }\n\n /** Close connection pool (call on shutdown) */\n async close(): Promise<void> {\n if (this.pool) {\n await this.pool.end().catch((error: unknown) => {\n logger.error(\"Error closing connection pool: %O\", error);\n });\n this.pool = null;\n }\n this.credentials = null;\n }\n\n /** Setup graceful shutdown to close connection pools */\n shutdown(): void {\n process.on(\"SIGTERM\", () => this.close());\n process.on(\"SIGINT\", () => this.close());\n this.close();\n }\n\n /** Get Databricks workspace client - from config or execution context */\n private getWorkspaceClient(): WorkspaceClient {\n if (this.config.workspaceClient) {\n return this.config.workspaceClient;\n }\n\n try {\n const { getWorkspaceClient: getClient } = require(\"../../context\");\n const client = getClient();\n\n // cache it for subsequent calls\n this.config.workspaceClient = client;\n return client;\n } catch (_error) {\n throw ConnectionError.clientUnavailable(\n \"Databricks workspace client\",\n \"Either pass 
it in config or ensure ServiceContext is initialized\",\n );\n }\n }\n\n /** Get or create connection pool */\n private async getPool(): Promise<pg.Pool> {\n if (!this.connectionConfig) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config\",\n );\n }\n\n if (!this.pool) {\n const creds = await this.getCredentials();\n this.pool = this.createPool(creds);\n }\n return this.pool;\n }\n\n /** Create PostgreSQL pool */\n private createPool(credentials: {\n username: string;\n password: string;\n }): pg.Pool {\n const { host, database, port, sslMode } = this.connectionConfig;\n\n const pool = new pg.Pool({\n host,\n port,\n database,\n user: credentials.username,\n password: credentials.password,\n max: this.config.maxPoolSize,\n idleTimeoutMillis: this.config.idleTimeoutMs,\n connectionTimeoutMillis: this.config.connectionTimeoutMs,\n ssl: sslMode === \"require\" ? { rejectUnauthorized: true } : false,\n });\n\n pool.on(\"error\", (error: Error & { code?: string }) => {\n logger.error(\n \"Connection pool error: %s (code: %s)\",\n error.message,\n error.code,\n );\n });\n\n return pool;\n }\n\n /** Get or fetch credentials with caching */\n private async getCredentials(): Promise<{\n username: string;\n password: string;\n }> {\n const now = Date.now();\n\n // return cached if still valid\n if (\n this.credentials &&\n now < this.credentials.expiresAt - this.CACHE_BUFFER_MS\n ) {\n return this.credentials;\n }\n\n // fetch new credentials\n const username = await this.fetchUsername();\n const { token, expiresAt } = await this.fetchPassword();\n\n this.credentials = {\n username,\n password: token,\n expiresAt,\n };\n\n return { username, password: token };\n }\n\n /** Rotate credentials and recreate pool */\n private async rotateCredentials(): Promise<void> {\n // clear cached credentials\n this.credentials = null;\n\n if (this.pool) {\n const oldPool = 
this.pool;\n this.pool = null;\n oldPool.end().catch((error: unknown) => {\n logger.error(\n \"Error closing old connection pool during rotation: %O\",\n error,\n );\n });\n }\n }\n\n /** Fetch username from Databricks */\n private async fetchUsername(): Promise<string> {\n const workspaceClient = this.getWorkspaceClient();\n const user = await workspaceClient.currentUser.me();\n if (!user.userName) {\n throw AuthenticationError.userLookupFailed();\n }\n return user.userName;\n }\n\n /** Fetch password (OAuth token) from Databricks */\n private async fetchPassword(): Promise<{ token: string; expiresAt: number }> {\n const workspaceClient = this.getWorkspaceClient();\n const config = new Config({ host: workspaceClient.config.host });\n const apiClient = new ApiClient(config);\n\n if (!this.connectionConfig.appName) {\n throw ConfigurationError.resourceNotFound(\"Database app name\");\n }\n\n const credentials = await apiClient.request({\n path: `/api/2.0/database/credentials`,\n method: \"POST\",\n headers: new Headers(),\n raw: false,\n payload: {\n instance_names: [this.connectionConfig.appName],\n request_id: randomUUID(),\n },\n });\n\n if (!this.validateCredentials(credentials)) {\n throw AuthenticationError.credentialsFailed(\n this.connectionConfig.appName,\n );\n }\n\n const expiresAt = new Date(credentials.expiration_time).getTime();\n\n return { token: credentials.token, expiresAt };\n }\n\n /** Check if error is auth failure */\n private isAuthError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n (error as any).code === \"28P01\"\n );\n }\n\n /** Check if error is transient */\n private isTransientError(error: unknown): boolean {\n if (typeof error !== \"object\" || error === null || !(\"code\" in error)) {\n return false;\n }\n\n const code = (error as any).code;\n return (\n code === \"ECONNRESET\" ||\n code === \"ECONNREFUSED\" ||\n code === \"ETIMEDOUT\" ||\n code === \"57P01\" || // 
admin_shutdown\n code === \"57P03\" || // cannot_connect_now\n code === \"08006\" || // connection_failure\n code === \"08003\" || // connection_does_not_exist\n code === \"08000\" // connection_exception\n );\n }\n\n /** Type guard for credentials */\n private validateCredentials(\n value: unknown,\n ): value is { token: string; expiration_time: string } {\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n const credentials = value as { token: string; expiration_time: string };\n return (\n \"token\" in credentials &&\n typeof credentials.token === \"string\" &&\n \"expiration_time\" in credentials &&\n typeof credentials.expiration_time === \"string\" &&\n new Date(credentials.expiration_time).getTime() > Date.now()\n );\n }\n\n /** Parse connection configuration from config or environment */\n private parseConnectionConfig(): LakebaseV1ConnectionConfig {\n if (this.config.connectionString) {\n return this.parseConnectionString(this.config.connectionString);\n }\n\n // get connection from config\n if (this.config.host && this.config.database && this.config.appName) {\n return {\n host: this.config.host,\n database: this.config.database,\n port: this.config.port ?? 5432,\n sslMode: this.config.sslMode ?? \"require\",\n appName: this.config.appName,\n };\n }\n\n // get connection from environment variables\n const pgHost = process.env.PGHOST;\n const pgDatabase = process.env.PGDATABASE;\n const pgAppName = process.env.PGAPPNAME;\n if (!pgHost || !pgDatabase || !pgAppName) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Required env vars: PGHOST, PGDATABASE, PGAPPNAME. Optional: PGPORT (default: 5432), PGSSLMODE (default: require)\",\n );\n }\n const pgPort = process.env.PGPORT;\n const port = pgPort ? 
parseInt(pgPort, 10) : 5432;\n\n if (Number.isNaN(port)) {\n throw ValidationError.invalidValue(\"port\", pgPort, \"a number\");\n }\n\n const pgSSLMode = process.env.PGSSLMODE;\n const sslMode =\n (pgSSLMode as \"require\" | \"disable\" | \"prefer\") || \"require\";\n\n return {\n host: pgHost,\n database: pgDatabase,\n port,\n sslMode,\n appName: pgAppName,\n };\n }\n\n private parseConnectionString(\n connectionString: string,\n ): LakebaseV1ConnectionConfig {\n const url = new URL(connectionString);\n const appName = url.searchParams.get(\"appName\");\n if (!appName) {\n throw ConfigurationError.missingConnectionParam(\"appName\");\n }\n\n return {\n host: url.hostname,\n database: url.pathname.slice(1), // remove leading slash\n port: url.port ? parseInt(url.port, 10) : 5432,\n sslMode:\n (url.searchParams.get(\"sslmode\") as \"require\" | \"disable\" | \"prefer\") ??\n \"require\",\n appName: appName,\n };\n }\n}\n"],"mappings":";;;;;;;;AA2BA,MAAM,SAAS,aAAa,yBAAyB"}
|
|
@@ -135,6 +135,7 @@ var SQLWarehouseConnector = class {
|
|
|
135
135
|
code: SpanStatusCode.ERROR,
|
|
136
136
|
message: error instanceof Error ? error.message : String(error)
|
|
137
137
|
});
|
|
138
|
+
logger.error("Statement execution failed: %s", error instanceof Error ? error.message : String(error));
|
|
138
139
|
}
|
|
139
140
|
if (error instanceof AppKitError) throw error;
|
|
140
141
|
throw ExecutionError.statementFailed(error instanceof Error ? error.message : String(error));
|