@databricks/appkit 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +3 -0
- package/DCO +25 -0
- package/LICENSE +203 -0
- package/NOTICE.md +73 -0
- package/README.md +35 -0
- package/bin/setup-claude.js +190 -0
- package/dist/_virtual/rolldown_runtime.js +39 -0
- package/dist/analytics/analytics.d.ts +31 -0
- package/dist/analytics/analytics.d.ts.map +1 -0
- package/dist/analytics/analytics.js +149 -0
- package/dist/analytics/analytics.js.map +1 -0
- package/dist/analytics/defaults.js +17 -0
- package/dist/analytics/defaults.js.map +1 -0
- package/dist/analytics/index.js +3 -0
- package/dist/analytics/query.js +50 -0
- package/dist/analytics/query.js.map +1 -0
- package/dist/analytics/types.d.ts +9 -0
- package/dist/analytics/types.d.ts.map +1 -0
- package/dist/app/index.d.ts +23 -0
- package/dist/app/index.d.ts.map +1 -0
- package/dist/app/index.js +49 -0
- package/dist/app/index.js.map +1 -0
- package/dist/appkit/package.js +7 -0
- package/dist/appkit/package.js.map +1 -0
- package/dist/cache/defaults.js +14 -0
- package/dist/cache/defaults.js.map +1 -0
- package/dist/cache/index.d.ts +119 -0
- package/dist/cache/index.d.ts.map +1 -0
- package/dist/cache/index.js +307 -0
- package/dist/cache/index.js.map +1 -0
- package/dist/cache/storage/defaults.js +16 -0
- package/dist/cache/storage/defaults.js.map +1 -0
- package/dist/cache/storage/index.js +4 -0
- package/dist/cache/storage/memory.js +87 -0
- package/dist/cache/storage/memory.js.map +1 -0
- package/dist/cache/storage/persistent.js +211 -0
- package/dist/cache/storage/persistent.js.map +1 -0
- package/dist/connectors/index.js +6 -0
- package/dist/connectors/lakebase/client.js +348 -0
- package/dist/connectors/lakebase/client.js.map +1 -0
- package/dist/connectors/lakebase/defaults.js +13 -0
- package/dist/connectors/lakebase/defaults.js.map +1 -0
- package/dist/connectors/lakebase/index.js +3 -0
- package/dist/connectors/sql-warehouse/client.js +284 -0
- package/dist/connectors/sql-warehouse/client.js.map +1 -0
- package/dist/connectors/sql-warehouse/defaults.js +12 -0
- package/dist/connectors/sql-warehouse/defaults.js.map +1 -0
- package/dist/connectors/sql-warehouse/index.js +3 -0
- package/dist/core/appkit.d.ts +14 -0
- package/dist/core/appkit.d.ts.map +1 -0
- package/dist/core/appkit.js +66 -0
- package/dist/core/appkit.js.map +1 -0
- package/dist/core/index.js +3 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.js +21 -0
- package/dist/index.js.map +1 -0
- package/dist/plugin/dev-reader.d.ts +20 -0
- package/dist/plugin/dev-reader.d.ts.map +1 -0
- package/dist/plugin/dev-reader.js +63 -0
- package/dist/plugin/dev-reader.js.map +1 -0
- package/dist/plugin/index.js +4 -0
- package/dist/plugin/interceptors/cache.js +15 -0
- package/dist/plugin/interceptors/cache.js.map +1 -0
- package/dist/plugin/interceptors/retry.js +32 -0
- package/dist/plugin/interceptors/retry.js.map +1 -0
- package/dist/plugin/interceptors/telemetry.js +33 -0
- package/dist/plugin/interceptors/telemetry.js.map +1 -0
- package/dist/plugin/interceptors/timeout.js +35 -0
- package/dist/plugin/interceptors/timeout.js.map +1 -0
- package/dist/plugin/plugin.d.ts +43 -0
- package/dist/plugin/plugin.d.ts.map +1 -0
- package/dist/plugin/plugin.js +119 -0
- package/dist/plugin/plugin.js.map +1 -0
- package/dist/plugin/to-plugin.d.ts +7 -0
- package/dist/plugin/to-plugin.d.ts.map +1 -0
- package/dist/plugin/to-plugin.js +12 -0
- package/dist/plugin/to-plugin.js.map +1 -0
- package/dist/server/base-server.js +24 -0
- package/dist/server/base-server.js.map +1 -0
- package/dist/server/index.d.ts +100 -0
- package/dist/server/index.d.ts.map +1 -0
- package/dist/server/index.js +224 -0
- package/dist/server/index.js.map +1 -0
- package/dist/server/remote-tunnel/denied.html +68 -0
- package/dist/server/remote-tunnel/gate.js +51 -0
- package/dist/server/remote-tunnel/gate.js.map +1 -0
- package/dist/server/remote-tunnel/index.html +165 -0
- package/dist/server/remote-tunnel/remote-tunnel-controller.js +100 -0
- package/dist/server/remote-tunnel/remote-tunnel-controller.js.map +1 -0
- package/dist/server/remote-tunnel/remote-tunnel-manager.js +320 -0
- package/dist/server/remote-tunnel/remote-tunnel-manager.js.map +1 -0
- package/dist/server/remote-tunnel/wait.html +158 -0
- package/dist/server/static-server.js +47 -0
- package/dist/server/static-server.js.map +1 -0
- package/dist/server/types.d.ts +14 -0
- package/dist/server/types.d.ts.map +1 -0
- package/dist/server/utils.js +70 -0
- package/dist/server/utils.js.map +1 -0
- package/dist/server/vite-dev-server.js +103 -0
- package/dist/server/vite-dev-server.js.map +1 -0
- package/dist/shared/src/cache.d.ts +62 -0
- package/dist/shared/src/cache.d.ts.map +1 -0
- package/dist/shared/src/execute.d.ts +46 -0
- package/dist/shared/src/execute.d.ts.map +1 -0
- package/dist/shared/src/plugin.d.ts +50 -0
- package/dist/shared/src/plugin.d.ts.map +1 -0
- package/dist/shared/src/sql/helpers.d.ts +160 -0
- package/dist/shared/src/sql/helpers.d.ts.map +1 -0
- package/dist/shared/src/sql/helpers.js +103 -0
- package/dist/shared/src/sql/helpers.js.map +1 -0
- package/dist/shared/src/sql/types.d.ts +34 -0
- package/dist/shared/src/sql/types.d.ts.map +1 -0
- package/dist/shared/src/tunnel.d.ts +30 -0
- package/dist/shared/src/tunnel.d.ts.map +1 -0
- package/dist/stream/arrow-stream-processor.js +154 -0
- package/dist/stream/arrow-stream-processor.js.map +1 -0
- package/dist/stream/buffers.js +88 -0
- package/dist/stream/buffers.js.map +1 -0
- package/dist/stream/defaults.js +14 -0
- package/dist/stream/defaults.js.map +1 -0
- package/dist/stream/index.js +6 -0
- package/dist/stream/sse-writer.js +61 -0
- package/dist/stream/sse-writer.js.map +1 -0
- package/dist/stream/stream-manager.d.ts +27 -0
- package/dist/stream/stream-manager.d.ts.map +1 -0
- package/dist/stream/stream-manager.js +191 -0
- package/dist/stream/stream-manager.js.map +1 -0
- package/dist/stream/stream-registry.js +54 -0
- package/dist/stream/stream-registry.js.map +1 -0
- package/dist/stream/types.js +14 -0
- package/dist/stream/types.js.map +1 -0
- package/dist/stream/validator.js +25 -0
- package/dist/stream/validator.js.map +1 -0
- package/dist/telemetry/config.js +20 -0
- package/dist/telemetry/config.js.map +1 -0
- package/dist/telemetry/index.d.ts +4 -0
- package/dist/telemetry/index.js +8 -0
- package/dist/telemetry/instrumentations.js +38 -0
- package/dist/telemetry/instrumentations.js.map +1 -0
- package/dist/telemetry/noop.js +54 -0
- package/dist/telemetry/noop.js.map +1 -0
- package/dist/telemetry/telemetry-manager.js +113 -0
- package/dist/telemetry/telemetry-manager.js.map +1 -0
- package/dist/telemetry/telemetry-provider.js +82 -0
- package/dist/telemetry/telemetry-provider.js.map +1 -0
- package/dist/telemetry/types.d.ts +74 -0
- package/dist/telemetry/types.d.ts.map +1 -0
- package/dist/type-generator/vite-plugin.d.ts +22 -0
- package/dist/type-generator/vite-plugin.d.ts.map +1 -0
- package/dist/type-generator/vite-plugin.js +49 -0
- package/dist/type-generator/vite-plugin.js.map +1 -0
- package/dist/utils/databricks-client-middleware.d.ts +17 -0
- package/dist/utils/databricks-client-middleware.d.ts.map +1 -0
- package/dist/utils/databricks-client-middleware.js +117 -0
- package/dist/utils/databricks-client-middleware.js.map +1 -0
- package/dist/utils/env-validator.js +14 -0
- package/dist/utils/env-validator.js.map +1 -0
- package/dist/utils/index.js +26 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/merge.js +25 -0
- package/dist/utils/merge.js.map +1 -0
- package/dist/utils/vite-config-merge.js +22 -0
- package/dist/utils/vite-config-merge.js.map +1 -0
- package/llms.txt +193 -0
- package/package.json +70 -0
- package/scripts/postinstall.js +6 -0
package/dist/type-generator/vite-plugin.js
ADDED

@@ -0,0 +1,49 @@
import path from "node:path";
import { execSync } from "node:child_process";

//#region src/type-generator/vite-plugin.ts
/**
 * Vite plugin to generate types for AppKit queries.
 * Calls `npx appkit-generate-types` under the hood.
 * @param options - Options to override default values.
 * @returns Vite plugin to generate types for AppKit queries.
 */
function appKitTypesPlugin(options) {
  let root;
  let appRoot;
  let outFile;
  let watchFolders;
  function generate() {
    try {
      execSync(`npx appkit-generate-types ${[appRoot, outFile].join(" ")}`, {
        cwd: appRoot,
        stdio: "inherit"
      });
    } catch (error) {
      if (process.env.NODE_ENV === "production") throw error;
      console.error("[AppKit] Error generating types:", error);
    }
  }
  return {
    name: "appkit-types",
    configResolved(config) {
      root = config.root;
      appRoot = path.resolve(root, "..");
      outFile = path.resolve(root, options?.outFile ?? "src/appKitTypes.d.ts");
      watchFolders = (options?.watchFolders ?? ["../config/queries"]).map((folder) => path.resolve(root, folder));
    },
    buildStart() {
      generate();
    },
    configureServer(server) {
      server.watcher.add(watchFolders);
      server.watcher.on("change", (changedFile) => {
        if (watchFolders.some((folder) => changedFile.startsWith(folder)) && changedFile.endsWith(".sql")) generate();
      });
    }
  };
}

//#endregion
export { appKitTypesPlugin };
//# sourceMappingURL=vite-plugin.js.map

package/dist/type-generator/vite-plugin.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"vite-plugin.js","names":["root: string","appRoot: string","outFile: string","watchFolders: string[]"],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":["import { execSync } from \"node:child_process\";\nimport path from \"node:path\";\nimport type { Plugin } from \"vite\";\n\n/**\n * Options for the AppKit types plugin.\n */\ninterface AppKitTypesPluginOptions {\n /* Path to the output d.ts file (relative to client folder). */\n outFile?: string;\n /** Folders to watch for changes. */\n watchFolders?: string[];\n}\n\n/**\n * Vite plugin to generate types for AppKit queries.\n * Calls `npx appkit-generate-types` under the hood.\n * @param options - Options to override default values.\n * @returns Vite plugin to generate types for AppKit queries.\n */\nexport function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin {\n let root: string;\n let appRoot: string;\n let outFile: string;\n let watchFolders: string[];\n\n function generate() {\n try {\n const args = [appRoot, outFile].join(\" \");\n execSync(`npx appkit-generate-types ${args}`, {\n cwd: appRoot,\n stdio: \"inherit\",\n });\n } catch (error) {\n // throw in production to fail the build\n if (process.env.NODE_ENV === \"production\") {\n throw error;\n }\n console.error(\"[AppKit] Error generating types:\", error);\n }\n }\n\n return {\n name: \"appkit-types\",\n configResolved(config) {\n root = config.root;\n appRoot = path.resolve(root, \"..\");\n\n outFile = path.resolve(root, options?.outFile ?? \"src/appKitTypes.d.ts\");\n\n watchFolders = (options?.watchFolders ?? [\"../config/queries\"]).map(\n (folder) => path.resolve(root, folder),\n );\n },\n buildStart() {\n generate();\n },\n\n configureServer(server) {\n server.watcher.add(watchFolders);\n\n server.watcher.on(\"change\", (changedFile) => {\n const isWatchedFile = watchFolders.some((folder) =>\n changedFile.startsWith(folder),\n );\n\n if (isWatchedFile && changedFile.endsWith(\".sql\")) {\n generate();\n }\n });\n },\n };\n}\n"],"mappings":";;;;;;;;;;AAoBA,SAAgB,kBAAkB,SAA4C;CAC5E,IAAIA;CACJ,IAAIC;CACJ,IAAIC;CACJ,IAAIC;CAEJ,SAAS,WAAW;AAClB,MAAI;AAEF,YAAS,6BADI,CAAC,SAAS,QAAQ,CAAC,KAAK,IAAI,IACK;IAC5C,KAAK;IACL,OAAO;IACR,CAAC;WACK,OAAO;AAEd,OAAI,QAAQ,IAAI,aAAa,aAC3B,OAAM;AAER,WAAQ,MAAM,oCAAoC,MAAM;;;AAI5D,QAAO;EACL,MAAM;EACN,eAAe,QAAQ;AACrB,UAAO,OAAO;AACd,aAAU,KAAK,QAAQ,MAAM,KAAK;AAElC,aAAU,KAAK,QAAQ,MAAM,SAAS,WAAW,uBAAuB;AAExE,mBAAgB,SAAS,gBAAgB,CAAC,oBAAoB,EAAE,KAC7D,WAAW,KAAK,QAAQ,MAAM,OAAO,CACvC;;EAEH,aAAa;AACX,aAAU;;EAGZ,gBAAgB,QAAQ;AACtB,UAAO,QAAQ,IAAI,aAAa;AAEhC,UAAO,QAAQ,GAAG,WAAW,gBAAgB;AAK3C,QAJsB,aAAa,MAAM,WACvC,YAAY,WAAW,OAAO,CAC/B,IAEoB,YAAY,SAAS,OAAO,CAC/C,WAAU;KAEZ;;EAEL"}
package/dist/utils/databricks-client-middleware.d.ts
ADDED

@@ -0,0 +1,17 @@
import { WorkspaceClient } from "@databricks/sdk-experimental";
import express from "express";

//#region src/utils/databricks-client-middleware.d.ts
type RequestContext = {
  userDatabricksClient?: WorkspaceClient;
  serviceDatabricksClient: WorkspaceClient;
  userId: string;
  userName?: string;
  serviceUserId: string;
  warehouseId: Promise<string>;
  workspaceId: Promise<string>;
};
declare function getRequestContext(): RequestContext;
//#endregion
export { getRequestContext };
//# sourceMappingURL=databricks-client-middleware.d.ts.map

package/dist/utils/databricks-client-middleware.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"databricks-client-middleware.d.ts","names":[],"sources":["../../src/utils/databricks-client-middleware.ts"],"sourcesContent":[],"mappings":";;;;KAYY,cAAA;yBACa;EADb,uBAAc,EAEC,eAFD;EAAA,MAAA,EAAA,MAAA;UACD,CAAA,EAAA,MAAA;eACE,EAAA,MAAA;aAIZ,EAAA,OAAA,CAAA,MAAA,CAAA;aACA,EAAA,OAAA,CAAA,MAAA,CAAA;CAAO;iBA4EN,iBAAA,CAAA,GAAqB"}
package/dist/utils/databricks-client-middleware.js
ADDED

@@ -0,0 +1,117 @@
import { __esmMin } from "../_virtual/rolldown_runtime.js";
import { name, version } from "../appkit/package.js";
import { WorkspaceClient } from "@databricks/sdk-experimental";
import { AsyncLocalStorage } from "node:async_hooks";

//#region src/utils/databricks-client-middleware.ts
function getClientOptions() {
  const isDev = process.env.NODE_ENV === "development";
  return {
    product: name,
    productVersion: version.split(".").slice(0, 3).join("."),
    ...isDev && { userAgentExtra: { mode: "dev" } }
  };
}
async function databricksClientMiddleware() {
  const serviceDatabricksClient = new WorkspaceClient({}, getClientOptions());
  const warehouseId = getWarehouseId(serviceDatabricksClient);
  const workspaceId = getWorkspaceId(serviceDatabricksClient);
  const serviceUserId = (await serviceDatabricksClient.currentUser.me()).id;
  if (!serviceUserId) throw new Error("Service user ID not found");
  return async (req, res, next) => {
    const userToken = req.headers["x-forwarded-access-token"];
    let userDatabricksClient;
    const host = process.env.DATABRICKS_HOST;
    if (userToken && host) userDatabricksClient = new WorkspaceClient({
      token: userToken,
      host,
      authType: "pat"
    }, getClientOptions());
    else if (process.env.NODE_ENV === "development") userDatabricksClient = serviceDatabricksClient;
    let userName = req.headers["x-forwarded-user"];
    if (!userName && process.env.NODE_ENV !== "development") {
      res.status(401).json({ error: "Unauthorized" });
      return;
    } else userName = serviceUserId;
    return asyncLocalStorage.run({
      userDatabricksClient,
      serviceDatabricksClient,
      warehouseId,
      workspaceId,
      userId: userName,
      serviceUserId
    }, async () => {
      return next();
    });
  };
}
function getRequestContext() {
  const store = asyncLocalStorage.getStore();
  if (!store) throw new Error("Request context not found");
  return store;
}
/**
 * Get the appropriate WorkspaceClient based on whether the request
 * should be executed as the user or as the service principal.
 *
 * @param asUser - If true, returns user's WorkspaceClient (requires token passthrough)
 * @throws Error if asUser is true but user token passthrough is not enabled
 */
function getWorkspaceClient(asUser) {
  const context = getRequestContext();
  if (asUser) {
    if (!context.userDatabricksClient) throw new Error(`User token passthrough is not enabled for this workspace.`);
    return context.userDatabricksClient;
  }
  return context.serviceDatabricksClient;
}
async function getWorkspaceId(workspaceClient) {
  if (process.env.DATABRICKS_WORKSPACE_ID) return process.env.DATABRICKS_WORKSPACE_ID;
  const response = await workspaceClient.apiClient.request({
    path: "/api/2.0/preview/scim/v2/Me",
    method: "GET",
    headers: new Headers(),
    raw: false,
    query: {},
    responseHeaders: ["x-databricks-org-id"]
  });
  if (!response["x-databricks-org-id"]) throw new Error("Workspace ID not found");
  return response["x-databricks-org-id"];
}
async function getWarehouseId(workspaceClient) {
  if (process.env.DATABRICKS_WAREHOUSE_ID) return process.env.DATABRICKS_WAREHOUSE_ID;
  if (process.env.NODE_ENV === "development") {
    const response = await workspaceClient.apiClient.request({
      path: "/api/2.0/sql/warehouses",
      method: "GET",
      headers: new Headers(),
      raw: false,
      query: { skip_cannot_use: "true" }
    });
    const priorities = {
      RUNNING: 0,
      STOPPED: 1,
      STARTING: 2,
      STOPPING: 3,
      DELETED: 99,
      DELETING: 99
    };
    const warehouses = (response.warehouses || []).sort((a, b) => {
      return priorities[a.state] - priorities[b.state];
    });
    if (response.warehouses.length === 0) throw new Error("Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.");
    const firstWarehouse = warehouses[0];
    if (firstWarehouse.state === "DELETED" || firstWarehouse.state === "DELETING" || !firstWarehouse.id) throw new Error("Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.");
    return firstWarehouse.id;
  }
  throw new Error("Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.");
}
var asyncLocalStorage;
var init_databricks_client_middleware = __esmMin((() => {
  asyncLocalStorage = new AsyncLocalStorage();
}));

//#endregion
init_databricks_client_middleware();
export { databricksClientMiddleware, getRequestContext, getWorkspaceClient, init_databricks_client_middleware };
//# sourceMappingURL=databricks-client-middleware.js.map

package/dist/utils/databricks-client-middleware.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"databricks-client-middleware.js","names":["productName","productVersion","userDatabricksClient: WorkspaceClient | undefined","priorities: Record<sql.State, number>"],"sources":["../../src/utils/databricks-client-middleware.ts"],"sourcesContent":["import { AsyncLocalStorage } from \"node:async_hooks\";\nimport {\n type ClientOptions,\n type sql,\n WorkspaceClient,\n} from \"@databricks/sdk-experimental\";\nimport type express from \"express\";\nimport {\n name as productName,\n version as productVersion,\n} from \"../../package.json\";\n\nexport type RequestContext = {\n userDatabricksClient?: WorkspaceClient;\n serviceDatabricksClient: WorkspaceClient;\n userId: string;\n userName?: string;\n serviceUserId: string;\n warehouseId: Promise<string>;\n workspaceId: Promise<string>;\n};\n\nconst asyncLocalStorage = new AsyncLocalStorage<RequestContext>();\n\nfunction getClientOptions(): ClientOptions {\n const isDev = process.env.NODE_ENV === \"development\";\n const normalizedVersion = productVersion\n .split(\".\")\n .slice(0, 3)\n .join(\".\") as ClientOptions[\"productVersion\"];\n\n return {\n product: productName,\n productVersion: normalizedVersion,\n ...(isDev && { userAgentExtra: { mode: \"dev\" } }),\n };\n}\n\nexport async function databricksClientMiddleware(): Promise<express.RequestHandler> {\n const serviceDatabricksClient = new WorkspaceClient({}, getClientOptions());\n const warehouseId = getWarehouseId(serviceDatabricksClient);\n const workspaceId = getWorkspaceId(serviceDatabricksClient);\n const serviceUserId = (await serviceDatabricksClient.currentUser.me()).id;\n\n if (!serviceUserId) {\n throw new Error(\"Service user ID not found\");\n }\n\n return async (\n req: express.Request,\n res: express.Response,\n next: express.NextFunction,\n ) => {\n const userToken = req.headers[\"x-forwarded-access-token\"] as string;\n let userDatabricksClient: WorkspaceClient | undefined;\n const host = process.env.DATABRICKS_HOST;\n if (userToken && host) {\n userDatabricksClient = new WorkspaceClient(\n {\n token: userToken,\n host,\n authType: \"pat\",\n },\n getClientOptions(),\n );\n } else if (process.env.NODE_ENV === \"development\") {\n // in local development service and no user token are the same\n // TODO: use `databricks apps run-local` to fix this\n userDatabricksClient = serviceDatabricksClient;\n }\n\n let userName = req.headers[\"x-forwarded-user\"] as string;\n if (!userName && process.env.NODE_ENV !== \"development\") {\n res.status(401).json({ error: \"Unauthorized\" });\n return;\n } else {\n userName = serviceUserId;\n }\n\n return asyncLocalStorage.run(\n {\n userDatabricksClient,\n serviceDatabricksClient,\n warehouseId,\n workspaceId,\n userId: userName,\n serviceUserId,\n },\n async () => {\n return next();\n },\n );\n };\n}\n\nexport function getRequestContext(): RequestContext {\n const store = asyncLocalStorage.getStore();\n if (!store) {\n throw new Error(\"Request context not found\");\n }\n return store;\n}\n\n/**\n * Get the appropriate WorkspaceClient based on whether the request\n * should be executed as the user or as the service principal.\n *\n * @param asUser - If true, returns user's WorkspaceClient (requires token passthrough)\n * @throws Error if asUser is true but user token passthrough is not enabled\n */\nexport function getWorkspaceClient(asUser: boolean): WorkspaceClient {\n const context = getRequestContext();\n\n if (asUser) {\n if (!context.userDatabricksClient) {\n throw new Error(\n `User token passthrough is not 
enabled for this workspace.`,\n );\n }\n return context.userDatabricksClient;\n }\n\n return context.serviceDatabricksClient;\n}\n\nasync function getWorkspaceId(\n workspaceClient: WorkspaceClient,\n): Promise<string> {\n if (process.env.DATABRICKS_WORKSPACE_ID) {\n return process.env.DATABRICKS_WORKSPACE_ID;\n }\n\n const response = (await workspaceClient.apiClient.request({\n path: \"/api/2.0/preview/scim/v2/Me\",\n method: \"GET\",\n headers: new Headers(),\n raw: false,\n query: {},\n responseHeaders: [\"x-databricks-org-id\"],\n })) as { \"x-databricks-org-id\": string };\n\n if (!response[\"x-databricks-org-id\"]) {\n throw new Error(\"Workspace ID not found\");\n }\n\n return response[\"x-databricks-org-id\"];\n}\n\nasync function getWarehouseId(\n workspaceClient: WorkspaceClient,\n): Promise<string> {\n if (process.env.DATABRICKS_WAREHOUSE_ID) {\n return process.env.DATABRICKS_WAREHOUSE_ID;\n }\n\n if (process.env.NODE_ENV === \"development\") {\n const response = (await workspaceClient.apiClient.request({\n path: \"/api/2.0/sql/warehouses\",\n method: \"GET\",\n headers: new Headers(),\n raw: false,\n query: {\n skip_cannot_use: \"true\",\n },\n })) as { warehouses: sql.EndpointInfo[] };\n\n const priorities: Record<sql.State, number> = {\n RUNNING: 0,\n STOPPED: 1,\n STARTING: 2,\n STOPPING: 3,\n DELETED: 99,\n DELETING: 99,\n };\n\n const warehouses = (response.warehouses || []).sort((a, b) => {\n return (\n priorities[a.state as sql.State] - priorities[b.state as sql.State]\n );\n });\n\n if (response.warehouses.length === 0) {\n throw new Error(\n \"Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.\",\n );\n }\n\n const firstWarehouse = warehouses[0];\n if (\n firstWarehouse.state === \"DELETED\" ||\n firstWarehouse.state === \"DELETING\" ||\n !firstWarehouse.id\n ) {\n throw new Error(\n \"Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.\",\n );\n }\n\n return firstWarehouse.id;\n }\n\n throw new Error(\n \"Warehouse ID not found. 
Please configure the DATABRICKS_WAREHOUSE_ID environment variable.\",\n );\n}\n\nexport type Request = express.Request;\nexport type Response = express.Response;\n"],"mappings":";;;;;;AAwBA,SAAS,mBAAkC;CACzC,MAAM,QAAQ,QAAQ,IAAI,aAAa;AAMvC,QAAO;EACL,SAASA;EACT,gBAPwBC,QACvB,MAAM,IAAI,CACV,MAAM,GAAG,EAAE,CACX,KAAK,IAAI;EAKV,GAAI,SAAS,EAAE,gBAAgB,EAAE,MAAM,OAAO,EAAE;EACjD;;AAGH,eAAsB,6BAA8D;CAClF,MAAM,0BAA0B,IAAI,gBAAgB,EAAE,EAAE,kBAAkB,CAAC;CAC3E,MAAM,cAAc,eAAe,wBAAwB;CAC3D,MAAM,cAAc,eAAe,wBAAwB;CAC3D,MAAM,iBAAiB,MAAM,wBAAwB,YAAY,IAAI,EAAE;AAEvE,KAAI,CAAC,cACH,OAAM,IAAI,MAAM,4BAA4B;AAG9C,QAAO,OACL,KACA,KACA,SACG;EACH,MAAM,YAAY,IAAI,QAAQ;EAC9B,IAAIC;EACJ,MAAM,OAAO,QAAQ,IAAI;AACzB,MAAI,aAAa,KACf,wBAAuB,IAAI,gBACzB;GACE,OAAO;GACP;GACA,UAAU;GACX,EACD,kBAAkB,CACnB;WACQ,QAAQ,IAAI,aAAa,cAGlC,wBAAuB;EAGzB,IAAI,WAAW,IAAI,QAAQ;AAC3B,MAAI,CAAC,YAAY,QAAQ,IAAI,aAAa,eAAe;AACvD,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,gBAAgB,CAAC;AAC/C;QAEA,YAAW;AAGb,SAAO,kBAAkB,IACvB;GACE;GACA;GACA;GACA;GACA,QAAQ;GACR;GACD,EACD,YAAY;AACV,UAAO,MAAM;IAEhB;;;AAIL,SAAgB,oBAAoC;CAClD,MAAM,QAAQ,kBAAkB,UAAU;AAC1C,KAAI,CAAC,MACH,OAAM,IAAI,MAAM,4BAA4B;AAE9C,QAAO;;;;;;;;;AAUT,SAAgB,mBAAmB,QAAkC;CACnE,MAAM,UAAU,mBAAmB;AAEnC,KAAI,QAAQ;AACV,MAAI,CAAC,QAAQ,qBACX,OAAM,IAAI,MACR,4DACD;AAEH,SAAO,QAAQ;;AAGjB,QAAO,QAAQ;;AAGjB,eAAe,eACb,iBACiB;AACjB,KAAI,QAAQ,IAAI,wBACd,QAAO,QAAQ,IAAI;CAGrB,MAAM,WAAY,MAAM,gBAAgB,UAAU,QAAQ;EACxD,MAAM;EACN,QAAQ;EACR,SAAS,IAAI,SAAS;EACtB,KAAK;EACL,OAAO,EAAE;EACT,iBAAiB,CAAC,sBAAsB;EACzC,CAAC;AAEF,KAAI,CAAC,SAAS,uBACZ,OAAM,IAAI,MAAM,yBAAyB;AAG3C,QAAO,SAAS;;AAGlB,eAAe,eACb,iBACiB;AACjB,KAAI,QAAQ,IAAI,wBACd,QAAO,QAAQ,IAAI;AAGrB,KAAI,QAAQ,IAAI,aAAa,eAAe;EAC1C,MAAM,WAAY,MAAM,gBAAgB,UAAU,QAAQ;GACxD,MAAM;GACN,QAAQ;GACR,SAAS,IAAI,SAAS;GACtB,KAAK;GACL,OAAO,EACL,iBAAiB,QAClB;GACF,CAAC;EAEF,MAAMC,aAAwC;GAC5C,SAAS;GACT,SAAS;GACT,UAAU;GACV,UAAU;GACV,SAAS;GACT,UAAU;GACX;EAED,MAAM,cAAc,SAAS,cAAc,EAAE,EAAE,MAAM,GAAG,MAAM;AAC5D,UACE,WAAW,EAAE,SAAsB,WAAW,EAAE;IAElD;AAEF,MAAI,SAAS,WAAW,WAAW,EACjC,OAAM,IAAI,MACR,6FACD;EAGH,MAAM,iBAAiB,WAAW;AAClC,MACE,eAAe,UAAU,aACzB,eAAe,UAAU,cACzB,CAAC,eAAe,GAEhB,OAAM,IAAI,MACR,6FACD;AAGH,SAAO,eAAe;;AAGxB,OAAM,IAAI,MACR,6FACD;;;;CArLG,oBAAoB,IAAI,mBAAmC"}
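The middleware above stashes a per-request RequestContext in AsyncLocalStorage, so getRequestContext() and getWorkspaceClient() can be called from any code running under it. A minimal sketch of how the pieces compose in a hand-rolled Express app follows; the root-level imports are an assumption (in practice the server plugin wires this middleware up for you).

```ts
// Sketch only: assumes these helpers are re-exported from the package root.
import express from "express";
import {
  databricksClientMiddleware,
  getRequestContext,
  getWorkspaceClient,
} from "@databricks/appkit";

const app = express();
// Resolves the service client plus warehouse/workspace IDs once, then returns
// the per-request handler that populates AsyncLocalStorage.
app.use(await databricksClientMiddleware());

app.get("/whoami", async (_req, res) => {
  // Reads the store populated by the middleware above.
  const ctx = getRequestContext();
  // false -> service principal client; true would require user token passthrough.
  const client = getWorkspaceClient(false);
  const me = await client.currentUser.me();
  res.json({ userId: ctx.userId, workspaceId: await ctx.workspaceId, serviceUser: me.id });
});
```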
package/dist/utils/env-validator.js
ADDED

@@ -0,0 +1,14 @@
import { __esmMin } from "../_virtual/rolldown_runtime.js";

//#region src/utils/env-validator.ts
function validateEnv(envVars) {
  const missingVars = [];
  for (const envVar of envVars) if (!process.env[envVar]) missingVars.push(envVar);
  if (missingVars.length > 0) throw new Error(`Missing required environment variables: ${missingVars.join(", ")}`);
}
var init_env_validator = __esmMin((() => {}));

//#endregion
init_env_validator();
export { init_env_validator, validateEnv };
//# sourceMappingURL=env-validator.js.map

package/dist/utils/env-validator.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"env-validator.js","names":[],"sources":["../../src/utils/env-validator.ts"],"sourcesContent":["export function validateEnv(envVars: string[]) {\n const missingVars = [];\n\n for (const envVar of envVars) {\n if (!process.env[envVar]) {\n missingVars.push(envVar);\n }\n }\n\n if (missingVars.length > 0) {\n throw new Error(\n `Missing required environment variables: ${missingVars.join(\", \")}`,\n );\n }\n}\n"],"mappings":";;;AAAA,SAAgB,YAAY,SAAmB;CAC7C,MAAM,cAAc,EAAE;AAEtB,MAAK,MAAM,UAAU,QACnB,KAAI,CAAC,QAAQ,IAAI,QACf,aAAY,KAAK,OAAO;AAI5B,KAAI,YAAY,SAAS,EACvB,OAAM,IAAI,MACR,2CAA2C,YAAY,KAAK,KAAK,GAClE"}
package/dist/utils/index.js
ADDED

@@ -0,0 +1,26 @@
import { __esmMin, __export } from "../_virtual/rolldown_runtime.js";
import { databricksClientMiddleware, getRequestContext, getWorkspaceClient, init_databricks_client_middleware } from "./databricks-client-middleware.js";
import { init_env_validator, validateEnv } from "./env-validator.js";
import { deepMerge, init_merge } from "./merge.js";
import { init_vite_config_merge, mergeConfigDedup } from "./vite-config-merge.js";

//#region src/utils/index.ts
var utils_exports = /* @__PURE__ */ __export({
  databricksClientMiddleware: () => databricksClientMiddleware,
  deepMerge: () => deepMerge,
  getRequestContext: () => getRequestContext,
  getWorkspaceClient: () => getWorkspaceClient,
  mergeConfigDedup: () => mergeConfigDedup,
  validateEnv: () => validateEnv
});
var init_utils = __esmMin((() => {
  init_databricks_client_middleware();
  init_env_validator();
  init_merge();
  init_vite_config_merge();
}));

//#endregion
init_utils();
export { init_utils, utils_exports };
//# sourceMappingURL=index.js.map

package/dist/utils/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","names":[],"sources":["../../src/utils/index.ts"],"sourcesContent":["export * from \"./databricks-client-middleware\";\nexport * from \"./env-validator\";\nexport * from \"./merge\";\nexport * from \"./vite-config-merge\";\n"],"mappings":""}
package/dist/utils/merge.js
ADDED

@@ -0,0 +1,25 @@
import { __esmMin } from "../_virtual/rolldown_runtime.js";

//#region src/utils/merge.ts
function deepMerge(target, ...sources) {
  if (!sources.length) return target;
  const source = sources.shift();
  if (!source) return deepMerge(target, ...sources);
  const result = { ...target };
  for (const key in source) {
    const sourceValue = source[key];
    const targetValue = result[key];
    if (sourceValue !== void 0) if (isObject(sourceValue) && isObject(targetValue)) result[key] = deepMerge(targetValue, sourceValue);
    else result[key] = sourceValue;
  }
  return sources.length ? deepMerge(result, ...sources) : result;
}
function isObject(item) {
  return typeof item === "object" && item !== null && !Array.isArray(item);
}
var init_merge = __esmMin((() => {}));

//#endregion
init_merge();
export { deepMerge, init_merge };
//# sourceMappingURL=merge.js.map

package/dist/utils/merge.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"merge.js","names":[],"sources":["../../src/utils/merge.ts"],"sourcesContent":["export function deepMerge<T extends Record<string, unknown>>(\n target: T,\n ...sources: Array<Partial<T> | undefined>\n): T {\n if (!sources.length) return target;\n\n const source = sources.shift();\n if (!source) return deepMerge(target, ...sources);\n\n const result = { ...target };\n\n for (const key in source) {\n const sourceValue = source[key];\n const targetValue = result[key];\n\n if (sourceValue !== undefined) {\n if (isObject(sourceValue) && isObject(targetValue)) {\n result[key] = deepMerge(\n targetValue as Record<string, unknown>,\n sourceValue as Record<string, unknown>,\n ) as T[Extract<keyof T, string>];\n } else {\n result[key] = sourceValue as T[Extract<keyof T, string>];\n }\n }\n }\n\n return sources.length ? deepMerge(result, ...sources) : result;\n}\n\nfunction isObject(item: unknown): item is Record<string, unknown> {\n return typeof item === \"object\" && item !== null && !Array.isArray(item);\n}\n"],"mappings":";;;AAAA,SAAgB,UACd,QACA,GAAG,SACA;AACH,KAAI,CAAC,QAAQ,OAAQ,QAAO;CAE5B,MAAM,SAAS,QAAQ,OAAO;AAC9B,KAAI,CAAC,OAAQ,QAAO,UAAU,QAAQ,GAAG,QAAQ;CAEjD,MAAM,SAAS,EAAE,GAAG,QAAQ;AAE5B,MAAK,MAAM,OAAO,QAAQ;EACxB,MAAM,cAAc,OAAO;EAC3B,MAAM,cAAc,OAAO;AAE3B,MAAI,gBAAgB,OAClB,KAAI,SAAS,YAAY,IAAI,SAAS,YAAY,CAChD,QAAO,OAAO,UACZ,aACA,YACD;MAED,QAAO,OAAO;;AAKpB,QAAO,QAAQ,SAAS,UAAU,QAAQ,GAAG,QAAQ,GAAG;;AAG1D,SAAS,SAAS,MAAgD;AAChE,QAAO,OAAO,SAAS,YAAY,SAAS,QAAQ,CAAC,MAAM,QAAQ,KAAK"}
package/dist/utils/vite-config-merge.js
ADDED

@@ -0,0 +1,22 @@
import { __esmMin } from "../_virtual/rolldown_runtime.js";

//#region src/utils/vite-config-merge.ts
function mergeConfigDedup(base, override, mergeFn) {
  const merged = mergeFn(base, override);
  if (base.plugins && override.plugins) {
    const seen = /* @__PURE__ */ new Set();
    merged.plugins = [...base.plugins, ...override.plugins].filter((p) => {
      const name = p.name;
      if (seen.has(name)) return false;
      seen.add(name);
      return true;
    });
  }
  return merged;
}
var init_vite_config_merge = __esmMin((() => {}));

//#endregion
init_vite_config_merge();
export { init_vite_config_merge, mergeConfigDedup };
//# sourceMappingURL=vite-config-merge.js.map

package/dist/utils/vite-config-merge.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"vite-config-merge.js","names":[],"sources":["../../src/utils/vite-config-merge.ts"],"sourcesContent":["import type { Plugin } from \"vite\";\n\nexport function mergeConfigDedup(\n base: any,\n override: any,\n mergeFn: (a: any, b: any) => any,\n) {\n const merged = mergeFn(base, override);\n if (base.plugins && override.plugins) {\n const seen = new Set<string>();\n merged.plugins = [...base.plugins, ...override.plugins].filter(\n (p: Plugin) => {\n const name = p.name;\n if (seen.has(name)) return false;\n seen.add(name);\n return true;\n },\n );\n }\n return merged;\n}\n"],"mappings":";;;AAEA,SAAgB,iBACd,MACA,UACA,SACA;CACA,MAAM,SAAS,QAAQ,MAAM,SAAS;AACtC,KAAI,KAAK,WAAW,SAAS,SAAS;EACpC,MAAM,uBAAO,IAAI,KAAa;AAC9B,SAAO,UAAU,CAAC,GAAG,KAAK,SAAS,GAAG,SAAS,QAAQ,CAAC,QACrD,MAAc;GACb,MAAM,OAAO,EAAE;AACf,OAAI,KAAK,IAAI,KAAK,CAAE,QAAO;AAC3B,QAAK,IAAI,KAAK;AACd,UAAO;IAEV;;AAEH,QAAO"}
package/llms.txt
ADDED

@@ -0,0 +1,193 @@
# llms.txt — Guidance for AI systems using the Databricks AppKit (@databricks/appkit)

Project: Databricks AppKit
Author: Databricks
Version: 1.0.0

# =====================
# General Description
# =====================
AppKit is a modular TypeScript SDK for building apps with workflows and plugins.
It provides a single entrypoint (createApp) where you configure and register plugins.
Each plugin is then available under AppKit[pluginName].

Main concepts:
- createApp(config): initializes the SDK with plugins
- Plugins: extend AppKit with functionality (server, analytics, ai, etc.)
- AppKit[pluginName]: exposes plugin API after initialization
- New plugins can be created by extending the Plugin class.

# =====================
# Primary Usage Pattern
# =====================
Always use async/await.
Always initialize AppKit before using plugins.
The server and plugins are initialized automatically; no custom endpoints are needed.

Example:

```ts
import { createApp, server, analytics } from "@databricks/appkit";

await createApp({
  plugins: [
    server({ port: 8000 }),
    analytics(),
  ],
});
```

# ==============================================
# Basic Usage Pattern: starting the server
# ==============================================

Example:

```ts
import { createApp, server, analytics } from "@databricks/appkit";

const AppKit = await createApp({
  plugins: [
    server({ port: 8000, autoStart: false }),
    analytics(),
  ],
});

const app = await AppKit.server.start();
app.get("/ping", (req, res) => res.send("pong"));
```

# =====================
# Plugin APIs
# =====================

Each plugin exposes a set of endpoints by default.

## Server Plugin
- AppKit.server.start(): Promise<Express.Application>
- Purpose: Start an Express server on the configured port; only use this if { autoStart: false } is provided in the server plugin config
- Usage: Add routes via the returned app
- Config: when registering the plugin, the following options can be provided:
  server({
    port?: number;
    staticPath?: string; // Path where the frontend assets are located.
    autoStart?: boolean;
  })

## Analytics Plugin
- AppKit.analytics.query.executeQuery({ query, parameters }: { query: string; parameters?: Record<string, any> }, options?: ExecuteOptions): Promise<ExecuteStatementOutput>;
- Purpose: Provide a SQL-by-key interface.
- Usage: Only for structured query + insert examples. SQL never goes into the function call. Any SQL that needs to be written
  goes into config/queries/<query_key>.sql. All queries should be parameterized (use placeholders).
- Default endpoints:
  - POST /api/analytics/:query_key -> `query_key` will be the key to the file that contains the query. Expects a body with the shape { parameters?: Record<string, any>; }. parameters will be bound into the query.

# =====================
# Custom Plugins
# =====================

Databricks AppKit might not cover every case needed, so for those cases a custom plugin can be created.
Here is an example:

```ts
import { Plugin, toPlugin } from '@databricks/appkit';

class OpenWeatherPlugin extends Plugin {
  name: string = "open-weather";
  private apiKey: string;
  private url: string;

  constructor(config: any, auth: IAuthManager, telemetry: ITelemetryManager) {
    super(config, auth, telemetry);

    this.apiKey = process.env.OPEN_WEATHER_API_KEY!;
    this.url = process.env.OPEN_WEATHER_URL || "https://api.openweathermap.org/data/3.0/onecall";

    // ...
  }

  async getWeather(lat: number, lon: number): Promise<any | null> {
    const url = `${this.url}?lat=${lat}&lon=${lon}&appid=${this.apiKey}`;

    try {
      const response = await fetch(url);
      if (!response.ok) {
        console.error("Error fetching weather data:", response.statusText);
        return null;
      }

      const data = await response.json();
      return data;
    } catch (error) {
      console.error("Fetch error:", error);
      return null;
    }
  }

  /**
   * Optionally, the plugin can inject its own routes into the router
   */
  injectRoutes(router: express.Router) {
    /**
     * Each route is scoped to the plugin name. So in this case the route will end up being
     * /api/open-weather/weather
     *
     * and an example request would be:
     * GET /api/open-weather/weather?lat=40.7128&lon=-74.0060
     */
    router.get("/weather", async (req: any, res: any) => {
      const { lat, lon } = req.query;
      const data = await this.getWeather(lat, lon);
      res.send(data);
    });
  }
}

export const openWeather = toPlugin<typeof OpenWeatherPlugin, OpenWeatherConfig, "openWeather">(OpenWeatherPlugin, "openWeather");
```

Then it is used like the rest of the plugins:

```ts
import { createApp, server, analytics } from "@databricks/appkit";
import { openWeather } from './open-weather';

const AppKit = await createApp({
  plugins: [
    server({ port: 8000 }),
    analytics(),
    openWeather(),
  ],
});

const app = await AppKit.server.start();
/**
 * A route could also be added here
 */
app.get("/api/open-weather/weather", async (req, res) => {
  const data = await AppKit.openWeather.getWeather(40.7128, -74.0060);
  res.send(data);
});
```


# =====================
# Style Guidelines for AI
# =====================
- Always prefer async/await (never .then chaining in examples).
- Always show explicit plugin config (no hidden defaults).
- Use ESModules (import/export), not require().
- Use TypeScript typings in advanced examples if helpful.

# =====================
# Anti-Patterns (avoid in examples)
# =====================
- ❌ Do not access AppKit internals (only use AppKit[pluginName]).
- ❌ Do not assume SQL queries hit a real DB (they return demo data unless configured).
- ❌ Do not show usage without createApp first.

# =====================
# Attribution
# =====================
If AI-generated code uses this SDK, attribute:
"Powered by Databricks AppKit (https://github.com/...)".
package/package.json
ADDED

@@ -0,0 +1,70 @@
{
  "name": "@databricks/appkit",
  "type": "module",
  "version": "0.0.2",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "packageManager": "pnpm@10.21.0",
  "files": [
    "dist",
    "bin",
    "scripts",
    "CLAUDE.md",
    "llms.txt",
    "README.md",
    "DCO",
    "NOTICE.md"
  ],
  "exports": {
    ".": "./dist/index.js",
    "./package.json": "./package.json"
  },
  "bin": {
    "appkit-generate-types": "./bin/generate-types.ts",
    "appkit-setup": "./bin/setup-claude.js"
  },
  "scripts": {
    "build:package": "tsdown --config tsdown.config.ts",
    "build:watch": "tsdown --config tsdown.config.ts --watch",
    "clean:full": "rm -rf dist node_modules tmp",
    "clean": "rm -rf dist tmp",
    "dist": "tsx ../../tools/dist.ts",
    "tarball": "tsx ../../tools/dist.ts && npm pack ./tmp --pack-destination ./tmp",
    "typecheck": "tsc --noEmit",
    "postinstall": "node scripts/postinstall.js"
  },
  "dependencies": {
    "@databricks/sdk-experimental": "^0.15.0",
    "@opentelemetry/api": "^1.9.0",
    "@opentelemetry/api-logs": "^0.208.0",
    "@opentelemetry/auto-instrumentations-node": "^0.67.0",
    "@opentelemetry/exporter-logs-otlp-proto": "^0.208.0",
    "@opentelemetry/exporter-metrics-otlp-proto": "^0.208.0",
    "@opentelemetry/exporter-trace-otlp-proto": "^0.208.0",
    "@opentelemetry/instrumentation": "^0.208.0",
    "@opentelemetry/instrumentation-express": "^0.57.0",
    "@opentelemetry/instrumentation-http": "^0.208.0",
    "@opentelemetry/resources": "^2.2.0",
    "@opentelemetry/sdk-logs": "^0.208.0",
    "@opentelemetry/sdk-metrics": "^2.2.0",
    "@opentelemetry/sdk-node": "^0.208.0",
    "@opentelemetry/semantic-conventions": "^1.38.0",
    "dotenv": "^16.6.1",
    "express": "^4.22.0",
    "pg": "^8.16.3",
    "vite": "npm:rolldown-vite@7.1.14",
    "ws": "^8.18.3",
    "zod-to-ts": "^2.0.0"
  },
  "devDependencies": {
    "@types/express": "^4.17.25",
    "@types/pg": "^8.15.6",
    "@types/ws": "^8.18.1",
    "@vitejs/plugin-react": "^5.1.1"
  },
  "overrides": {
    "vite": "npm:rolldown-vite@7.1.14"
  },
  "module": "./dist/index.js",
  "publishConfig": {}
}