@databricks/appkit 0.21.0 → 0.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +11 -0
- package/NOTICE.md +1 -0
- package/README.md +3 -20
- package/dist/appkit/package.js +1 -1
- package/dist/cli/commands/generate-types.js +15 -13
- package/dist/cli/commands/generate-types.js.map +1 -1
- package/dist/cli/commands/setup.js +2 -2
- package/dist/cli/commands/setup.js.map +1 -1
- package/dist/connectors/genie/client.js +50 -0
- package/dist/connectors/genie/client.js.map +1 -1
- package/dist/connectors/serving/client.js +47 -0
- package/dist/connectors/serving/client.js.map +1 -0
- package/dist/index.d.ts +6 -1
- package/dist/index.js +4 -1
- package/dist/index.js.map +1 -1
- package/dist/plugin/execution-result.d.ts +26 -0
- package/dist/plugin/execution-result.d.ts.map +1 -0
- package/dist/plugin/index.d.ts +1 -0
- package/dist/plugin/interceptors/retry.js +1 -1
- package/dist/plugin/interceptors/retry.js.map +1 -1
- package/dist/plugin/plugin.d.ts +54 -5
- package/dist/plugin/plugin.d.ts.map +1 -1
- package/dist/plugin/plugin.js +87 -7
- package/dist/plugin/plugin.js.map +1 -1
- package/dist/plugins/analytics/analytics.d.ts.map +1 -1
- package/dist/plugins/analytics/analytics.js +2 -3
- package/dist/plugins/analytics/analytics.js.map +1 -1
- package/dist/plugins/files/plugin.d.ts +2 -0
- package/dist/plugins/files/plugin.d.ts.map +1 -1
- package/dist/plugins/files/plugin.js +39 -59
- package/dist/plugins/files/plugin.js.map +1 -1
- package/dist/plugins/genie/genie.d.ts +1 -0
- package/dist/plugins/genie/genie.d.ts.map +1 -1
- package/dist/plugins/genie/genie.js +42 -3
- package/dist/plugins/genie/genie.js.map +1 -1
- package/dist/plugins/index.d.ts +4 -1
- package/dist/plugins/index.js +2 -0
- package/dist/plugins/server/base-server.js +4 -2
- package/dist/plugins/server/base-server.js.map +1 -1
- package/dist/plugins/server/client-config-sanitizer.js +184 -0
- package/dist/plugins/server/client-config-sanitizer.js.map +1 -0
- package/dist/plugins/server/index.d.ts +3 -2
- package/dist/plugins/server/index.d.ts.map +1 -1
- package/dist/plugins/server/index.js +27 -9
- package/dist/plugins/server/index.js.map +1 -1
- package/dist/plugins/server/remote-tunnel/denied.html +68 -0
- package/dist/plugins/server/remote-tunnel/index.html +165 -0
- package/dist/plugins/server/remote-tunnel/remote-tunnel-manager.js +2 -1
- package/dist/plugins/server/remote-tunnel/remote-tunnel-manager.js.map +1 -1
- package/dist/plugins/server/remote-tunnel/wait.html +158 -0
- package/dist/plugins/server/static-server.js +2 -2
- package/dist/plugins/server/static-server.js.map +1 -1
- package/dist/plugins/server/utils.js +28 -5
- package/dist/plugins/server/utils.js.map +1 -1
- package/dist/plugins/server/vite-dev-server.js +8 -3
- package/dist/plugins/server/vite-dev-server.js.map +1 -1
- package/dist/plugins/serving/defaults.js +10 -0
- package/dist/plugins/serving/defaults.js.map +1 -0
- package/dist/plugins/serving/index.d.ts +2 -0
- package/dist/plugins/serving/index.js +3 -0
- package/dist/plugins/serving/manifest.js +53 -0
- package/dist/plugins/serving/manifest.js.map +1 -0
- package/dist/plugins/serving/schema-filter.js +52 -0
- package/dist/plugins/serving/schema-filter.js.map +1 -0
- package/dist/plugins/serving/serving.d.ts +38 -0
- package/dist/plugins/serving/serving.d.ts.map +1 -0
- package/dist/plugins/serving/serving.js +213 -0
- package/dist/plugins/serving/serving.js.map +1 -0
- package/dist/plugins/serving/types.d.ts +58 -0
- package/dist/plugins/serving/types.d.ts.map +1 -0
- package/dist/shared/src/execute.d.ts +1 -1
- package/dist/shared/src/plugin.d.ts +1 -0
- package/dist/shared/src/plugin.d.ts.map +1 -1
- package/dist/stream/stream-manager.js +1 -0
- package/dist/stream/stream-manager.js.map +1 -1
- package/dist/stream/types.js +2 -1
- package/dist/stream/types.js.map +1 -1
- package/dist/type-generator/cache.js +1 -1
- package/dist/type-generator/cache.js.map +1 -1
- package/dist/type-generator/index.js +13 -1
- package/dist/type-generator/index.js.map +1 -1
- package/dist/type-generator/query-registry.js +77 -4
- package/dist/type-generator/query-registry.js.map +1 -1
- package/dist/type-generator/serving/cache.js +38 -0
- package/dist/type-generator/serving/cache.js.map +1 -0
- package/dist/type-generator/serving/converter.js +108 -0
- package/dist/type-generator/serving/converter.js.map +1 -0
- package/dist/type-generator/serving/fetcher.js +54 -0
- package/dist/type-generator/serving/fetcher.js.map +1 -0
- package/dist/type-generator/serving/generator.js +185 -0
- package/dist/type-generator/serving/generator.js.map +1 -0
- package/dist/type-generator/serving/server-file-extractor.d.ts +22 -0
- package/dist/type-generator/serving/server-file-extractor.d.ts.map +1 -0
- package/dist/type-generator/serving/server-file-extractor.js +131 -0
- package/dist/type-generator/serving/server-file-extractor.js.map +1 -0
- package/dist/type-generator/serving/vite-plugin.d.ts +24 -0
- package/dist/type-generator/serving/vite-plugin.d.ts.map +1 -0
- package/dist/type-generator/serving/vite-plugin.js +60 -0
- package/dist/type-generator/serving/vite-plugin.js.map +1 -0
- package/docs/api/appkit/Class.Plugin.md +83 -20
- package/docs/api/appkit/Function.appKitServingTypesPlugin.md +24 -0
- package/docs/api/appkit/Function.extractServingEndpoints.md +22 -0
- package/docs/api/appkit/Function.findServerFile.md +20 -0
- package/docs/api/appkit/Interface.EndpointConfig.md +23 -0
- package/docs/api/appkit/Interface.ServingEndpointEntry.md +30 -0
- package/docs/api/appkit/Interface.ServingEndpointRegistry.md +3 -0
- package/docs/api/appkit/TypeAlias.ExecutionResult.md +36 -0
- package/docs/api/appkit/TypeAlias.ServingFactory.md +15 -0
- package/docs/api/appkit.md +39 -31
- package/docs/app-management.md +1 -1
- package/docs/architecture.md +1 -1
- package/docs/development/ai-assisted-development.md +2 -2
- package/docs/development/local-development.md +1 -1
- package/docs/development/remote-bridge.md +1 -1
- package/docs/development/templates.md +93 -0
- package/docs/development.md +1 -1
- package/docs/faq.md +66 -0
- package/docs/plugins/caching.md +3 -1
- package/docs/plugins/execution-context.md +1 -1
- package/docs/plugins/lakebase.md +1 -1
- package/docs/plugins/serving.md +223 -0
- package/docs.md +2 -2
- package/llms.txt +11 -0
- package/package.json +37 -36
- package/sbom.cdx.json +1 -0
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
# Templates
|
|
2
|
+
|
|
3
|
+
AppKit uses a template system powered by the Databricks CLI's `databricks apps init` command. Templates define the project structure, and `.tmpl` files are processed with Go's `text/template` engine to generate customized output.
|
|
4
|
+
|
|
5
|
+
## How `.tmpl` files work[](#how-tmpl-files-work "Direct link to how-tmpl-files-work")
|
|
6
|
+
|
|
7
|
+
Any file ending in `.tmpl` is processed by the CLI during `databricks apps init`:
|
|
8
|
+
|
|
9
|
+
1. The `.tmpl` suffix is stripped (e.g. `.env.tmpl` → `.env`)
|
|
10
|
+
2. Go template expressions are evaluated and substituted
|
|
11
|
+
3. The rendered file is written to the output directory
|
|
12
|
+
|
|
13
|
+
Files named with a `_` prefix are renamed to `.` prefix (e.g. `_gitignore` → `.gitignore`).
|
|
14
|
+
|
|
15
|
+
### Template variables[](#template-variables "Direct link to Template variables")
|
|
16
|
+
|
|
17
|
+
| Variable | Description |
|
|
18
|
+
| ----------------- | ---------------------------------------------------------------------------- |
|
|
19
|
+
| `.projectName` | Project name from `--name` or interactive prompt |
|
|
20
|
+
| `.workspaceHost` | Databricks workspace URL |
|
|
21
|
+
| `.profile` | Databricks CLI profile name (empty if using host-based auth) |
|
|
22
|
+
| `.appDescription` | App description |
|
|
23
|
+
| `.plugins.<name>` | Non-nil for each selected plugin, enabling conditionals |
|
|
24
|
+
| `.dotEnv.content` | Generated `.env` content from plugin resources |
|
|
25
|
+
| `.dotEnv.example` | Generated `.env.example` content with placeholders |
|
|
26
|
+
| `.bundle.*` | Generated `databricks.yml` sections (variables, resources, target variables) |
|
|
27
|
+
| `.appEnv` | Generated `app.yaml` env entries |
|
|
28
|
+
|
|
29
|
+
### Conditional content[](#conditional-content "Direct link to Conditional content")
|
|
30
|
+
|
|
31
|
+
Use Go template conditionals to include/exclude code based on selected plugins:
|
|
32
|
+
|
|
33
|
+
```go
|
|
34
|
+
{{- if .plugins.analytics}}
|
|
35
|
+
import { analytics } from '@databricks/appkit';
|
|
36
|
+
{{- end}}
|
|
37
|
+
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
## `appkit.plugins.json`[](#appkitpluginsjson "Direct link to appkitpluginsjson")
|
|
41
|
+
|
|
42
|
+
The plugin manifest drives the CLI's behavior during `databricks apps init`:
|
|
43
|
+
|
|
44
|
+
* **Plugin selection UI** — selectable plugins shown in the interactive prompt
|
|
45
|
+
* **Resource prompts** — required/optional resources prompt the user for values (e.g. SQL Warehouse ID)
|
|
46
|
+
* **`.dotEnv` population** — resource fields with an `env` property are written to `.env`
|
|
47
|
+
* **`app.yaml` generation** — resource fields produce `env` + `valueFrom` entries
|
|
48
|
+
* **`databricks.yml` generation** — resource fields produce bundle variables and app resource entries
|
|
49
|
+
|
|
50
|
+
### Resource field properties[](#resource-field-properties "Direct link to Resource field properties")
|
|
51
|
+
|
|
52
|
+
Each resource field in the manifest can have these properties:
|
|
53
|
+
|
|
54
|
+
| Property | Description |
|
|
55
|
+
| -------------- | ----------------------------------------------------------------------------------- |
|
|
56
|
+
| `env` | Environment variable name written to `.env` and `app.yaml` |
|
|
57
|
+
| `description` | Shown in the interactive prompt and bundle variable description |
|
|
58
|
+
| `localOnly` | Only written to `.env` for local dev; excluded from `app.yaml` and bundle variables |
|
|
59
|
+
| `bundleIgnore` | Excluded from `databricks.yml` variables (but still in `.env`) |
|
|
60
|
+
| `value` | Default value used when no user input is provided |
|
|
61
|
+
| `resolve` | Auto-populated by CLI from API calls instead of prompting (see below) |
|
|
62
|
+
| `examples` | Example values shown in field descriptions |
|
|
63
|
+
|
|
64
|
+
### Resolvers[](#resolvers "Direct link to Resolvers")
|
|
65
|
+
|
|
66
|
+
Fields with a `resolve` property are auto-populated by the CLI from API calls rather than user prompts. The format is `<type>:<field>`.
|
|
67
|
+
|
|
68
|
+
Currently only the `postgres` resource type has a resolver. Given the user-provided `branch` and `database` resource names, it derives:
|
|
69
|
+
|
|
70
|
+
| Resolve key | Description |
|
|
71
|
+
| ----------------------- | ----------------------------------------------------------- |
|
|
72
|
+
| `postgres:host` | Postgres host from the branch's read-write endpoint |
|
|
73
|
+
| `postgres:databaseName` | Postgres database name from the database resource |
|
|
74
|
+
| `postgres:endpointPath` | Lakebase endpoint resource name from the branch's endpoints |
|
|
75
|
+
|
|
76
|
+
Example field definition:
|
|
77
|
+
|
|
78
|
+
```json
|
|
79
|
+
{
|
|
80
|
+
"host": {
|
|
81
|
+
"env": "PGHOST",
|
|
82
|
+
"localOnly": true,
|
|
83
|
+
"resolve": "postgres:host",
|
|
84
|
+
"description": "Postgres host for local development."
|
|
85
|
+
}
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
## See also[](#see-also "Direct link to See also")
|
|
91
|
+
|
|
92
|
+
* [Plugin management](./docs/plugins/plugin-management.md) — `appkit plugin sync`, `appkit plugin create`
|
|
93
|
+
* [Configuration](./docs/configuration.md) — environment variables
|
package/docs/development.md
CHANGED
|
@@ -5,7 +5,7 @@ AppKit provides multiple development workflows to suit different needs: local de
|
|
|
5
5
|
## Prerequisites[](#prerequisites "Direct link to Prerequisites")
|
|
6
6
|
|
|
7
7
|
* [Node.js](https://nodejs.org) v22+ environment with `npm`
|
|
8
|
-
* Databricks CLI (v0.
|
|
8
|
+
* Databricks CLI (v0.295.0 or higher): install and configure it according to the [official tutorial](https://docs.databricks.com/aws/en/dev-tools/cli/tutorial).
|
|
9
9
|
* A new Databricks app with AppKit installed. See [Bootstrap a new Databricks app](./docs.md#quick-start-options) for more details.
|
|
10
10
|
|
|
11
11
|
## Development flows[](#development-flows "Direct link to Development flows")
|
package/docs/faq.md
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
# FAQ
|
|
2
|
+
|
|
3
|
+
## Integrations[](#integrations "Direct link to Integrations")
|
|
4
|
+
|
|
5
|
+
### What Databricks services are available through AppKit?[](#what-databricks-services-are-available-through-appkit "Direct link to What Databricks services are available through AppKit?")
|
|
6
|
+
|
|
7
|
+
AppKit provides built-in integrations with the following Databricks services via its [plugin system](./docs/plugins.md):
|
|
8
|
+
|
|
9
|
+
| Plugin | Databricks Service | What It Does |
|
|
10
|
+
| ---------------------------------------------- | --------------------------------- | --------------------------------------------------------------------------------------- |
|
|
11
|
+
| [Analytics](./docs/plugins/analytics.md) | SQL Warehouses | Execute parameterized SQL queries with built-in caching, retry, and Arrow support |
|
|
12
|
+
| [Lakebase](./docs/plugins/lakebase.md) | Lakebase Autoscaling (PostgreSQL) | Relational database access via standard pg.Pool with automatic OAuth token refresh |
|
|
13
|
+
| [Genie](./docs/plugins/genie.md) | AI/BI Genie Spaces | Natural language data queries with conversation management and streaming |
|
|
14
|
+
| [Files](./docs/plugins/files.md) | Unity Catalog Volumes | Multi-volume file operations (list, read, upload, download, delete, preview) |
|
|
15
|
+
| [Serving](./docs/plugins/serving.md) | Model Serving | Authenticated proxy to Model Serving endpoints with invoke and streaming support |
|
|
16
|
+
| [Server](./docs/plugins/server.md) | N/A | Express HTTP server with static file serving, Vite dev mode, and plugin route injection |
|
|
17
|
+
|
|
18
|
+
Stay tuned for new plugins as we constantly expand integrations!
|
|
19
|
+
|
|
20
|
+
### Can I add custom integrations?[](#can-i-add-custom-integrations "Direct link to Can I add custom integrations?")
|
|
21
|
+
|
|
22
|
+
Yes. AppKit's plugin architecture is extensible — you can create custom plugins using the CLI (`npx appkit plugin create`) or manually. See the [creating custom plugins guide](./docs/plugins/custom-plugins.md).
|
|
23
|
+
|
|
24
|
+
## Authentication[](#authentication "Direct link to Authentication")
|
|
25
|
+
|
|
26
|
+
### How does authentication work in AppKit apps?[](#how-does-authentication-work-in-appkit-apps "Direct link to How does authentication work in AppKit apps?")
|
|
27
|
+
|
|
28
|
+
AppKit apps are designed to run on [Databricks Apps](https://docs.databricks.com/aws/en/dev-tools/databricks-apps/), which handles user authentication and authorization. Databricks Apps forwards user identity to the app via request headers, and AppKit integrates with this through the `.asUser(req)` pattern for on-behalf-of (OBO) execution — allowing plugins to act on behalf of the authenticated user.
|
|
29
|
+
|
|
30
|
+
For details on how authentication and authorization work in Databricks Apps, see the [official auth documentation](https://docs.databricks.com/aws/en/dev-tools/databricks-apps/auth).
|
|
31
|
+
|
|
32
|
+
## Databases[](#databases "Direct link to Databases")
|
|
33
|
+
|
|
34
|
+
*Also: Lakebase, PostgreSQL, OLTP*
|
|
35
|
+
|
|
36
|
+
### How does AppKit handle databases?[](#how-does-appkit-handle-databases "Direct link to How does AppKit handle databases?")
|
|
37
|
+
|
|
38
|
+
AppKit is a TypeScript SDK (Express + React) and does not manage databases directly.
|
|
39
|
+
|
|
40
|
+
To add database support, use the [Lakebase plugin](./docs/plugins/lakebase.md), which integrates with Lakebase Autoscaling.
|
|
41
|
+
|
|
42
|
+
AppKit also uses Lakebase for caching when it is available (see the [caching](#does-appkit-support-caching) section below).
|
|
43
|
+
|
|
44
|
+
You can manage Lakebase Autoscaling projects and branches using the dedicated agent skill from [Databricks Agent Skills](./docs/development/ai-assisted-development.md), installed with the Databricks CLI.
|
|
45
|
+
|
|
46
|
+
### How does database setup and permission management work in AppKit?[](#how-does-database-setup-and-permission-management-work-in-appkit "Direct link to How does database setup and permission management work in AppKit?")
|
|
47
|
+
|
|
48
|
+
AppKit apps can have an attached Lakebase Autoscaling instance. No database is bundled by default — you add one by configuring the [Lakebase plugin](./docs/plugins/lakebase.md). When running `databricks apps init` and selecting the Lakebase plugin, the selected database is automatically attached as an app resource after deployment.
|
|
49
|
+
|
|
50
|
+
With [AI-assisted development](./docs/development/ai-assisted-development.md), you can also ask the Agent to create a Lakebase project and branch for you.
|
|
51
|
+
|
|
52
|
+
When deployed, a Databricks app uses its Service Principal for schema and table creation. If you configure the Lakebase Autoscaling project as an [app resource](https://docs.databricks.com/aws/en/dev-tools/databricks-apps/resources), the necessary connect and create permissions are granted automatically to the app's Service Principal.
|
|
53
|
+
|
|
54
|
+
### Does AppKit support Lakebase Provisioned?[](#does-appkit-support-lakebase-provisioned "Direct link to Does AppKit support Lakebase Provisioned?")
|
|
55
|
+
|
|
56
|
+
No. AppKit only supports Lakebase Autoscaling. Lakebase Provisioned databases are not supported by the [Lakebase plugin](./docs/plugins/lakebase.md) or the Lakebase agent skill from [Databricks Agent Skills](./docs/development/ai-assisted-development.md).
|
|
57
|
+
|
|
58
|
+
## Caching[](#caching "Direct link to Caching")
|
|
59
|
+
|
|
60
|
+
### Does AppKit support caching?[](#does-appkit-support-caching "Direct link to Does AppKit support caching?")
|
|
61
|
+
|
|
62
|
+
Yes. The [Analytics plugin](./docs/plugins/analytics.md) — used for executing SQL queries against Databricks SQL Warehouses — supports an optional cache layer.
|
|
63
|
+
|
|
64
|
+
Caching is configured per plugin and can use either [Lakebase Autoscaling](https://docs.databricks.com/aws/en/oltp/) or an in-memory store, depending on the configuration.
|
|
65
|
+
|
|
66
|
+
If the Lakebase Autoscaling connection is configured, the AppKit-based app creates an `appkit` schema in the configured database with internal tables required for caching.
|
package/docs/plugins/caching.md
CHANGED
|
@@ -16,7 +16,9 @@ await createApp({
|
|
|
16
16
|
|
|
17
17
|
```
|
|
18
18
|
|
|
19
|
-
Storage auto-selects **Lakebase
|
|
19
|
+
Storage auto-selects **Lakebase Autoscaling persistent cache when healthy**, otherwise falls back to in-memory.
|
|
20
|
+
|
|
21
|
+
The database-backed cache requires the same Lakebase environment variables as the [Lakebase plugin](./docs/plugins/lakebase.md#environment-variables) (`PGHOST`, `PGDATABASE`, `LAKEBASE_ENDPOINT`, `PGSSLMODE`).
|
|
20
22
|
|
|
21
23
|
## Plugin-level caching[](#plugin-level-caching "Direct link to Plugin-level caching")
|
|
22
24
|
|
|
@@ -42,4 +42,4 @@ Exported from `@databricks/appkit`:
|
|
|
42
42
|
|
|
43
43
|
## Development mode behavior[](#development-mode-behavior "Direct link to Development mode behavior")
|
|
44
44
|
|
|
45
|
-
In local development (`NODE_ENV=development`), if `asUser(req)` is called without a user token, it logs a warning and
|
|
45
|
+
In local development (`NODE_ENV=development`), if `asUser(req)` is called without a user token, it logs a warning and skips user impersonation — the operation runs with the default credentials configured for the app instead.
|
package/docs/plugins/lakebase.md
CHANGED
|
@@ -17,7 +17,7 @@ The easiest way to get started with the Lakebase plugin is to use the Databricks
|
|
|
17
17
|
### Prerequisites[](#prerequisites "Direct link to Prerequisites")
|
|
18
18
|
|
|
19
19
|
* [Node.js](https://nodejs.org) v22+ environment with `npm`
|
|
20
|
-
* Databricks CLI (v0.
|
|
20
|
+
* Databricks CLI (v0.295.0 or higher): install and configure it according to the [official tutorial](https://docs.databricks.com/aws/en/dev-tools/cli/tutorial).
|
|
21
21
|
* A new Databricks app with AppKit installed. See [Bootstrap a new Databricks app](./docs.md#quick-start-options) for more details.
|
|
22
22
|
|
|
23
23
|
### Steps[](#steps "Direct link to Steps")
|
|
@@ -0,0 +1,223 @@
|
|
|
1
|
+
# Serving plugin
|
|
2
|
+
|
|
3
|
+
Provides an authenticated proxy to [Databricks Model Serving](https://docs.databricks.com/aws/en/machine-learning/model-serving) endpoints, with invoke and streaming support.
|
|
4
|
+
|
|
5
|
+
**Key features:**
|
|
6
|
+
|
|
7
|
+
* Named endpoint aliases for multiple serving endpoints
|
|
8
|
+
* Non-streaming (`invoke`) and SSE streaming (`stream`) invocation
|
|
9
|
+
* Automatic OpenAPI type generation for request/response schemas
|
|
10
|
+
* Request body filtering based on endpoint schema
|
|
11
|
+
* On-behalf-of (OBO) user execution
|
|
12
|
+
|
|
13
|
+
## Basic usage[](#basic-usage "Direct link to Basic usage")
|
|
14
|
+
|
|
15
|
+
```ts
|
|
16
|
+
import { createApp, server, serving } from "@databricks/appkit";
|
|
17
|
+
|
|
18
|
+
await createApp({
|
|
19
|
+
plugins: [
|
|
20
|
+
server(),
|
|
21
|
+
serving(),
|
|
22
|
+
],
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
With no configuration, the plugin reads `DATABRICKS_SERVING_ENDPOINT_NAME` from the environment and registers it under the `default` alias.
|
|
28
|
+
|
|
29
|
+
## Configuration options[](#configuration-options "Direct link to Configuration options")
|
|
30
|
+
|
|
31
|
+
| Option | Type | Default | Description |
|
|
32
|
+
| ----------- | -------------------------------- | ---------------------------------------------------------- | -------------------------------------- |
|
|
33
|
+
| `endpoints` | `Record<string, EndpointConfig>` | `{ default: { env: "DATABRICKS_SERVING_ENDPOINT_NAME" } }` | Map of alias names to endpoint configs |
|
|
34
|
+
| `timeout` | `number` | `120000` | Request timeout in ms |
|
|
35
|
+
|
|
36
|
+
### Endpoint aliases[](#endpoint-aliases "Direct link to Endpoint aliases")
|
|
37
|
+
|
|
38
|
+
Endpoint aliases let you reference multiple serving endpoints by name:
|
|
39
|
+
|
|
40
|
+
```ts
|
|
41
|
+
serving({
|
|
42
|
+
endpoints: {
|
|
43
|
+
llm: { env: "DATABRICKS_SERVING_ENDPOINT_NAME" },
|
|
44
|
+
classifier: { env: "DATABRICKS_SERVING_ENDPOINT_CLASSIFIER" },
|
|
45
|
+
},
|
|
46
|
+
})
|
|
47
|
+
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
Each alias maps to an environment variable holding the actual endpoint name. If an endpoint serves multiple models, you can use `servedModel` to bypass traffic routing and target a specific model directly:
|
|
51
|
+
|
|
52
|
+
```ts
|
|
53
|
+
serving({
|
|
54
|
+
endpoints: {
|
|
55
|
+
llm: { env: "DATABRICKS_SERVING_ENDPOINT_NAME", servedModel: "llama-v2" },
|
|
56
|
+
},
|
|
57
|
+
})
|
|
58
|
+
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
## Type generation[](#type-generation "Direct link to Type generation")
|
|
62
|
+
|
|
63
|
+
The `appKitServingTypesPlugin()` Vite plugin generates TypeScript types from your serving endpoints' OpenAPI schemas. **No manual setup needed** — the AppKit dev server includes this plugin automatically.
|
|
64
|
+
|
|
65
|
+
The plugin auto-discovers endpoint configuration from your server file (`server/index.ts` or `server/server.ts`).
|
|
66
|
+
|
|
67
|
+
Generated types provide:
|
|
68
|
+
|
|
69
|
+
* **Alias autocomplete** in both backend (`AppKit.serving("alias")`) and frontend hooks (`useServingStream`, `useServingInvoke`)
|
|
70
|
+
* **Typed request/response/chunk** per endpoint based on OpenAPI schemas
|
|
71
|
+
|
|
72
|
+
If an endpoint's OpenAPI schema is unavailable (not deployed, env var not set), the plugin generates generic fallback types. The endpoint is still usable — just without typed request/response.
|
|
73
|
+
|
|
74
|
+
note
|
|
75
|
+
|
|
76
|
+
Endpoints that don't define a streaming response schema in their OpenAPI spec will have `chunk: unknown`. For these endpoints, use `useServingInvoke` instead of `useServingStream` — the `response` type will still be properly typed.
|
|
77
|
+
|
|
78
|
+
## Environment variables[](#environment-variables "Direct link to Environment variables")
|
|
79
|
+
|
|
80
|
+
| Variable | Description |
|
|
81
|
+
| ---------------------------------- | --------------------------------------------------------------- |
|
|
82
|
+
| `DATABRICKS_SERVING_ENDPOINT_NAME` | Default endpoint name (used when `endpoints` config is omitted) |
|
|
83
|
+
|
|
84
|
+
When using named endpoints, define a custom environment variable per alias (e.g. `DATABRICKS_SERVING_ENDPOINT_CLASSIFIER`).
|
|
85
|
+
|
|
86
|
+
## Execution context[](#execution-context "Direct link to Execution context")
|
|
87
|
+
|
|
88
|
+
All serving routes execute on behalf of the authenticated user (OBO) by default, consistent with the Genie and Files plugins. This ensures per-user `CAN_QUERY` permissions are enforced on the serving endpoint.
|
|
89
|
+
|
|
90
|
+
For programmatic access via `exports()`, use `.asUser(req)` to run in user context:
|
|
91
|
+
|
|
92
|
+
```ts
|
|
93
|
+
// Service principal context (default)
|
|
94
|
+
const result = await AppKit.serving("llm").invoke({ messages });
|
|
95
|
+
|
|
96
|
+
// User context (recommended in route handlers)
|
|
97
|
+
const result = await AppKit.serving("llm").asUser(req).invoke({ messages });
|
|
98
|
+
|
|
99
|
+
```
|
|
100
|
+
|
|
101
|
+
## HTTP endpoints[](#http-endpoints "Direct link to HTTP endpoints")
|
|
102
|
+
|
|
103
|
+
### Named mode (with `endpoints` config)[](#named-mode-with-endpoints-config "Direct link to named-mode-with-endpoints-config")
|
|
104
|
+
|
|
105
|
+
* `POST /api/serving/:alias/invoke` — Non-streaming invocation
|
|
106
|
+
* `POST /api/serving/:alias/stream` — SSE streaming invocation
|
|
107
|
+
|
|
108
|
+
### Default mode (no `endpoints` config)[](#default-mode-no-endpoints-config "Direct link to default-mode-no-endpoints-config")
|
|
109
|
+
|
|
110
|
+
* `POST /api/serving/invoke` — Non-streaming invocation
|
|
111
|
+
* `POST /api/serving/stream` — SSE streaming invocation
|
|
112
|
+
|
|
113
|
+
### Request format[](#request-format "Direct link to Request format")
|
|
114
|
+
|
|
115
|
+
```text
|
|
116
|
+
POST /api/serving/:alias/invoke
|
|
117
|
+
Content-Type: application/json
|
|
118
|
+
|
|
119
|
+
{
|
|
120
|
+
"messages": [
|
|
121
|
+
{ "role": "user", "content": "Hello" }
|
|
122
|
+
]
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
## Programmatic access[](#programmatic-access "Direct link to Programmatic access")
|
|
128
|
+
|
|
129
|
+
The plugin exports `invoke` and `stream` methods for server-side use:
|
|
130
|
+
|
|
131
|
+
```ts
|
|
132
|
+
const AppKit = await createApp({
|
|
133
|
+
plugins: [
|
|
134
|
+
server(),
|
|
135
|
+
serving({
|
|
136
|
+
endpoints: {
|
|
137
|
+
llm: { env: "DATABRICKS_SERVING_ENDPOINT_NAME" },
|
|
138
|
+
},
|
|
139
|
+
}),
|
|
140
|
+
],
|
|
141
|
+
});
|
|
142
|
+
|
|
143
|
+
// Non-streaming
|
|
144
|
+
const result = await AppKit.serving("llm").invoke({
|
|
145
|
+
messages: [{ role: "user", content: "Hello" }],
|
|
146
|
+
});
|
|
147
|
+
|
|
148
|
+
// Streaming
|
|
149
|
+
for await (const chunk of AppKit.serving("llm").stream({
|
|
150
|
+
messages: [{ role: "user", content: "Hello" }],
|
|
151
|
+
})) {
|
|
152
|
+
console.log(chunk);
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
```
|
|
156
|
+
|
|
157
|
+
## Frontend hooks[](#frontend-hooks "Direct link to Frontend hooks")
|
|
158
|
+
|
|
159
|
+
The `@databricks/appkit-ui` package provides React hooks for serving endpoints:
|
|
160
|
+
|
|
161
|
+
### useServingStream[](#useservingstream "Direct link to useServingStream")
|
|
162
|
+
|
|
163
|
+
Streaming invocation via SSE:
|
|
164
|
+
|
|
165
|
+
```tsx
|
|
166
|
+
import { useServingStream } from "@databricks/appkit-ui/react";
|
|
167
|
+
|
|
168
|
+
function ChatStream() {
|
|
169
|
+
const { stream, chunks, streaming, error, reset } = useServingStream(
|
|
170
|
+
{ messages: [{ role: "user", content: "Hello" }] },
|
|
171
|
+
{
|
|
172
|
+
alias: "llm",
|
|
173
|
+
onComplete: (finalChunks) => {
|
|
174
|
+
// Called with all accumulated chunks when the stream finishes
|
|
175
|
+
console.log("Stream done, got", finalChunks.length, "chunks");
|
|
176
|
+
},
|
|
177
|
+
},
|
|
178
|
+
);
|
|
179
|
+
|
|
180
|
+
return (
|
|
181
|
+
<>
|
|
182
|
+
<button onClick={stream} disabled={streaming}>Send</button>
|
|
183
|
+
<button onClick={reset}>Reset</button>
|
|
184
|
+
{chunks.map((chunk, i) => <pre key={i}>{JSON.stringify(chunk)}</pre>)}
|
|
185
|
+
{error && <p>{error}</p>}
|
|
186
|
+
</>
|
|
187
|
+
);
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
```
|
|
191
|
+
|
|
192
|
+
### useServingInvoke[](#useservinginvoke "Direct link to useServingInvoke")
|
|
193
|
+
|
|
194
|
+
Non-streaming invocation. `invoke()` returns a promise with the response data (or `null` on error):
|
|
195
|
+
|
|
196
|
+
```tsx
|
|
197
|
+
import { useServingInvoke } from "@databricks/appkit-ui/react";
|
|
198
|
+
|
|
199
|
+
function Classify() {
|
|
200
|
+
const { invoke, data, loading, error } = useServingInvoke(
|
|
201
|
+
{ inputs: ["sample text"] },
|
|
202
|
+
{ alias: "classifier" },
|
|
203
|
+
);
|
|
204
|
+
|
|
205
|
+
async function handleClick() {
|
|
206
|
+
const result = await invoke();
|
|
207
|
+
if (result) {
|
|
208
|
+
console.log("Classification result:", result);
|
|
209
|
+
}
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
return (
|
|
213
|
+
<>
|
|
214
|
+
<button onClick={handleClick} disabled={loading}>Classify</button>
|
|
215
|
+
{data && <pre>{JSON.stringify(data)}</pre>}
|
|
216
|
+
{error && <p>{error}</p>}
|
|
217
|
+
</>
|
|
218
|
+
);
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
```
|
|
222
|
+
|
|
223
|
+
Both hooks accept `autoStart: true` to invoke automatically on mount.
|
package/docs.md
CHANGED
|
@@ -19,7 +19,7 @@ AppKit simplifies building data applications on Databricks by providing:
|
|
|
19
19
|
## Prerequisites[](#prerequisites "Direct link to Prerequisites")
|
|
20
20
|
|
|
21
21
|
* [Node.js](https://nodejs.org) v22+ environment with `npm`
|
|
22
|
-
* Databricks CLI (v0.
|
|
22
|
+
* Databricks CLI (v0.295.0 or higher): install and configure it according to the [official tutorial](https://docs.databricks.com/aws/en/dev-tools/cli/tutorial).
|
|
23
23
|
|
|
24
24
|
## Quick start options[](#quick-start-options "Direct link to Quick start options")
|
|
25
25
|
|
|
@@ -37,7 +37,7 @@ Databricks AppKit is designed to work with AI coding assistants through Agent Sk
|
|
|
37
37
|
Install Agent Skills and configure it for use with your preferred AI assistant:
|
|
38
38
|
|
|
39
39
|
```bash
|
|
40
|
-
databricks experimental aitools
|
|
40
|
+
databricks experimental aitools install
|
|
41
41
|
|
|
42
42
|
```
|
|
43
43
|
|
package/llms.txt
CHANGED
|
@@ -27,6 +27,7 @@ npx @databricks/appkit docs <query>
|
|
|
27
27
|
- [Configuration](./docs/configuration.md): This guide covers environment variables and configuration options for AppKit applications.
|
|
28
28
|
- [Core principles](./docs/core-principles.md): Learn about the fundamental concepts and principles behind AppKit.
|
|
29
29
|
- [Development](./docs/development.md): AppKit provides multiple development workflows to suit different needs: local development with hot reload, AI-assisted development with Agent Skills, and remote tunneling to deployed backends.
|
|
30
|
+
- [FAQ](./docs/faq.md): Integrations
|
|
30
31
|
- [Plugins](./docs/plugins.md): Plugins are modular extensions that add capabilities to your AppKit application. They follow a defined lifecycle and have access to shared services like caching, telemetry, and streaming.
|
|
31
32
|
|
|
32
33
|
## Development
|
|
@@ -36,6 +37,7 @@ npx @databricks/appkit docs <query>
|
|
|
36
37
|
- [Local development](./docs/development/local-development.md): Once your app is bootstrapped according to the Manual quick start guide, you can start the development server with hot reload for both UI and backend code.
|
|
37
38
|
- [Project setup](./docs/development/project-setup.md): This guide covers the recommended project structure and scaffolding for AppKit applications.
|
|
38
39
|
- [Remote Bridge](./docs/development/remote-bridge.md): Remote bridge allows you to develop against a deployed backend while keeping your UI and queries local. This is useful for testing against production data or debugging deployed backend code without redeploying your app.
|
|
40
|
+
- [Templates](./docs/development/templates.md): AppKit uses a template system powered by the Databricks CLI's databricks apps init command. Templates define the project structure, and .tmpl files are processed with Go's text/template engine to generate customized output.
|
|
39
41
|
- [Type generation](./docs/development/type-generation.md): AppKit can automatically generate TypeScript types for your SQL queries, providing end-to-end type safety from database to UI.
|
|
40
42
|
|
|
41
43
|
## Plugins
|
|
@@ -49,6 +51,7 @@ npx @databricks/appkit docs <query>
|
|
|
49
51
|
- [Lakebase plugin](./docs/plugins/lakebase.md): Provides a PostgreSQL connection pool for Databricks Lakebase Autoscaling with automatic OAuth token refresh.
|
|
50
52
|
- [Plugin management](./docs/plugins/plugin-management.md): AppKit includes a CLI for managing plugins. All commands are available under npx @databricks/appkit plugin.
|
|
51
53
|
- [Server plugin](./docs/plugins/server.md): Provides HTTP server capabilities with development and production modes.
|
|
54
|
+
- [Serving plugin](./docs/plugins/serving.md): Provides an authenticated proxy to Databricks Model Serving endpoints, with invoke and streaming support.
|
|
52
55
|
|
|
53
56
|
## appkit API reference [collapsed]
|
|
54
57
|
|
|
@@ -66,9 +69,12 @@ npx @databricks/appkit docs <query>
|
|
|
66
69
|
- [Class: ValidationError](./docs/api/appkit/Class.ValidationError.md): Error thrown when input validation fails.
|
|
67
70
|
- [Enumeration: RequestedClaimsPermissionSet](./docs/api/appkit/Enumeration.RequestedClaimsPermissionSet.md): Permission set for Unity Catalog table access
|
|
68
71
|
- [Enumeration: ResourceType](./docs/api/appkit/Enumeration.ResourceType.md): Resource types from schema $defs.resourceType.enum
|
|
72
|
+
- [Function: appKitServingTypesPlugin()](./docs/api/appkit/Function.appKitServingTypesPlugin.md): Vite plugin to generate TypeScript types for AppKit serving endpoints.
|
|
69
73
|
- [Function: appKitTypesPlugin()](./docs/api/appkit/Function.appKitTypesPlugin.md): Vite plugin to generate types for AppKit queries.
|
|
70
74
|
- [Function: createApp()](./docs/api/appkit/Function.createApp.md): Bootstraps AppKit with the provided configuration.
|
|
71
75
|
- [Function: createLakebasePool()](./docs/api/appkit/Function.createLakebasePool.md): Create a Lakebase pool with appkit's logger integration.
|
|
76
|
+
- [Function: extractServingEndpoints()](./docs/api/appkit/Function.extractServingEndpoints.md): Extract serving endpoint config from a server file by AST-parsing it.
|
|
77
|
+
- [Function: findServerFile()](./docs/api/appkit/Function.findServerFile.md): Find the server entry file by checking candidate paths in order.
|
|
72
78
|
- [Function: generateDatabaseCredential()](./docs/api/appkit/Function.generateDatabaseCredential.md): Generate OAuth credentials for Postgres database connection using the proper Postgres API.
|
|
73
79
|
- [Function: getExecutionContext()](./docs/api/appkit/Function.getExecutionContext.md): Get the current execution context.
|
|
74
80
|
- [Function: getLakebaseOrmConfig()](./docs/api/appkit/Function.getLakebaseOrmConfig.md): Get Lakebase connection configuration for ORMs that don't accept pg.Pool directly.
|
|
@@ -81,6 +87,7 @@ npx @databricks/appkit docs <query>
|
|
|
81
87
|
- [Interface: BasePluginConfig](./docs/api/appkit/Interface.BasePluginConfig.md): Base configuration interface for AppKit plugins
|
|
82
88
|
- [Interface: CacheConfig](./docs/api/appkit/Interface.CacheConfig.md): Configuration for the CacheInterceptor. Controls TTL, size limits, storage backend, and probabilistic cleanup.
|
|
83
89
|
- [Interface: DatabaseCredential](./docs/api/appkit/Interface.DatabaseCredential.md): Database credentials with OAuth token for Postgres connection
|
|
90
|
+
- [Interface: EndpointConfig](./docs/api/appkit/Interface.EndpointConfig.md): Properties
|
|
84
91
|
- [Interface: GenerateDatabaseCredentialRequest](./docs/api/appkit/Interface.GenerateDatabaseCredentialRequest.md): Request parameters for generating database OAuth credentials
|
|
85
92
|
- [Interface: ITelemetry](./docs/api/appkit/Interface.ITelemetry.md): Plugin-facing interface for OpenTelemetry instrumentation.
|
|
86
93
|
- [Interface: LakebasePoolConfig](./docs/api/appkit/Interface.LakebasePoolConfig.md): Configuration for creating a Lakebase connection pool
|
|
@@ -90,13 +97,17 @@ npx @databricks/appkit docs <query>
|
|
|
90
97
|
- [Interface: ResourceEntry](./docs/api/appkit/Interface.ResourceEntry.md): Internal representation of a resource in the registry.
|
|
91
98
|
- [Interface: ResourceFieldEntry](./docs/api/appkit/Interface.ResourceFieldEntry.md): Defines a single field for a resource. Each field has its own environment variable and optional description. Single-value types use one key (e.g. id); multi-value types (database, secret) use multiple (e.g. instancename, databasename or scope, key).
|
|
92
99
|
- [Interface: ResourceRequirement](./docs/api/appkit/Interface.ResourceRequirement.md): Declares a resource requirement for a plugin.
|
|
100
|
+
- [Interface: ServingEndpointEntry](./docs/api/appkit/Interface.ServingEndpointEntry.md): Shape of a single registry entry.
|
|
101
|
+
- [Interface: ServingEndpointRegistry](./docs/api/appkit/Interface.ServingEndpointRegistry.md): Registry interface for serving endpoint type generation.
|
|
93
102
|
- [Interface: StreamExecutionSettings](./docs/api/appkit/Interface.StreamExecutionSettings.md): Execution settings for streaming endpoints. Extends PluginExecutionSettings with SSE stream configuration.
|
|
94
103
|
- [Interface: TelemetryConfig](./docs/api/appkit/Interface.TelemetryConfig.md): OpenTelemetry configuration for AppKit applications
|
|
95
104
|
- [Interface: ValidationResult](./docs/api/appkit/Interface.ValidationResult.md): Result of validating all registered resources against the environment.
|
|
96
105
|
- [Type Alias: ConfigSchema](./docs/api/appkit/TypeAlias.ConfigSchema.md): Configuration schema definition for plugin config.
|
|
106
|
+
- [Type Alias: ExecutionResult<T>](./docs/api/appkit/TypeAlias.ExecutionResult.md): Discriminated union for plugin execution results.
|
|
97
107
|
- [Type Alias: IAppRouter](./docs/api/appkit/TypeAlias.IAppRouter.md): Express router type for plugin route registration
|
|
98
108
|
- [Type Alias: PluginData<T, U, N>](./docs/api/appkit/TypeAlias.PluginData.md): Tuple of plugin class, config, and name. Created by toPlugin() and passed to createApp().
|
|
99
109
|
- [Type Alias: ResourcePermission](./docs/api/appkit/TypeAlias.ResourcePermission.md): Union of all possible permission levels across all resource types.
|
|
110
|
+
- [Type Alias: ServingFactory](./docs/api/appkit/TypeAlias.ServingFactory.md): Factory function returned by AppKit.serving.
|
|
100
111
|
- [Type Alias: ToPlugin()<T, U, N>](./docs/api/appkit/TypeAlias.ToPlugin.md): Factory function type returned by toPlugin(). Accepts optional config and returns a PluginData tuple.
|
|
101
112
|
- [Variable: sql](./docs/api/appkit/Variable.sql.md): SQL helper namespace
|
|
102
113
|
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@databricks/appkit",
|
|
3
3
|
"type": "module",
|
|
4
|
-
"version": "0.
|
|
4
|
+
"version": "0.23.0",
|
|
5
5
|
"main": "./dist/index.js",
|
|
6
6
|
"types": "./dist/index.d.ts",
|
|
7
7
|
"packageManager": "pnpm@10.21.0",
|
|
@@ -21,7 +21,8 @@
|
|
|
21
21
|
"llms.txt",
|
|
22
22
|
"README.md",
|
|
23
23
|
"DCO",
|
|
24
|
-
"NOTICE.md"
|
|
24
|
+
"NOTICE.md",
|
|
25
|
+
"sbom.cdx.json"
|
|
25
26
|
],
|
|
26
27
|
"exports": {
|
|
27
28
|
".": "./dist/index.js",
|
|
@@ -40,44 +41,44 @@
|
|
|
40
41
|
"postinstall": "node scripts/postinstall.js"
|
|
41
42
|
},
|
|
42
43
|
"dependencies": {
|
|
44
|
+
"@ast-grep/napi": "0.37.0",
|
|
43
45
|
"@databricks/lakebase": "0.2.0",
|
|
44
|
-
"@databricks/sdk-experimental": "
|
|
45
|
-
"@opentelemetry/api": "
|
|
46
|
-
"@opentelemetry/api-logs": "
|
|
47
|
-
"@opentelemetry/auto-instrumentations-node": "
|
|
48
|
-
"@opentelemetry/exporter-logs-otlp-proto": "
|
|
49
|
-
"@opentelemetry/exporter-metrics-otlp-proto": "
|
|
50
|
-
"@opentelemetry/exporter-trace-otlp-proto": "
|
|
51
|
-
"@opentelemetry/instrumentation": "
|
|
52
|
-
"@opentelemetry/instrumentation-express": "
|
|
53
|
-
"@opentelemetry/instrumentation-http": "
|
|
54
|
-
"@opentelemetry/resources": "
|
|
55
|
-
"@opentelemetry/sdk-logs": "
|
|
56
|
-
"@opentelemetry/sdk-metrics": "
|
|
57
|
-
"@opentelemetry/sdk-node": "
|
|
58
|
-
"@opentelemetry/sdk-trace-base": "
|
|
59
|
-
"@opentelemetry/semantic-conventions": "
|
|
60
|
-
"@types/semver": "
|
|
61
|
-
"dotenv": "
|
|
62
|
-
"express": "
|
|
63
|
-
"obug": "
|
|
64
|
-
"pg": "
|
|
65
|
-
"picocolors": "
|
|
66
|
-
"semver": "
|
|
46
|
+
"@databricks/sdk-experimental": "0.16.0",
|
|
47
|
+
"@opentelemetry/api": "1.9.0",
|
|
48
|
+
"@opentelemetry/api-logs": "0.208.0",
|
|
49
|
+
"@opentelemetry/auto-instrumentations-node": "0.67.2",
|
|
50
|
+
"@opentelemetry/exporter-logs-otlp-proto": "0.208.0",
|
|
51
|
+
"@opentelemetry/exporter-metrics-otlp-proto": "0.208.0",
|
|
52
|
+
"@opentelemetry/exporter-trace-otlp-proto": "0.208.0",
|
|
53
|
+
"@opentelemetry/instrumentation": "0.208.0",
|
|
54
|
+
"@opentelemetry/instrumentation-express": "0.57.0",
|
|
55
|
+
"@opentelemetry/instrumentation-http": "0.208.0",
|
|
56
|
+
"@opentelemetry/resources": "2.2.0",
|
|
57
|
+
"@opentelemetry/sdk-logs": "0.208.0",
|
|
58
|
+
"@opentelemetry/sdk-metrics": "2.2.0",
|
|
59
|
+
"@opentelemetry/sdk-node": "0.208.0",
|
|
60
|
+
"@opentelemetry/sdk-trace-base": "2.6.0",
|
|
61
|
+
"@opentelemetry/semantic-conventions": "1.38.0",
|
|
62
|
+
"@types/semver": "7.7.1",
|
|
63
|
+
"dotenv": "16.6.1",
|
|
64
|
+
"express": "4.22.0",
|
|
65
|
+
"obug": "2.1.1",
|
|
66
|
+
"pg": "8.18.0",
|
|
67
|
+
"picocolors": "1.1.1",
|
|
68
|
+
"semver": "7.7.3",
|
|
67
69
|
"vite": "npm:rolldown-vite@7.1.14",
|
|
68
|
-
"ws": "
|
|
69
|
-
"
|
|
70
|
-
"ajv": "
|
|
71
|
-
"
|
|
72
|
-
"
|
|
73
|
-
"commander": "^12.1.0"
|
|
70
|
+
"ws": "8.18.3",
|
|
71
|
+
"ajv": "8.17.1",
|
|
72
|
+
"ajv-formats": "3.0.1",
|
|
73
|
+
"@clack/prompts": "1.0.1",
|
|
74
|
+
"commander": "12.1.0"
|
|
74
75
|
},
|
|
75
76
|
"devDependencies": {
|
|
76
|
-
"@types/express": "
|
|
77
|
-
"@types/json-schema": "
|
|
78
|
-
"@types/pg": "
|
|
79
|
-
"@types/ws": "
|
|
80
|
-
"@vitejs/plugin-react": "
|
|
77
|
+
"@types/express": "4.17.25",
|
|
78
|
+
"@types/json-schema": "7.0.15",
|
|
79
|
+
"@types/pg": "8.16.0",
|
|
80
|
+
"@types/ws": "8.18.1",
|
|
81
|
+
"@vitejs/plugin-react": "5.1.1"
|
|
81
82
|
},
|
|
82
83
|
"overrides": {
|
|
83
84
|
"vite": "npm:rolldown-vite@7.1.14"
|