@databricks/appkit 0.26.0 → 0.27.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/CLAUDE.md +7 -0
  2. package/dist/appkit/package.js +1 -1
  3. package/dist/connectors/index.js +2 -0
  4. package/dist/connectors/jobs/client.d.ts +2 -0
  5. package/dist/connectors/jobs/client.js +132 -0
  6. package/dist/connectors/jobs/client.js.map +1 -0
  7. package/dist/connectors/jobs/index.d.ts +2 -0
  8. package/dist/connectors/jobs/index.js +3 -0
  9. package/dist/connectors/jobs/types.d.ts +10 -0
  10. package/dist/connectors/jobs/types.d.ts.map +1 -0
  11. package/dist/connectors/lakebase-v1/client.js.map +1 -1
  12. package/dist/connectors/sql-warehouse/client.js +1 -0
  13. package/dist/connectors/sql-warehouse/client.js.map +1 -1
  14. package/dist/index.d.ts +6 -1
  15. package/dist/index.js +2 -1
  16. package/dist/index.js.map +1 -1
  17. package/dist/plugins/index.d.ts +3 -0
  18. package/dist/plugins/index.js +2 -0
  19. package/dist/plugins/jobs/defaults.js +45 -0
  20. package/dist/plugins/jobs/defaults.js.map +1 -0
  21. package/dist/plugins/jobs/index.d.ts +2 -0
  22. package/dist/plugins/jobs/index.js +3 -0
  23. package/dist/plugins/jobs/manifest.js +40 -0
  24. package/dist/plugins/jobs/manifest.js.map +1 -0
  25. package/dist/plugins/jobs/params.js +35 -0
  26. package/dist/plugins/jobs/params.js.map +1 -0
  27. package/dist/plugins/jobs/plugin.d.ts +66 -0
  28. package/dist/plugins/jobs/plugin.d.ts.map +1 -0
  29. package/dist/plugins/jobs/plugin.js +531 -0
  30. package/dist/plugins/jobs/plugin.js.map +1 -0
  31. package/dist/plugins/jobs/types.d.ts +84 -0
  32. package/dist/plugins/jobs/types.d.ts.map +1 -0
  33. package/dist/registry/manifest-loader.d.ts +2 -2
  34. package/dist/registry/manifest-loader.d.ts.map +1 -1
  35. package/dist/stream/stream-manager.d.ts.map +1 -1
  36. package/dist/stream/stream-manager.js +6 -0
  37. package/dist/stream/stream-manager.js.map +1 -1
  38. package/docs/api/appkit/Interface.BasePluginConfig.md +4 -0
  39. package/docs/api/appkit/Interface.IJobsConfig.md +86 -0
  40. package/docs/api/appkit/Interface.JobAPI.md +163 -0
  41. package/docs/api/appkit/Interface.JobConfig.md +36 -0
  42. package/docs/api/appkit/Interface.JobsConnectorConfig.md +10 -0
  43. package/docs/api/appkit/TypeAlias.JobHandle.md +29 -0
  44. package/docs/api/appkit/TypeAlias.JobsExport.md +34 -0
  45. package/docs/api/appkit.md +6 -0
  46. package/docs/plugins/jobs.md +252 -0
  47. package/docs/plugins.md +2 -1
  48. package/llms.txt +7 -0
  49. package/package.json +2 -1
  50. package/sbom.cdx.json +1 -1
@@ -0,0 +1,40 @@
//#region src/plugins/jobs/manifest.json
/** Static Jobs-plugin manifest, inlined from manifest.json at build time. */
const manifest_default = {
  $schema: "https://databricks.github.io/appkit/schemas/plugin-manifest.schema.json",
  name: "jobs",
  displayName: "Jobs Plugin",
  description: "Manage Databricks Lakeflow Jobs.",
  resources: {
    required: [
      {
        type: "job",
        alias: "Job",
        resourceKey: "job",
        description: "A Databricks job to trigger and monitor",
        permission: "CAN_MANAGE_RUN",
        fields: {
          id: {
            env: "DATABRICKS_JOB_ID",
            description: "Numeric Databricks job ID. Find it in the Jobs UI or via `databricks jobs list`.",
          },
        },
      },
    ],
    optional: [],
  },
  config: {
    schema: {
      type: "object",
      properties: {
        timeout: {
          type: "number",
          default: 60000,
          description: "Default timeout for Jobs API calls in milliseconds",
        },
        pollIntervalMs: {
          type: "number",
          default: 5000,
          description: "Poll interval for waiting on run completion in milliseconds",
        },
      },
    },
  },
};

//#endregion
export { manifest_default as default };
//# sourceMappingURL=manifest.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"manifest.js","names":[],"sources":["../../../src/plugins/jobs/manifest.json"],"sourcesContent":[""],"mappings":""}
@@ -0,0 +1,35 @@
//#region src/plugins/jobs/params.ts
/** Throw if any value is not a string, number, or boolean. */
function assertPrimitiveValues(params) {
  for (const [key, value] of Object.entries(params)) {
    if (value === null || value === undefined) continue;
    if (typeof value !== "object") continue;
    throw new Error(`Parameter "${key}" must be a primitive value, got ${Array.isArray(value) ? "array" : "object"}`);
  }
}
/** Coerce every value of a record to its string form (SDK expects string maps). */
function toStringRecord(params) {
  return Object.fromEntries(Object.entries(params).map(([key, value]) => [key, String(value)]));
}
/** Coerce an optional `args` array to positional string arguments. */
function toPositionalArgs(params) {
  return Array.isArray(params.args) ? params.args.map(String) : [];
}
/**
 * Maps validated parameters to SDK request fields based on the task type.
 * This is a pure function — stateless and testable in isolation.
 */
function mapParams(taskType, params) {
  switch (taskType) {
    case "notebook":
      assertPrimitiveValues(params);
      return { notebook_params: toStringRecord(params) };
    case "python_wheel":
      assertPrimitiveValues(params);
      return { python_named_params: toStringRecord(params) };
    case "python_script":
      return { python_params: toPositionalArgs(params) };
    case "spark_jar":
      return { jar_params: toPositionalArgs(params) };
    case "sql":
      assertPrimitiveValues(params);
      return { sql_params: toStringRecord(params) };
    case "dbt":
      // dbt command lines are fixed in the job definition; reject extras loudly.
      if (Object.keys(params).length > 0) throw new Error("dbt tasks do not accept parameters");
      return {};
    default:
      throw new Error(`Unknown task type: ${taskType}`);
  }
}

//#endregion
export { mapParams };
//# sourceMappingURL=params.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"params.js","names":[],"sources":["../../../src/plugins/jobs/params.ts"],"sourcesContent":["import type { TaskType } from \"./types\";\n\n/** Throw if any value is not a string, number, or boolean. */\nfunction assertPrimitiveValues(params: Record<string, unknown>): void {\n for (const [k, v] of Object.entries(params)) {\n if (v !== null && v !== undefined && typeof v === \"object\") {\n throw new Error(\n `Parameter \"${k}\" must be a primitive value, got ${Array.isArray(v) ? \"array\" : \"object\"}`,\n );\n }\n }\n}\n\n/**\n * Maps validated parameters to SDK request fields based on the task type.\n * This is a pure function — stateless and testable in isolation.\n */\nexport function mapParams(\n taskType: TaskType,\n params: Record<string, unknown>,\n): Record<string, unknown> {\n switch (taskType) {\n case \"notebook\":\n // notebook_params expects Record<string, string>, values coerced to string\n assertPrimitiveValues(params);\n return {\n notebook_params: Object.fromEntries(\n Object.entries(params).map(([k, v]) => [k, String(v)]),\n ),\n };\n case \"python_wheel\":\n assertPrimitiveValues(params);\n return {\n python_named_params: Object.fromEntries(\n Object.entries(params).map(([k, v]) => [k, String(v)]),\n ),\n };\n case \"python_script\":\n // python_params expects string[] (positional args)\n return {\n python_params: Array.isArray(params.args)\n ? params.args.map(String)\n : [],\n };\n case \"spark_jar\":\n // jar_params expects string[]\n return {\n jar_params: Array.isArray(params.args) ? 
params.args.map(String) : [],\n };\n case \"sql\":\n assertPrimitiveValues(params);\n return {\n sql_params: Object.fromEntries(\n Object.entries(params).map(([k, v]) => [k, String(v)]),\n ),\n };\n case \"dbt\":\n if (Object.keys(params).length > 0) {\n throw new Error(\"dbt tasks do not accept parameters\");\n }\n return {};\n default: {\n const _exhaustive: never = taskType;\n throw new Error(`Unknown task type: ${_exhaustive}`);\n }\n }\n}\n"],"mappings":";;AAGA,SAAS,sBAAsB,QAAuC;AACpE,MAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,OAAO,CACzC,KAAI,MAAM,QAAQ,MAAM,UAAa,OAAO,MAAM,SAChD,OAAM,IAAI,MACR,cAAc,EAAE,mCAAmC,MAAM,QAAQ,EAAE,GAAG,UAAU,WACjF;;;;;;AASP,SAAgB,UACd,UACA,QACyB;AACzB,SAAQ,UAAR;EACE,KAAK;AAEH,yBAAsB,OAAO;AAC7B,UAAO,EACL,iBAAiB,OAAO,YACtB,OAAO,QAAQ,OAAO,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,GAAG,OAAO,EAAE,CAAC,CAAC,CACvD,EACF;EACH,KAAK;AACH,yBAAsB,OAAO;AAC7B,UAAO,EACL,qBAAqB,OAAO,YAC1B,OAAO,QAAQ,OAAO,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,GAAG,OAAO,EAAE,CAAC,CAAC,CACvD,EACF;EACH,KAAK,gBAEH,QAAO,EACL,eAAe,MAAM,QAAQ,OAAO,KAAK,GACrC,OAAO,KAAK,IAAI,OAAO,GACvB,EAAE,EACP;EACH,KAAK,YAEH,QAAO,EACL,YAAY,MAAM,QAAQ,OAAO,KAAK,GAAG,OAAO,KAAK,IAAI,OAAO,GAAG,EAAE,EACtE;EACH,KAAK;AACH,yBAAsB,OAAO;AAC7B,UAAO,EACL,YAAY,OAAO,YACjB,OAAO,QAAQ,OAAO,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,GAAG,OAAO,EAAE,CAAC,CAAC,CACvD,EACF;EACH,KAAK;AACH,OAAI,OAAO,KAAK,OAAO,CAAC,SAAS,EAC/B,OAAM,IAAI,MAAM,qCAAqC;AAEvD,UAAO,EAAE;EACX,SAAS;GACP,MAAM,cAAqB;AAC3B,SAAM,IAAI,MAAM,sBAAsB,cAAc"}
@@ -0,0 +1,66 @@
import { IAppRouter, ToPlugin } from "../../shared/src/plugin.js";
import "../../shared/src/index.js";
import { Plugin } from "../../plugin/plugin.js";
import "../../plugin/index.js";
import { PluginManifest, ResourceRequirement } from "../../registry/types.js";
import "../../registry/index.js";
import { IJobsConfig, JobAPI, JobConfig, JobsExport } from "./types.js";

//#region src/plugins/jobs/plugin.d.ts
/**
 * Plugin exposing configured Databricks jobs as a server-side API and HTTP routes.
 */
declare class JobsPlugin extends Plugin {
  static manifest: PluginManifest;
  protected config: IJobsConfig;
  // Jobs connector used for all Databricks Jobs API calls.
  private connector;
  // Map of job key -> numeric job ID (resolved from env vars).
  private jobIds;
  // Map of job key -> per-job configuration.
  private jobConfigs;
  // All discovered + configured job keys.
  private jobKeys;
  /**
   * Scans process.env for DATABRICKS_JOB_* keys and merges with explicit config.
   * Explicit config wins for per-job overrides; auto-discovered jobs get default `{}` config.
   */
  static discoverJobs(config: IJobsConfig): Record<string, JobConfig>;
  /**
   * Generates resource requirements dynamically from discovered + configured jobs.
   * Each job key maps to a `DATABRICKS_JOB_{KEY_UPPERCASE}` env var (or `DATABRICKS_JOB_ID` for "default").
   */
  static getResourceRequirements(config: IJobsConfig): ResourceRequirement[];
  constructor(config: IJobsConfig);
  // Logs the discovered job keys; no remote calls are made here.
  setup(): Promise<void>;
  // Workspace client resolved from the current execution context.
  private get client();
  // Returns the numeric job ID for a key, throwing if none is configured.
  private getJobId;
  // Builds per-call execution settings for read (cached) operations.
  private _readSettings;
  // Builds per-call execution settings for write (uncached) operations.
  private _writeSettings;
  /**
   * Validates params against the job's Zod schema (if any) and maps them
   * to SDK request fields based on the task type. Shared by runNow and runAndWait.
   */
  private _validateAndMap;
  /**
   * Creates a JobAPI for a specific configured job key.
   * Each method is scoped to the job's configured ID.
   */
  protected createJobAPI(jobKey: string): JobAPI;
  /**
   * Resolve `:jobKey` from the request. Returns the key and ID,
   * or sends a 404 and returns `{ jobKey: undefined, jobId: undefined }`.
   */
  private _resolveJob;
  // Sends a JSON error body whose message is derived from the HTTP status code only.
  private _sendStatusError;
  /**
   * Validate params from an HTTP request body. Eager validation lets streaming
   * requests get a clean 400 instead of a generic SSE error event. Throws
   * ValidationError so handlers can map to a 400 response via their catch block.
   */
  private _parseRunParams;
  // POST /:jobKey/run handler: sync trigger or streaming run-and-wait.
  private _handleRun;
  injectRoutes(router: IAppRouter): void;
  exports(): JobsExport;
  // Serializable per-job metadata (JSON-schema params, task type) for the client.
  clientConfig(): Record<string, unknown>;
}
/**
 * @internal
 */
declare const jobs: ToPlugin<typeof JobsPlugin, IJobsConfig, string>;
//#endregion
export { jobs };
//# sourceMappingURL=plugin.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"plugin.d.ts","names":[],"sources":["../../../src/plugins/jobs/plugin.ts"],"mappings":";;;;;;;;;cAsFM,UAAA,SAAmB,MAAA;EAAA,OAChB,QAAA,EAAuB,cAAA;EAAA,UAEZ,MAAA,EAAQ,WAAA;EAAA,QAClB,SAAA;EAAA,QACA,MAAA;EAAA,QACA,UAAA;EAAA,QACA,OAAA;;;;;SAMD,YAAA,CAAa,MAAA,EAAQ,WAAA,GAAc,MAAA,SAAe,SAAA;EAAf;;;;EAAA,OAgCnC,uBAAA,CAAwB,MAAA,EAAQ,WAAA,GAAc,mBAAA;cAqBzC,MAAA,EAAQ,WAAA;EA0Bd,KAAA,CAAA,GAAK,OAAA;EAAA,YAMC,MAAA,CAAA;EAAA,QAIJ,QAAA;EAAA,QAcA,aAAA;EAAA,QAYA,cAAA;EAhIqB;;;;EAAA,QA6IrB,eAAA;EA1IkB;;;;EAAA,UAoKhB,YAAA,CAAa,MAAA,WAAiB,MAAA;EA1JjC;;;;EAAA,QAgXC,WAAA;EAAA,QA0BA,gBAAA;EA1W+B;;;;;EAAA,QAsX/B,eAAA;EAAA,QAuCM,UAAA;EA4Cd,YAAA,CAAa,MAAA,EAAQ,UAAA;EA6IrB,OAAA,CAAA,GAAW,UAAA;EAsBX,YAAA,CAAA,GAAgB,MAAA;AAAA;;;;cAiBL,IAAA,EAAI,QAAA,QAAA,UAAA,EAAA,WAAA"}
@@ -0,0 +1,531 @@
1
+ import { createLogger } from "../../logging/logger.js";
2
+ import { ExecutionError } from "../../errors/execution.js";
3
+ import { ValidationError } from "../../errors/validation.js";
4
+ import { init_errors } from "../../errors/index.js";
5
+ import { getCurrentUserId, getWorkspaceClient } from "../../context/execution-context.js";
6
+ import { init_context } from "../../context/index.js";
7
+ import { ResourceType } from "../../registry/types.generated.js";
8
+ import "../../registry/index.js";
9
+ import { Plugin } from "../../plugin/plugin.js";
10
+ import { toPlugin } from "../../plugin/to-plugin.js";
11
+ import "../../plugin/index.js";
12
+ import { JobsConnector } from "../../connectors/jobs/client.js";
13
+ import "../../connectors/jobs/index.js";
14
+ import { JOBS_READ_DEFAULTS, JOBS_STREAM_DEFAULTS, JOBS_WRITE_DEFAULTS } from "./defaults.js";
15
+ import manifest_default from "./manifest.js";
16
+ import { mapParams } from "./params.js";
17
+ import { STATUS_CODES } from "node:http";
18
+ import { toJSONSchema } from "zod";
19
+

//#region src/plugins/jobs/plugin.ts
// Bundler-generated lazy initializers — must run before the context/error
// helpers imported above are used.
init_context();
init_errors();
const logger = createLogger("jobs");
// Default runAndWait timeout: 6e5 ms = 10 minutes.
const DEFAULT_WAIT_TIMEOUT = 6e5;
// Default poll interval between run-status checks: 5e3 ms = 5 seconds.
const DEFAULT_POLL_INTERVAL = 5e3;
/** Cap on param-key count when a job has no Zod schema. Jobs that need more keys must define a schema. */
const MAX_UNVALIDATED_PARAM_KEYS = 50;
/** Replace upstream error messages with generic descriptions keyed by HTTP status. */
function errorResult(status) {
  const message = STATUS_CODES[status] ?? "Request failed";
  return { ok: false, status, message };
}
/** True when the run's life-cycle state means it will make no further progress. */
function isTerminalRunState(state) {
  switch (state) {
    case "TERMINATED":
    case "SKIPPED":
    case "INTERNAL_ERROR":
      return true;
    default:
      return false;
  }
}
/** Exponential backoff (1.5x) with +/- 20% jitter, capped at `max`. */
function nextPollDelay(current, max) {
  // Jitter factor uniform in [0.8, 1.2) so concurrent pollers drift apart.
  const jitterFactor = 0.8 + Math.random() * 0.4;
  const delay = Math.min(current * jitterFactor, max);
  const next = Math.min(current * 1.5, max);
  return { delay, next };
}
/**
 * Sleep for `ms` milliseconds, resolving early (never rejecting) when `signal`
 * aborts. Resolves immediately if the signal is already aborted.
 *
 * @param {number} ms - Delay in milliseconds.
 * @param {AbortSignal} [signal] - Optional signal that short-circuits the sleep.
 * @returns {Promise<void>} Resolves after the delay or on abort.
 */
function abortableSleep(ms, signal) {
  return new Promise((resolve) => {
    if (signal?.aborted) {
      resolve();
      return;
    }
    const onAbort = () => {
      clearTimeout(timer);
      resolve();
    };
    const timer = setTimeout(() => {
      // Fix: detach the abort listener when the timer fires normally.
      // Previously the `{ once: true }` listener stayed attached until abort,
      // so repeated sleeps against one long-lived signal (the runAndWait
      // polling loop) accumulated one listener per poll iteration.
      signal?.removeEventListener("abort", onAbort);
      resolve();
    }, ms);
    signal?.addEventListener("abort", onAbort, { once: true });
  });
}
/**
 * Jobs plugin: exposes configured Databricks jobs as a per-job server-side API
 * (`createJobAPI` / `exports`) and as HTTP routes (`injectRoutes`). Job IDs are
 * resolved from `DATABRICKS_JOB_*` environment variables at construction time.
 */
var JobsPlugin = class JobsPlugin extends Plugin {
  static manifest = manifest_default;
  // Jobs connector used for all Databricks Jobs API calls (runNow, getRun, ...).
  connector;
  // jobKey -> numeric job ID parsed from the matching env var in the constructor.
  jobIds = {};
  // jobKey -> per-job configuration (taskType, Zod params schema, waitTimeout, ...).
  jobConfigs = {};
  // All discovered + configured job keys.
  jobKeys = [];
  /**
   * Scans process.env for DATABRICKS_JOB_* keys and merges with explicit config.
   * Explicit config wins for per-job overrides; auto-discovered jobs get default `{}` config.
   */
  static discoverJobs(config) {
    const explicit = config.jobs ?? {};
    const discovered = {};
    const prefix = "DATABRICKS_JOB_";
    for (const key of Object.keys(process.env)) {
      if (!key.startsWith(prefix)) continue;
      // DATABRICKS_JOB_ID is the bare "default" job, handled separately below.
      if (key === "DATABRICKS_JOB_ID") continue;
      // 15 === prefix.length; everything after "DATABRICKS_JOB_" is the job key.
      const suffix = key.slice(15);
      if (!suffix || !process.env[key]) continue;
      const jobKey = suffix.toLowerCase();
      if (!(jobKey in explicit)) discovered[jobKey] = {};
    }
    // Only fall back to the implicit "default" job when nothing else is configured.
    if (process.env.DATABRICKS_JOB_ID && Object.keys(explicit).length === 0 && Object.keys(discovered).length === 0) discovered.default = {};
    return {
      ...discovered,
      ...explicit
    };
  }
  /**
   * Generates resource requirements dynamically from discovered + configured jobs.
   * Each job key maps to a `DATABRICKS_JOB_{KEY_UPPERCASE}` env var (or `DATABRICKS_JOB_ID` for "default").
   */
  static getResourceRequirements(config) {
    const jobs = JobsPlugin.discoverJobs(config);
    return Object.keys(jobs).map((key) => ({
      type: ResourceType.JOB,
      alias: `job-${key}`,
      resourceKey: `job-${key}`,
      description: `Databricks Job "${key}"`,
      permission: "CAN_MANAGE_RUN",
      fields: { id: {
        env: key === "default" ? "DATABRICKS_JOB_ID" : `DATABRICKS_JOB_${key.toUpperCase()}`,
        description: `Job ID for "${key}"`
      } },
      required: true
    }));
  }
  /**
   * Discovers jobs and eagerly parses each job's ID from its env var.
   * Keys whose env var is missing or non-numeric get no entry in `jobIds`
   * (getJobId throws later with an actionable message).
   */
  constructor(config) {
    super(config);
    this.config = config;
    this.connector = new JobsConnector({ telemetry: config.telemetry });
    const jobs = JobsPlugin.discoverJobs(config);
    this.jobKeys = Object.keys(jobs);
    this.jobConfigs = jobs;
    for (const key of this.jobKeys) {
      const envVar = key === "default" ? "DATABRICKS_JOB_ID" : `DATABRICKS_JOB_${key.toUpperCase()}`;
      const jobIdStr = process.env[envVar];
      if (jobIdStr) {
        const parsed = Number.parseInt(jobIdStr, 10);
        if (!Number.isNaN(parsed)) this.jobIds[key] = parsed;
      }
    }
  }
  // No remote calls at setup time — only logs the discovered job keys.
  async setup() {
    logger.info(`Jobs plugin initialized with ${this.jobKeys.length} job(s): ${this.jobKeys.join(", ")}`);
  }
  // Workspace client resolved lazily from the current execution context.
  get client() {
    return getWorkspaceClient();
  }
  // Returns the numeric job ID for a key; throws with the env var to set when absent.
  getJobId(jobKey) {
    const id = this.jobIds[jobKey];
    if (!id) {
      const envVar = jobKey === "default" ? "DATABRICKS_JOB_ID" : `DATABRICKS_JOB_${jobKey.toUpperCase()}`;
      throw new Error(`Job "${jobKey}" has no configured job ID. Set ${envVar} env var.`);
    }
    return id;
  }
  // Execution settings for read operations: JOBS_READ_DEFAULTS plus an optional
  // config-level timeout override and a per-call cache key.
  _readSettings(cacheKey) {
    return { default: {
      ...JOBS_READ_DEFAULTS,
      // `!= null` deliberately matches both null and undefined; 0 is a valid timeout.
      ...this.config.timeout != null && { timeout: this.config.timeout },
      cache: {
        ...JOBS_READ_DEFAULTS.cache,
        cacheKey
      }
    } };
  }
  // Execution settings for write operations: JOBS_WRITE_DEFAULTS (no caching)
  // plus an optional config-level timeout override.
  _writeSettings() {
    return { default: {
      ...JOBS_WRITE_DEFAULTS,
      ...this.config.timeout != null && { timeout: this.config.timeout }
    } };
  }
  /**
   * Validates params against the job's Zod schema (if any) and maps them
   * to SDK request fields based on the task type. Shared by runNow and runAndWait.
   */
  _validateAndMap(jobKey, params) {
    const jobConfig = this.jobConfigs[jobKey];
    let validated = params;
    if (jobConfig?.params) {
      const result = jobConfig.params.safeParse(params ?? {});
      if (!result.success) throw new ValidationError(`Parameter validation failed for job "${jobKey}": ${result.error.message}`);
      validated = result.data;
    }
    // Without a taskType the params are passed through as-is (or `{}` when absent).
    return jobConfig?.taskType && validated ? mapParams(jobConfig.taskType, validated) : validated ?? {};
  }
  /**
   * Creates a JobAPI for a specific configured job key.
   * Each method is scoped to the job's configured ID.
   */
  createJobAPI(jobKey) {
    const jobId = this.getJobId(jobKey);
    const jobConfig = this.jobConfigs[jobKey];
    // Captured so the returned closures keep working regardless of call-site `this`.
    const self = this;
    const client = this.client;
    const userKey = getCurrentUserId();
    /**
     * Verify that `runId` belongs to this job's configured `jobId`. Returns
     * null if the run is in scope; otherwise returns a 404 `ExecutionResult`.
     * Prevents cross-job access via the `/:jobKey/runs/:runId` HTTP surface.
     */
    const verifyRunScope = async (runId) => {
      const result = await self.execute(async (signal) => self.connector.getRun(client, { run_id: runId }, signal), self._readSettings([
        "jobs:getRun",
        jobKey,
        runId
      ]), userKey);
      if (!result.ok) return errorResult(result.status);
      if (result.data.job_id !== jobId) return errorResult(404);
      return null;
    };
    return {
      // Trigger a run and return immediately with the run handle.
      runNow: async (params) => {
        const sdkFields = self._validateAndMap(jobKey, params);
        const result = await self.execute(async (signal) => self.connector.runNow(client, {
          ...sdkFields,
          job_id: jobId
        }, signal), self._writeSettings(), userKey);
        return result.ok ? result : errorResult(result.status);
      },
      // Trigger a run, then poll with jittered backoff, yielding one status
      // snapshot per poll until a terminal state, timeout, or abort.
      async *runAndWait(params, signal) {
        const sdkFields = self._validateAndMap(jobKey, params);
        const runResult = await self.execute(async (signal) => self.connector.runNow(client, {
          ...sdkFields,
          job_id: jobId
        }, signal), self._writeSettings(), userKey);
        if (!runResult.ok) throw new ExecutionError("Failed to trigger job run");
        const runId = runResult.data.run_id;
        if (!runId) throw new Error("runNow did not return a run_id");
        const basePollInterval = self.config.pollIntervalMs ?? DEFAULT_POLL_INTERVAL;
        // Backoff ceiling: 6x the base interval.
        const maxPollInterval = basePollInterval * 6;
        const timeout = jobConfig?.waitTimeout ?? DEFAULT_WAIT_TIMEOUT;
        const startTime = Date.now();
        let currentInterval = basePollInterval;
        while (!signal?.aborted) {
          if (Date.now() - startTime > timeout) throw new Error(`Job run ${runId} polling timeout after ${timeout}ms`);
          // Status polls bypass the cache so each iteration sees fresh state.
          const runStatusResult = await self.execute(async (signal) => self.connector.getRun(client, { run_id: runId }, signal), { default: {
            ...JOBS_READ_DEFAULTS,
            cache: { enabled: false }
          } }, userKey);
          if (!runStatusResult.ok) throw new ExecutionError(`Failed to poll run status for run ${runId}`);
          const run = runStatusResult.data;
          const state = run.state?.life_cycle_state;
          yield {
            status: state,
            timestamp: Date.now(),
            run
          };
          if (isTerminalRunState(state)) return;
          const { delay, next } = nextPollDelay(currentInterval, maxPollInterval);
          currentInterval = next;
          await abortableSleep(delay, signal);
        }
      },
      // Most recent run for this job (listRuns with limit 1).
      lastRun: async () => {
        const result = await self.execute(async (signal) => self.connector.listRuns(client, {
          job_id: jobId,
          limit: 1
        }, signal), self._readSettings(["jobs:lastRun", jobKey]), userKey);
        if (!result.ok) return errorResult(result.status);
        return {
          ok: true,
          data: result.data[0]
        };
      },
      listRuns: async (options) => {
        const result = await self.execute(async (signal) => self.connector.listRuns(client, {
          job_id: jobId,
          limit: options?.limit
        }, signal), self._readSettings([
          "jobs:listRuns",
          jobKey,
          options?.limit ?? "default"
        ]), userKey);
        return result.ok ? result : errorResult(result.status);
      },
      // Fetch a run; 404s when the run belongs to a different job.
      getRun: async (runId) => {
        const result = await self.execute(async (signal) => self.connector.getRun(client, { run_id: runId }, signal), self._readSettings([
          "jobs:getRun",
          jobKey,
          runId
        ]), userKey);
        if (!result.ok) return errorResult(result.status);
        if (result.data.job_id !== jobId) return errorResult(404);
        return result;
      },
      getRunOutput: async (runId) => {
        const scopeError = await verifyRunScope(runId);
        if (scopeError) return scopeError;
        const result = await self.execute(async (signal) => self.connector.getRunOutput(client, { run_id: runId }, signal), self._readSettings([
          "jobs:getRunOutput",
          jobKey,
          runId
        ]), userKey);
        return result.ok ? result : errorResult(result.status);
      },
      cancelRun: async (runId) => {
        const scopeError = await verifyRunScope(runId);
        if (scopeError) return scopeError;
        const result = await self.execute(async (signal) => self.connector.cancelRun(client, { run_id: runId }, signal), self._writeSettings(), userKey);
        return result.ok ? result : errorResult(result.status);
      },
      getJob: async () => {
        const result = await self.execute(async (signal) => self.connector.getJob(client, { job_id: jobId }, signal), self._readSettings(["jobs:getJob", jobKey]), userKey);
        return result.ok ? result : errorResult(result.status);
      }
    };
  }
  /**
   * Resolve `:jobKey` from the request. Returns the key and ID,
   * or sends a 404 and returns `{ jobKey: undefined, jobId: undefined }`.
   */
  _resolveJob(req, res) {
    const jobKey = req.params.jobKey;
    if (!this.jobKeys.includes(jobKey)) {
      // Strip anything outside [a-zA-Z0-9_-] so the echoed key is safe to render.
      const safeKey = jobKey.replace(/[^a-zA-Z0-9_-]/g, "");
      res.status(404).json({
        error: `Unknown job "${safeKey}"`,
        plugin: this.name
      });
      return {
        jobKey: void 0,
        jobId: void 0
      };
    }
    const jobId = this.jobIds[jobKey];
    if (!jobId) {
      res.status(404).json({
        error: `Job "${jobKey}" has no configured job ID`,
        plugin: this.name
      });
      return {
        jobKey: void 0,
        jobId: void 0
      };
    }
    return {
      jobKey,
      jobId
    };
  }
  // Send a JSON error whose message is derived from the status code only,
  // never from upstream error text.
  _sendStatusError(res, status) {
    res.status(status).json({
      error: STATUS_CODES[status] ?? "Unknown Error",
      plugin: this.name
    });
  }
  /**
   * Validate params from an HTTP request body. Eager validation lets streaming
   * requests get a clean 400 instead of a generic SSE error event. Throws
   * ValidationError so handlers can map to a 400 response via their catch block.
   */
  _parseRunParams(jobKey, rawParams) {
    if (rawParams !== void 0 && (typeof rawParams !== "object" || rawParams === null || Array.isArray(rawParams))) throw new ValidationError("params must be a plain object");
    const jobConfig = this.jobConfigs[jobKey];
    if (jobConfig?.params) {
      if (!jobConfig.params.safeParse(rawParams ?? {}).success) throw new ValidationError("Invalid job parameters");
      return rawParams;
    }
    if (rawParams !== void 0) {
      if (!jobConfig?.taskType) throw new ValidationError("This job does not accept parameters");
      // Without a schema, cap key count to bound unvalidated input size.
      const keyCount = Object.keys(rawParams).length;
      if (keyCount > MAX_UNVALIDATED_PARAM_KEYS) throw new ValidationError(`Too many parameters (${keyCount}). Define a Zod schema to accept more than ${MAX_UNVALIDATED_PARAM_KEYS}.`);
    }
    return rawParams;
  }
  // POST /:jobKey/run handler. `?stream=true` streams runAndWait progress
  // (via executeStream); otherwise triggers the run and returns its runId.
  async _handleRun(req, res) {
    const { jobKey } = this._resolveJob(req, res);
    if (!jobKey) return;
    const stream = req.query.stream === "true";
    try {
      const params = this._parseRunParams(jobKey, req.body?.params);
      const api = this.createJobAPI(jobKey);
      if (stream) {
        const streamSettings = { default: JOBS_STREAM_DEFAULTS };
        await this.executeStream(res, (signal) => api.runAndWait(params, signal), streamSettings);
      } else {
        const result = await api.runNow(params);
        if (!result.ok) {
          this._sendStatusError(res, result.status);
          return;
        }
        res.json({ runId: result.data.run_id });
      }
    } catch (error) {
      if (error instanceof ValidationError) {
        // headersSent guard: the streaming path may already have started responding.
        if (!res.headersSent) res.status(400).json({
          error: error.message,
          plugin: this.name
        });
        return;
      }
      logger.error("Run failed for job %s: %O", jobKey, error);
      if (!res.headersSent) res.status(500).json({
        error: "Run failed",
        plugin: this.name
      });
    }
  }
  // Registers the HTTP surface: run, list runs, run detail, status, cancel.
  injectRoutes(router) {
    this.route(router, {
      name: "run",
      method: "post",
      path: "/:jobKey/run",
      handler: (req, res) => this._handleRun(req, res)
    });
    this.route(router, {
      name: "runs",
      method: "get",
      path: "/:jobKey/runs",
      handler: async (req, res) => {
        const { jobKey } = this._resolveJob(req, res);
        if (!jobKey) return;
        // Clamp limit to [1, 100]; non-numeric input falls back to 20.
        const limit = Math.max(1, Math.min(Number.parseInt(req.query.limit, 10) || 20, 100));
        try {
          const result = await this.createJobAPI(jobKey).listRuns({ limit });
          if (!result.ok) {
            this._sendStatusError(res, result.status);
            return;
          }
          res.json({ runs: result.data });
        } catch (error) {
          logger.error("List runs failed for job %s: %O", jobKey, error);
          res.status(500).json({
            error: "List runs failed",
            plugin: this.name
          });
        }
      }
    });
    this.route(router, {
      name: "run-detail",
      method: "get",
      path: "/:jobKey/runs/:runId",
      handler: async (req, res) => {
        const { jobKey } = this._resolveJob(req, res);
        if (!jobKey) return;
        const runId = Number.parseInt(req.params.runId, 10);
        if (Number.isNaN(runId) || runId <= 0) {
          res.status(400).json({
            error: "Invalid runId",
            plugin: this.name
          });
          return;
        }
        try {
          const result = await this.createJobAPI(jobKey).getRun(runId);
          if (!result.ok) {
            this._sendStatusError(res, result.status);
            return;
          }
          res.json(result.data);
        } catch (error) {
          logger.error("Get run failed for job %s run %d: %O", jobKey, runId, error);
          res.status(500).json({
            error: "Get run failed",
            plugin: this.name
          });
        }
      }
    });
    this.route(router, {
      name: "status",
      method: "get",
      path: "/:jobKey/status",
      handler: async (req, res) => {
        const { jobKey } = this._resolveJob(req, res);
        if (!jobKey) return;
        try {
          const result = await this.createJobAPI(jobKey).lastRun();
          if (!result.ok) {
            this._sendStatusError(res, result.status);
            return;
          }
          res.json({
            status: result.data?.state?.life_cycle_state ?? null,
            run: result.data ?? null
          });
        } catch (error) {
          logger.error("Status check failed for job %s: %O", jobKey, error);
          res.status(500).json({
            error: "Status check failed",
            plugin: this.name
          });
        }
      }
    });
    this.route(router, {
      name: "cancel-run",
      method: "delete",
      path: "/:jobKey/runs/:runId",
      handler: async (req, res) => {
        const { jobKey } = this._resolveJob(req, res);
        if (!jobKey) return;
        const runId = Number.parseInt(req.params.runId, 10);
        if (Number.isNaN(runId) || runId <= 0) {
          res.status(400).json({
            error: "Invalid runId",
            plugin: this.name
          });
          return;
        }
        try {
          const result = await this.createJobAPI(jobKey).cancelRun(runId);
          if (!result.ok) {
            this._sendStatusError(res, result.status);
            return;
          }
          res.status(204).end();
        } catch (error) {
          logger.error("Cancel run failed for job %s run %d: %O", jobKey, runId, error);
          res.status(500).json({
            error: "Cancel run failed",
            plugin: this.name
          });
        }
      }
    });
  }
  // Server-side export: a resolver from job key to a JobAPI, with an
  // `asUser(req)` variant scoped to the requesting user's context.
  exports() {
    const resolveJob = (jobKey) => {
      if (!this.jobKeys.includes(jobKey)) throw new Error(`Unknown job "${jobKey}". Available jobs: ${this.jobKeys.join(", ")}`);
      return {
        ...this.createJobAPI(jobKey),
        asUser: (req) => {
          return this.asUser(req).createJobAPI(jobKey);
        }
      };
    };
    return resolveJob;
  }
  // Serializable per-job metadata for the browser client: the params schema
  // as JSON Schema (via zod's toJSONSchema) and the task type, or nulls.
  clientConfig() {
    const jobs = {};
    for (const key of this.jobKeys) {
      const config = this.jobConfigs[key];
      jobs[key] = {
        params: config?.params ? toJSONSchema(config.params) : null,
        taskType: config?.taskType ?? null
      };
    }
    return { jobs };
  }
};
/**
 * Jobs plugin factory wrapper produced by `toPlugin`.
 * @internal
 */
const jobs = toPlugin(JobsPlugin);

//#endregion
export { jobs };
//# sourceMappingURL=plugin.js.map