@databricks/appkit 0.26.1 → 0.27.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +7 -0
- package/NOTICE.md +1 -0
- package/dist/appkit/package.js +1 -1
- package/dist/connectors/index.js +2 -0
- package/dist/connectors/jobs/client.d.ts +2 -0
- package/dist/connectors/jobs/client.js +132 -0
- package/dist/connectors/jobs/client.js.map +1 -0
- package/dist/connectors/jobs/index.d.ts +2 -0
- package/dist/connectors/jobs/index.js +3 -0
- package/dist/connectors/jobs/types.d.ts +10 -0
- package/dist/connectors/jobs/types.d.ts.map +1 -0
- package/dist/index.d.ts +6 -1
- package/dist/index.js +2 -1
- package/dist/index.js.map +1 -1
- package/dist/plugins/index.d.ts +3 -0
- package/dist/plugins/index.js +2 -0
- package/dist/plugins/jobs/defaults.js +45 -0
- package/dist/plugins/jobs/defaults.js.map +1 -0
- package/dist/plugins/jobs/index.d.ts +2 -0
- package/dist/plugins/jobs/index.js +3 -0
- package/dist/plugins/jobs/manifest.js +40 -0
- package/dist/plugins/jobs/manifest.js.map +1 -0
- package/dist/plugins/jobs/params.js +35 -0
- package/dist/plugins/jobs/params.js.map +1 -0
- package/dist/plugins/jobs/plugin.d.ts +66 -0
- package/dist/plugins/jobs/plugin.d.ts.map +1 -0
- package/dist/plugins/jobs/plugin.js +531 -0
- package/dist/plugins/jobs/plugin.js.map +1 -0
- package/dist/plugins/jobs/types.d.ts +84 -0
- package/dist/plugins/jobs/types.d.ts.map +1 -0
- package/dist/plugins/serving/serving.js +3 -3
- package/dist/plugins/serving/serving.js.map +1 -1
- package/dist/registry/types.generated.d.ts.map +1 -1
- package/dist/registry/types.generated.js.map +1 -1
- package/dist/schemas/plugin-manifest.generated.d.ts +5 -5
- package/dist/schemas/plugin-manifest.generated.d.ts.map +1 -1
- package/dist/shared/src/schemas/plugin-manifest.generated.d.ts +5 -5
- package/dist/shared/src/schemas/plugin-manifest.generated.d.ts.map +1 -1
- package/dist/stream/sse-writer.js +3 -4
- package/dist/stream/sse-writer.js.map +1 -1
- package/dist/stream/stream-manager.d.ts.map +1 -1
- package/dist/stream/stream-manager.js +2 -0
- package/dist/stream/stream-manager.js.map +1 -1
- package/docs/api/appkit/Interface.BasePluginConfig.md +4 -0
- package/docs/api/appkit/Interface.IJobsConfig.md +86 -0
- package/docs/api/appkit/Interface.JobAPI.md +163 -0
- package/docs/api/appkit/Interface.JobConfig.md +36 -0
- package/docs/api/appkit/Interface.JobsConnectorConfig.md +10 -0
- package/docs/api/appkit/TypeAlias.JobHandle.md +29 -0
- package/docs/api/appkit/TypeAlias.JobsExport.md +34 -0
- package/docs/api/appkit.md +6 -0
- package/docs/plugins/jobs.md +252 -0
- package/docs/plugins.md +2 -1
- package/llms.txt +7 -0
- package/package.json +2 -1
- package/sbom.cdx.json +1 -1
|
@@ -0,0 +1,531 @@
|
|
|
1
|
+
import { createLogger } from "../../logging/logger.js";
|
|
2
|
+
import { ExecutionError } from "../../errors/execution.js";
|
|
3
|
+
import { ValidationError } from "../../errors/validation.js";
|
|
4
|
+
import { init_errors } from "../../errors/index.js";
|
|
5
|
+
import { getCurrentUserId, getWorkspaceClient } from "../../context/execution-context.js";
|
|
6
|
+
import { init_context } from "../../context/index.js";
|
|
7
|
+
import { ResourceType } from "../../registry/types.generated.js";
|
|
8
|
+
import "../../registry/index.js";
|
|
9
|
+
import { Plugin } from "../../plugin/plugin.js";
|
|
10
|
+
import { toPlugin } from "../../plugin/to-plugin.js";
|
|
11
|
+
import "../../plugin/index.js";
|
|
12
|
+
import { JobsConnector } from "../../connectors/jobs/client.js";
|
|
13
|
+
import "../../connectors/jobs/index.js";
|
|
14
|
+
import { JOBS_READ_DEFAULTS, JOBS_STREAM_DEFAULTS, JOBS_WRITE_DEFAULTS } from "./defaults.js";
|
|
15
|
+
import manifest_default from "./manifest.js";
|
|
16
|
+
import { mapParams } from "./params.js";
|
|
17
|
+
import { STATUS_CODES } from "node:http";
|
|
18
|
+
import { toJSONSchema } from "zod";
|
|
19
|
+
|
|
20
|
+
//#region src/plugins/jobs/plugin.ts
init_context();
init_errors();

const logger = createLogger("jobs");

/** Default maximum time (ms) a runAndWait poll loop will wait for a terminal state: 10 minutes. */
const DEFAULT_WAIT_TIMEOUT = 600000;
/** Base interval (ms) between run-status polls; grows with backoff up to 6x. */
const DEFAULT_POLL_INTERVAL = 5000;
/** Cap on param-key count when a job has no Zod schema. Jobs that need more keys must define a schema. */
const MAX_UNVALIDATED_PARAM_KEYS = 50;
|
|
28
|
+
/** Replace upstream error messages with generic descriptions keyed by HTTP status. */
|
|
29
|
+
/**
 * Build a failed ExecutionResult whose message is the generic HTTP reason
 * phrase for `status` — upstream error text is deliberately discarded so it
 * never leaks to callers.
 */
function errorResult(status) {
  const message = STATUS_CODES[status] ?? "Request failed";
  return { ok: false, status, message };
}
|
36
|
+
/** True when a Databricks run life-cycle state can no longer change. */
function isTerminalRunState(state) {
  switch (state) {
    case "TERMINATED":
    case "SKIPPED":
    case "INTERNAL_ERROR":
      return true;
    default:
      return false;
  }
}
|
39
|
+
/** Exponential backoff (1.5x) with +/- 20% jitter, capped at `max`. */
|
|
40
|
+
/** Exponential backoff (1.5x) with +/- 20% jitter, capped at `max`. */
function nextPollDelay(current, max) {
  // Uniform factor in [0.8, 1.2) — same distribution as 1 + (rand*0.4 - 0.2).
  const jitterFactor = 0.8 + Math.random() * 0.4;
  const delay = Math.min(current * jitterFactor, max);
  const next = Math.min(current * 1.5, max);
  return { delay, next };
}
|
47
|
+
/**
 * Sleep for `ms` milliseconds, resolving early (never rejecting) if `signal`
 * aborts. Used between status polls in runAndWait.
 *
 * Fix: the original attached an "abort" listener that was only removed when
 * the signal actually aborted ({ once: true }); on a normal timer expiry the
 * listener stayed registered forever. Because runAndWait calls this once per
 * poll iteration with the same long-lived signal, listeners accumulated on
 * the signal for the lifetime of the stream. We now detach the listener when
 * the timer fires.
 */
function abortableSleep(ms, signal) {
  return new Promise((resolve) => {
    // Already aborted: resolve immediately without scheduling anything.
    if (signal?.aborted) {
      resolve();
      return;
    }
    const onAbort = () => {
      clearTimeout(timer);
      resolve();
    };
    const timer = setTimeout(() => {
      // Normal expiry: drop our abort listener so repeated calls with the
      // same signal do not leak listeners.
      signal?.removeEventListener("abort", onAbort);
      resolve();
    }, ms);
    signal?.addEventListener("abort", onAbort, { once: true });
  });
}
|
60
|
+
var JobsPlugin = class JobsPlugin extends Plugin {
  static manifest = manifest_default;
  connector;
  // jobKey -> numeric Databricks job ID, parsed from env at construction.
  jobIds = {};
  // jobKey -> per-job config (params schema, taskType, waitTimeout, ...).
  jobConfigs = {};
  jobKeys = [];

  /**
   * Scans process.env for DATABRICKS_JOB_* keys and merges with explicit config.
   * Explicit config wins for per-job overrides; auto-discovered jobs get default `{}` config.
   */
  static discoverJobs(config) {
    const explicit = config.jobs ?? {};
    const discovered = {};
    const prefix = "DATABRICKS_JOB_";
    for (const envName of Object.keys(process.env)) {
      // DATABRICKS_JOB_ID is the bare "default" job, handled separately below.
      if (!envName.startsWith(prefix) || envName === "DATABRICKS_JOB_ID") continue;
      const suffix = envName.slice(prefix.length);
      if (!suffix || !process.env[envName]) continue;
      const jobKey = suffix.toLowerCase();
      if (!(jobKey in explicit)) discovered[jobKey] = {};
    }
    // Bare DATABRICKS_JOB_ID with nothing else configured -> single "default" job.
    const nothingConfigured =
      Object.keys(explicit).length === 0 && Object.keys(discovered).length === 0;
    if (process.env.DATABRICKS_JOB_ID && nothingConfigured) discovered.default = {};
    return { ...discovered, ...explicit };
  }

  /**
   * Generates resource requirements dynamically from discovered + configured jobs.
   * Each job key maps to a `DATABRICKS_JOB_{KEY_UPPERCASE}` env var (or `DATABRICKS_JOB_ID` for "default").
   */
  static getResourceRequirements(config) {
    const jobs = JobsPlugin.discoverJobs(config);
    return Object.keys(jobs).map((jobKey) => ({
      type: ResourceType.JOB,
      alias: `job-${jobKey}`,
      resourceKey: `job-${jobKey}`,
      description: `Databricks Job "${jobKey}"`,
      permission: "CAN_MANAGE_RUN",
      fields: {
        id: {
          env: jobKey === "default" ? "DATABRICKS_JOB_ID" : `DATABRICKS_JOB_${jobKey.toUpperCase()}`,
          description: `Job ID for "${jobKey}"`
        }
      },
      required: true
    }));
  }

  constructor(config) {
    super(config);
    this.config = config;
    this.connector = new JobsConnector({ telemetry: config.telemetry });
    const jobs = JobsPlugin.discoverJobs(config);
    this.jobConfigs = jobs;
    this.jobKeys = Object.keys(jobs);
    // Resolve each job's numeric ID from its env var; non-numeric values are skipped.
    for (const key of this.jobKeys) {
      const envVar = key === "default" ? "DATABRICKS_JOB_ID" : `DATABRICKS_JOB_${key.toUpperCase()}`;
      const raw = process.env[envVar];
      if (!raw) continue;
      const numericId = Number.parseInt(raw, 10);
      if (!Number.isNaN(numericId)) this.jobIds[key] = numericId;
    }
  }

  async setup() {
    logger.info(`Jobs plugin initialized with ${this.jobKeys.length} job(s): ${this.jobKeys.join(", ")}`);
  }

  get client() {
    return getWorkspaceClient();
  }

  /** Returns the configured numeric job ID or throws naming the missing env var. */
  getJobId(jobKey) {
    const id = this.jobIds[jobKey];
    if (!id) {
      const envVar = jobKey === "default" ? "DATABRICKS_JOB_ID" : `DATABRICKS_JOB_${jobKey.toUpperCase()}`;
      throw new Error(`Job "${jobKey}" has no configured job ID. Set ${envVar} env var.`);
    }
    return id;
  }

  /** Execution settings for cached reads; applies the plugin-level timeout override if set. */
  _readSettings(cacheKey) {
    const settings = {
      ...JOBS_READ_DEFAULTS,
      cache: { ...JOBS_READ_DEFAULTS.cache, cacheKey }
    };
    if (this.config.timeout != null) settings.timeout = this.config.timeout;
    return { default: settings };
  }

  /** Execution settings for writes (no caching); applies the timeout override if set. */
  _writeSettings() {
    const settings = { ...JOBS_WRITE_DEFAULTS };
    if (this.config.timeout != null) settings.timeout = this.config.timeout;
    return { default: settings };
  }

  /**
   * Validates params against the job's Zod schema (if any) and maps them
   * to SDK request fields based on the task type. Shared by runNow and runAndWait.
   */
  _validateAndMap(jobKey, params) {
    const jobConfig = this.jobConfigs[jobKey];
    let validated = params;
    if (jobConfig?.params) {
      const result = jobConfig.params.safeParse(params ?? {});
      if (!result.success) {
        throw new ValidationError(`Parameter validation failed for job "${jobKey}": ${result.error.message}`);
      }
      validated = result.data;
    }
    if (jobConfig?.taskType && validated) return mapParams(jobConfig.taskType, validated);
    return validated ?? {};
  }

  /**
   * Creates a JobAPI for a specific configured job key.
   * Each method is scoped to the job's configured ID.
   */
  createJobAPI(jobKey) {
    const jobId = this.getJobId(jobKey);
    const jobConfig = this.jobConfigs[jobKey];
    // `self` is required: runAndWait is a generator method on the returned
    // literal, so `this` inside it would be the API object, not the plugin.
    const self = this;
    const client = this.client;
    const userKey = getCurrentUserId();

    /**
     * Verify that `runId` belongs to this job's configured `jobId`. Returns
     * null if the run is in scope; otherwise returns a 404 `ExecutionResult`.
     * Prevents cross-job access via the `/:jobKey/runs/:runId` HTTP surface.
     */
    const verifyRunScope = async (runId) => {
      const result = await self.execute(
        async (signal) => self.connector.getRun(client, { run_id: runId }, signal),
        self._readSettings(["jobs:getRun", jobKey, runId]),
        userKey
      );
      if (!result.ok) return errorResult(result.status);
      if (result.data.job_id !== jobId) return errorResult(404);
      return null;
    };

    return {
      runNow: async (params) => {
        const sdkFields = self._validateAndMap(jobKey, params);
        const result = await self.execute(
          async (signal) => self.connector.runNow(client, { ...sdkFields, job_id: jobId }, signal),
          self._writeSettings(),
          userKey
        );
        return result.ok ? result : errorResult(result.status);
      },

      // Triggers a run, then polls (with backoff) yielding status snapshots
      // until a terminal state, timeout, or abort.
      async *runAndWait(params, signal) {
        const sdkFields = self._validateAndMap(jobKey, params);
        const runResult = await self.execute(
          async (execSignal) => self.connector.runNow(client, { ...sdkFields, job_id: jobId }, execSignal),
          self._writeSettings(),
          userKey
        );
        if (!runResult.ok) throw new ExecutionError("Failed to trigger job run");
        const runId = runResult.data.run_id;
        if (!runId) throw new Error("runNow did not return a run_id");
        const basePollInterval = self.config.pollIntervalMs ?? DEFAULT_POLL_INTERVAL;
        const maxPollInterval = basePollInterval * 6;
        const timeout = jobConfig?.waitTimeout ?? DEFAULT_WAIT_TIMEOUT;
        const startTime = Date.now();
        let currentInterval = basePollInterval;
        while (!signal?.aborted) {
          if (Date.now() - startTime > timeout) {
            throw new Error(`Job run ${runId} polling timeout after ${timeout}ms`);
          }
          // Poll with caching disabled so each iteration sees fresh state.
          const runStatusResult = await self.execute(
            async (execSignal) => self.connector.getRun(client, { run_id: runId }, execSignal),
            { default: { ...JOBS_READ_DEFAULTS, cache: { enabled: false } } },
            userKey
          );
          if (!runStatusResult.ok) throw new ExecutionError(`Failed to poll run status for run ${runId}`);
          const run = runStatusResult.data;
          const state = run.state?.life_cycle_state;
          yield { status: state, timestamp: Date.now(), run };
          if (isTerminalRunState(state)) return;
          const { delay, next } = nextPollDelay(currentInterval, maxPollInterval);
          currentInterval = next;
          await abortableSleep(delay, signal);
        }
      },

      lastRun: async () => {
        const result = await self.execute(
          async (signal) => self.connector.listRuns(client, { job_id: jobId, limit: 1 }, signal),
          self._readSettings(["jobs:lastRun", jobKey]),
          userKey
        );
        if (!result.ok) return errorResult(result.status);
        return { ok: true, data: result.data[0] };
      },

      listRuns: async (options) => {
        const result = await self.execute(
          async (signal) => self.connector.listRuns(client, { job_id: jobId, limit: options?.limit }, signal),
          self._readSettings(["jobs:listRuns", jobKey, options?.limit ?? "default"]),
          userKey
        );
        return result.ok ? result : errorResult(result.status);
      },

      getRun: async (runId) => {
        const result = await self.execute(
          async (signal) => self.connector.getRun(client, { run_id: runId }, signal),
          self._readSettings(["jobs:getRun", jobKey, runId]),
          userKey
        );
        if (!result.ok) return errorResult(result.status);
        // Hide runs that belong to other jobs behind a generic 404.
        if (result.data.job_id !== jobId) return errorResult(404);
        return result;
      },

      getRunOutput: async (runId) => {
        const scopeError = await verifyRunScope(runId);
        if (scopeError) return scopeError;
        const result = await self.execute(
          async (signal) => self.connector.getRunOutput(client, { run_id: runId }, signal),
          self._readSettings(["jobs:getRunOutput", jobKey, runId]),
          userKey
        );
        return result.ok ? result : errorResult(result.status);
      },

      cancelRun: async (runId) => {
        const scopeError = await verifyRunScope(runId);
        if (scopeError) return scopeError;
        const result = await self.execute(
          async (signal) => self.connector.cancelRun(client, { run_id: runId }, signal),
          self._writeSettings(),
          userKey
        );
        return result.ok ? result : errorResult(result.status);
      },

      getJob: async () => {
        const result = await self.execute(
          async (signal) => self.connector.getJob(client, { job_id: jobId }, signal),
          self._readSettings(["jobs:getJob", jobKey]),
          userKey
        );
        return result.ok ? result : errorResult(result.status);
      }
    };
  }

  /**
   * Resolve `:jobKey` from the request. Returns the key and ID,
   * or sends a 404 and returns `{ jobKey: undefined, jobId: undefined }`.
   */
  _resolveJob(req, res) {
    const jobKey = req.params.jobKey;
    if (!this.jobKeys.includes(jobKey)) {
      // Strip anything exotic so the unknown key cannot inject into the response.
      const safeKey = jobKey.replace(/[^a-zA-Z0-9_-]/g, "");
      res.status(404).json({ error: `Unknown job "${safeKey}"`, plugin: this.name });
      return { jobKey: void 0, jobId: void 0 };
    }
    const jobId = this.jobIds[jobKey];
    if (!jobId) {
      res.status(404).json({ error: `Job "${jobKey}" has no configured job ID`, plugin: this.name });
      return { jobKey: void 0, jobId: void 0 };
    }
    return { jobKey, jobId };
  }

  /** Send a JSON error body whose message is the generic reason phrase for `status`. */
  _sendStatusError(res, status) {
    res.status(status).json({ error: STATUS_CODES[status] ?? "Unknown Error", plugin: this.name });
  }

  /**
   * Validate params from an HTTP request body. Eager validation lets streaming
   * requests get a clean 400 instead of a generic SSE error event. Throws
   * ValidationError so handlers can map to a 400 response via their catch block.
   */
  _parseRunParams(jobKey, rawParams) {
    const isPlainObject =
      typeof rawParams === "object" && rawParams !== null && !Array.isArray(rawParams);
    if (rawParams !== void 0 && !isPlainObject) {
      throw new ValidationError("params must be a plain object");
    }
    const jobConfig = this.jobConfigs[jobKey];
    if (jobConfig?.params) {
      if (!jobConfig.params.safeParse(rawParams ?? {}).success) {
        throw new ValidationError("Invalid job parameters");
      }
      return rawParams;
    }
    if (rawParams !== void 0) {
      if (!jobConfig?.taskType) throw new ValidationError("This job does not accept parameters");
      const keyCount = Object.keys(rawParams).length;
      if (keyCount > MAX_UNVALIDATED_PARAM_KEYS) {
        throw new ValidationError(`Too many parameters (${keyCount}). Define a Zod schema to accept more than ${MAX_UNVALIDATED_PARAM_KEYS}.`);
      }
    }
    return rawParams;
  }

  /** POST /:jobKey/run — fire-and-return by default, SSE stream with ?stream=true. */
  async _handleRun(req, res) {
    const { jobKey } = this._resolveJob(req, res);
    if (!jobKey) return;
    const wantsStream = req.query.stream === "true";
    try {
      const params = this._parseRunParams(jobKey, req.body?.params);
      const api = this.createJobAPI(jobKey);
      if (wantsStream) {
        const streamSettings = { default: JOBS_STREAM_DEFAULTS };
        await this.executeStream(res, (signal) => api.runAndWait(params, signal), streamSettings);
        return;
      }
      const result = await api.runNow(params);
      if (!result.ok) {
        this._sendStatusError(res, result.status);
        return;
      }
      res.json({ runId: result.data.run_id });
    } catch (error) {
      if (error instanceof ValidationError) {
        if (!res.headersSent) res.status(400).json({ error: error.message, plugin: this.name });
        return;
      }
      logger.error("Run failed for job %s: %O", jobKey, error);
      if (!res.headersSent) res.status(500).json({ error: "Run failed", plugin: this.name });
    }
  }

  injectRoutes(router) {
    this.route(router, {
      name: "run",
      method: "post",
      path: "/:jobKey/run",
      handler: (req, res) => this._handleRun(req, res)
    });
    this.route(router, {
      name: "runs",
      method: "get",
      path: "/:jobKey/runs",
      handler: async (req, res) => {
        const { jobKey } = this._resolveJob(req, res);
        if (!jobKey) return;
        // Clamp ?limit to [1, 100]; default 20 on missing/non-numeric input.
        const limit = Math.max(1, Math.min(Number.parseInt(req.query.limit, 10) || 20, 100));
        try {
          const result = await this.createJobAPI(jobKey).listRuns({ limit });
          if (!result.ok) {
            this._sendStatusError(res, result.status);
            return;
          }
          res.json({ runs: result.data });
        } catch (error) {
          logger.error("List runs failed for job %s: %O", jobKey, error);
          res.status(500).json({ error: "List runs failed", plugin: this.name });
        }
      }
    });
    this.route(router, {
      name: "run-detail",
      method: "get",
      path: "/:jobKey/runs/:runId",
      handler: async (req, res) => {
        const { jobKey } = this._resolveJob(req, res);
        if (!jobKey) return;
        const runId = Number.parseInt(req.params.runId, 10);
        if (Number.isNaN(runId) || runId <= 0) {
          res.status(400).json({ error: "Invalid runId", plugin: this.name });
          return;
        }
        try {
          const result = await this.createJobAPI(jobKey).getRun(runId);
          if (!result.ok) {
            this._sendStatusError(res, result.status);
            return;
          }
          res.json(result.data);
        } catch (error) {
          logger.error("Get run failed for job %s run %d: %O", jobKey, runId, error);
          res.status(500).json({ error: "Get run failed", plugin: this.name });
        }
      }
    });
    this.route(router, {
      name: "status",
      method: "get",
      path: "/:jobKey/status",
      handler: async (req, res) => {
        const { jobKey } = this._resolveJob(req, res);
        if (!jobKey) return;
        try {
          const result = await this.createJobAPI(jobKey).lastRun();
          if (!result.ok) {
            this._sendStatusError(res, result.status);
            return;
          }
          res.json({
            status: result.data?.state?.life_cycle_state ?? null,
            run: result.data ?? null
          });
        } catch (error) {
          logger.error("Status check failed for job %s: %O", jobKey, error);
          res.status(500).json({ error: "Status check failed", plugin: this.name });
        }
      }
    });
    this.route(router, {
      name: "cancel-run",
      method: "delete",
      path: "/:jobKey/runs/:runId",
      handler: async (req, res) => {
        const { jobKey } = this._resolveJob(req, res);
        if (!jobKey) return;
        const runId = Number.parseInt(req.params.runId, 10);
        if (Number.isNaN(runId) || runId <= 0) {
          res.status(400).json({ error: "Invalid runId", plugin: this.name });
          return;
        }
        try {
          const result = await this.createJobAPI(jobKey).cancelRun(runId);
          if (!result.ok) {
            this._sendStatusError(res, result.status);
            return;
          }
          res.status(204).end();
        } catch (error) {
          logger.error("Cancel run failed for job %s run %d: %O", jobKey, runId, error);
          res.status(500).json({ error: "Cancel run failed", plugin: this.name });
        }
      }
    });
  }

  /** Programmatic export: jobKey -> scoped JobAPI plus an asUser() variant. */
  exports() {
    const resolveJob = (jobKey) => {
      if (!this.jobKeys.includes(jobKey)) {
        throw new Error(`Unknown job "${jobKey}". Available jobs: ${this.jobKeys.join(", ")}`);
      }
      return {
        ...this.createJobAPI(jobKey),
        asUser: (req) => this.asUser(req).createJobAPI(jobKey)
      };
    };
    return resolveJob;
  }

  /** Client-visible config: JSON-schema of each job's params plus its task type. */
  clientConfig() {
    const jobs = {};
    for (const key of this.jobKeys) {
      const config = this.jobConfigs[key];
      jobs[key] = {
        params: config?.params ? toJSONSchema(config.params) : null,
        taskType: config?.taskType ?? null
      };
    }
    return { jobs };
  }
};
|
|
524
|
+
/**
 * @internal
 */
const jobs = toPlugin(JobsPlugin);

//#endregion
export { jobs };
//# sourceMappingURL=plugin.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"plugin.js","names":["manifest"],"sources":["../../../src/plugins/jobs/plugin.ts"],"sourcesContent":["import { STATUS_CODES } from \"node:http\";\nimport type { jobs as jobsTypes } from \"@databricks/sdk-experimental\";\nimport type express from \"express\";\nimport type {\n IAppRequest,\n IAppRouter,\n PluginExecutionSettings,\n StreamExecutionSettings,\n} from \"shared\";\nimport { toJSONSchema } from \"zod\";\nimport { JobsConnector } from \"../../connectors/jobs\";\nimport { getCurrentUserId, getWorkspaceClient } from \"../../context\";\nimport { ExecutionError, ValidationError } from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport type { ExecutionResult } from \"../../plugin\";\nimport { Plugin, toPlugin } from \"../../plugin\";\nimport type { PluginManifest, ResourceRequirement } from \"../../registry\";\nimport { ResourceType } from \"../../registry\";\nimport {\n JOBS_READ_DEFAULTS,\n JOBS_STREAM_DEFAULTS,\n JOBS_WRITE_DEFAULTS,\n} from \"./defaults\";\nimport manifest from \"./manifest.json\";\nimport { mapParams } from \"./params\";\nimport type {\n IJobsConfig,\n JobAPI,\n JobConfig,\n JobHandle,\n JobRunStatus,\n JobsExport,\n} from \"./types\";\n\nconst logger = createLogger(\"jobs\");\n\nconst DEFAULT_WAIT_TIMEOUT = 600_000;\nconst DEFAULT_POLL_INTERVAL = 5_000;\n/** Cap on param-key count when a job has no Zod schema. Jobs that need more keys must define a schema. */\nconst MAX_UNVALIDATED_PARAM_KEYS = 50;\n\n/** Replace upstream error messages with generic descriptions keyed by HTTP status. */\nfunction errorResult(status: number): ExecutionResult<never> {\n return {\n ok: false,\n status,\n message: STATUS_CODES[status] ?? \"Request failed\",\n };\n}\n\nfunction isTerminalRunState(state: string | undefined): boolean {\n return (\n state === \"TERMINATED\" || state === \"SKIPPED\" || state === \"INTERNAL_ERROR\"\n );\n}\n\n/** Exponential backoff (1.5x) with +/- 20% jitter, capped at `max`. 
*/\nfunction nextPollDelay(\n current: number,\n max: number,\n): { delay: number; next: number } {\n const jitter = 1 + (Math.random() * 0.4 - 0.2);\n return {\n delay: Math.min(current * jitter, max),\n next: Math.min(current * 1.5, max),\n };\n}\n\nfunction abortableSleep(ms: number, signal?: AbortSignal): Promise<void> {\n return new Promise<void>((resolve) => {\n if (signal?.aborted) {\n resolve();\n return;\n }\n const timer = setTimeout(resolve, ms);\n signal?.addEventListener(\n \"abort\",\n () => {\n clearTimeout(timer);\n resolve();\n },\n { once: true },\n );\n });\n}\n\nclass JobsPlugin extends Plugin {\n static manifest = manifest as PluginManifest;\n\n protected declare config: IJobsConfig;\n private connector: JobsConnector;\n private jobIds: Record<string, number> = {};\n private jobConfigs: Record<string, JobConfig> = {};\n private jobKeys: string[] = [];\n\n /**\n * Scans process.env for DATABRICKS_JOB_* keys and merges with explicit config.\n * Explicit config wins for per-job overrides; auto-discovered jobs get default `{}` config.\n */\n static discoverJobs(config: IJobsConfig): Record<string, JobConfig> {\n const explicit = config.jobs ?? 
{};\n const discovered: Record<string, JobConfig> = {};\n\n const prefix = \"DATABRICKS_JOB_\";\n for (const key of Object.keys(process.env)) {\n if (!key.startsWith(prefix)) continue;\n if (key === \"DATABRICKS_JOB_ID\") continue;\n const suffix = key.slice(prefix.length);\n if (!suffix || !process.env[key]) continue;\n const jobKey = suffix.toLowerCase();\n if (!(jobKey in explicit)) {\n discovered[jobKey] = {};\n }\n }\n\n // Single-job shorthand: DATABRICKS_JOB_ID maps to \"default\" key\n if (\n process.env.DATABRICKS_JOB_ID &&\n Object.keys(explicit).length === 0 &&\n Object.keys(discovered).length === 0\n ) {\n discovered.default = {};\n }\n\n return { ...discovered, ...explicit };\n }\n\n /**\n * Generates resource requirements dynamically from discovered + configured jobs.\n * Each job key maps to a `DATABRICKS_JOB_{KEY_UPPERCASE}` env var (or `DATABRICKS_JOB_ID` for \"default\").\n */\n static getResourceRequirements(config: IJobsConfig): ResourceRequirement[] {\n const jobs = JobsPlugin.discoverJobs(config);\n return Object.keys(jobs).map((key) => ({\n type: ResourceType.JOB,\n alias: `job-${key}`,\n resourceKey: `job-${key}`,\n description: `Databricks Job \"${key}\"`,\n permission: \"CAN_MANAGE_RUN\" as const,\n fields: {\n id: {\n env:\n key === \"default\"\n ? \"DATABRICKS_JOB_ID\"\n : `DATABRICKS_JOB_${key.toUpperCase()}`,\n description: `Job ID for \"${key}\"`,\n },\n },\n required: true,\n }));\n }\n\n constructor(config: IJobsConfig) {\n super(config);\n this.config = config;\n this.connector = new JobsConnector({\n telemetry: config.telemetry,\n });\n\n const jobs = JobsPlugin.discoverJobs(config);\n this.jobKeys = Object.keys(jobs);\n this.jobConfigs = jobs;\n\n for (const key of this.jobKeys) {\n const envVar =\n key === \"default\"\n ? 
\"DATABRICKS_JOB_ID\"\n : `DATABRICKS_JOB_${key.toUpperCase()}`;\n const jobIdStr = process.env[envVar];\n if (jobIdStr) {\n const parsed = Number.parseInt(jobIdStr, 10);\n if (!Number.isNaN(parsed)) {\n this.jobIds[key] = parsed;\n }\n }\n }\n }\n\n async setup() {\n logger.info(\n `Jobs plugin initialized with ${this.jobKeys.length} job(s): ${this.jobKeys.join(\", \")}`,\n );\n }\n\n private get client() {\n return getWorkspaceClient();\n }\n\n private getJobId(jobKey: string): number {\n const id = this.jobIds[jobKey];\n if (!id) {\n const envVar =\n jobKey === \"default\"\n ? \"DATABRICKS_JOB_ID\"\n : `DATABRICKS_JOB_${jobKey.toUpperCase()}`;\n throw new Error(\n `Job \"${jobKey}\" has no configured job ID. Set ${envVar} env var.`,\n );\n }\n return id;\n }\n\n private _readSettings(\n cacheKey: (string | number | object)[],\n ): PluginExecutionSettings {\n return {\n default: {\n ...JOBS_READ_DEFAULTS,\n ...(this.config.timeout != null && { timeout: this.config.timeout }),\n cache: { ...JOBS_READ_DEFAULTS.cache, cacheKey },\n },\n };\n }\n\n private _writeSettings(): PluginExecutionSettings {\n return {\n default: {\n ...JOBS_WRITE_DEFAULTS,\n ...(this.config.timeout != null && { timeout: this.config.timeout }),\n },\n };\n }\n\n /**\n * Validates params against the job's Zod schema (if any) and maps them\n * to SDK request fields based on the task type. Shared by runNow and runAndWait.\n */\n private _validateAndMap(\n jobKey: string,\n params?: Record<string, unknown>,\n ): Record<string, unknown> {\n const jobConfig = this.jobConfigs[jobKey];\n let validated = params;\n\n if (jobConfig?.params) {\n const result = jobConfig.params.safeParse(params ?? {});\n if (!result.success) {\n throw new ValidationError(\n `Parameter validation failed for job \"${jobKey}\": ${result.error.message}`,\n );\n }\n validated = result.data as Record<string, unknown>;\n }\n\n return jobConfig?.taskType && validated\n ? mapParams(jobConfig.taskType, validated)\n : (validated ?? 
{});\n }\n\n /**\n * Creates a JobAPI for a specific configured job key.\n * Each method is scoped to the job's configured ID.\n */\n protected createJobAPI(jobKey: string): JobAPI {\n const jobId = this.getJobId(jobKey);\n const jobConfig = this.jobConfigs[jobKey];\n // Capture `this` for use in the async generator\n const self = this;\n // Eagerly capture the client and userId so that when createJobAPI is\n // called inside an asUser() proxy (which runs in user context), the\n // closures below use the user-scoped client instead of falling back\n // to the service principal when the ALS context has already exited.\n const client = this.client;\n const userKey = getCurrentUserId();\n\n /**\n * Verify that `runId` belongs to this job's configured `jobId`. Returns\n * null if the run is in scope; otherwise returns a 404 `ExecutionResult`.\n * Prevents cross-job access via the `/:jobKey/runs/:runId` HTTP surface.\n */\n const verifyRunScope = async (\n runId: number,\n ): Promise<ExecutionResult<never> | null> => {\n const result = await self.execute(\n async (signal) =>\n self.connector.getRun(client, { run_id: runId }, signal),\n self._readSettings([\"jobs:getRun\", jobKey, runId]),\n userKey,\n );\n if (!result.ok) return errorResult(result.status);\n if (result.data.job_id !== jobId) return errorResult(404);\n return null;\n };\n\n return {\n runNow: async (\n params?: Record<string, unknown>,\n ): Promise<ExecutionResult<jobsTypes.RunNowResponse>> => {\n const sdkFields = self._validateAndMap(jobKey, params);\n\n const result = await self.execute(\n async (signal) =>\n self.connector.runNow(\n client,\n { ...sdkFields, job_id: jobId },\n signal,\n ),\n self._writeSettings(),\n userKey,\n );\n return result.ok ? 
result : errorResult(result.status);\n },\n\n async *runAndWait(\n params?: Record<string, unknown>,\n signal?: AbortSignal,\n ): AsyncGenerator<JobRunStatus, void, unknown> {\n const sdkFields = self._validateAndMap(jobKey, params);\n\n const runResult = await self.execute(\n async (signal) =>\n self.connector.runNow(\n client,\n { ...sdkFields, job_id: jobId },\n signal,\n ),\n self._writeSettings(),\n userKey,\n );\n\n if (!runResult.ok) {\n throw new ExecutionError(\"Failed to trigger job run\");\n }\n const runId = runResult.data.run_id;\n if (!runId) {\n throw new Error(\"runNow did not return a run_id\");\n }\n\n const basePollInterval =\n self.config.pollIntervalMs ?? DEFAULT_POLL_INTERVAL;\n const maxPollInterval = basePollInterval * 6;\n const timeout = jobConfig?.waitTimeout ?? DEFAULT_WAIT_TIMEOUT;\n const startTime = Date.now();\n let currentInterval = basePollInterval;\n\n while (!signal?.aborted) {\n if (Date.now() - startTime > timeout) {\n throw new Error(\n `Job run ${runId} polling timeout after ${timeout}ms`,\n );\n }\n\n const runStatusResult = await self.execute(\n async (signal) =>\n self.connector.getRun(client, { run_id: runId }, signal),\n {\n default: {\n ...JOBS_READ_DEFAULTS,\n cache: { enabled: false },\n },\n },\n userKey,\n );\n if (!runStatusResult.ok) {\n throw new ExecutionError(\n `Failed to poll run status for run ${runId}`,\n );\n }\n const run = runStatusResult.data;\n const state = run.state?.life_cycle_state;\n\n yield { status: state, timestamp: Date.now(), run };\n\n if (isTerminalRunState(state)) return;\n\n const { delay, next } = nextPollDelay(\n currentInterval,\n maxPollInterval,\n );\n currentInterval = next;\n await abortableSleep(delay, signal);\n }\n },\n\n lastRun: async (): Promise<\n ExecutionResult<jobsTypes.BaseRun | undefined>\n > => {\n const result = await self.execute(\n async (signal) =>\n self.connector.listRuns(\n client,\n { job_id: jobId, limit: 1 },\n signal,\n ),\n 
self._readSettings([\"jobs:lastRun\", jobKey]),\n userKey,\n );\n if (!result.ok) return errorResult(result.status);\n return { ok: true, data: result.data[0] };\n },\n\n listRuns: async (options?: {\n limit?: number;\n }): Promise<ExecutionResult<jobsTypes.BaseRun[]>> => {\n const result = await self.execute(\n async (signal) =>\n self.connector.listRuns(\n client,\n { job_id: jobId, limit: options?.limit },\n signal,\n ),\n self._readSettings([\n \"jobs:listRuns\",\n jobKey,\n options?.limit ?? \"default\",\n ]),\n userKey,\n );\n return result.ok ? result : errorResult(result.status);\n },\n\n getRun: async (\n runId: number,\n ): Promise<ExecutionResult<jobsTypes.Run>> => {\n const result = await self.execute(\n async (signal) =>\n self.connector.getRun(client, { run_id: runId }, signal),\n self._readSettings([\"jobs:getRun\", jobKey, runId]),\n userKey,\n );\n if (!result.ok) return errorResult(result.status);\n if (result.data.job_id !== jobId) return errorResult(404);\n return result;\n },\n\n getRunOutput: async (\n runId: number,\n ): Promise<ExecutionResult<jobsTypes.RunOutput>> => {\n const scopeError = await verifyRunScope(runId);\n if (scopeError) return scopeError;\n const result = await self.execute(\n async (signal) =>\n self.connector.getRunOutput(client, { run_id: runId }, signal),\n self._readSettings([\"jobs:getRunOutput\", jobKey, runId]),\n userKey,\n );\n return result.ok ? result : errorResult(result.status);\n },\n\n cancelRun: async (runId: number): Promise<ExecutionResult<void>> => {\n const scopeError = await verifyRunScope(runId);\n if (scopeError) return scopeError;\n const result = await self.execute(\n async (signal) =>\n self.connector.cancelRun(client, { run_id: runId }, signal),\n self._writeSettings(),\n userKey,\n );\n return result.ok ? 
result : errorResult(result.status);\n },\n\n getJob: async (): Promise<ExecutionResult<jobsTypes.Job>> => {\n const result = await self.execute(\n async (signal) =>\n self.connector.getJob(client, { job_id: jobId }, signal),\n self._readSettings([\"jobs:getJob\", jobKey]),\n userKey,\n );\n return result.ok ? result : errorResult(result.status);\n },\n };\n }\n\n /**\n * Resolve `:jobKey` from the request. Returns the key and ID,\n * or sends a 404 and returns `{ jobKey: undefined, jobId: undefined }`.\n */\n private _resolveJob(\n req: express.Request,\n res: express.Response,\n ):\n | { jobKey: string; jobId: number }\n | { jobKey: undefined; jobId: undefined } {\n const jobKey = req.params.jobKey;\n if (!this.jobKeys.includes(jobKey)) {\n const safeKey = jobKey.replace(/[^a-zA-Z0-9_-]/g, \"\");\n res.status(404).json({\n error: `Unknown job \"${safeKey}\"`,\n plugin: this.name,\n });\n return { jobKey: undefined, jobId: undefined };\n }\n const jobId = this.jobIds[jobKey];\n if (!jobId) {\n res.status(404).json({\n error: `Job \"${jobKey}\" has no configured job ID`,\n plugin: this.name,\n });\n return { jobKey: undefined, jobId: undefined };\n }\n return { jobKey, jobId };\n }\n\n private _sendStatusError(res: express.Response, status: number): void {\n res.status(status).json({\n error: STATUS_CODES[status] ?? \"Unknown Error\",\n plugin: this.name,\n });\n }\n\n /**\n * Validate params from an HTTP request body. Eager validation lets streaming\n * requests get a clean 400 instead of a generic SSE error event. 
Throws\n * ValidationError so handlers can map to a 400 response via their catch block.\n */\n private _parseRunParams(\n jobKey: string,\n rawParams: unknown,\n ): Record<string, unknown> | undefined {\n if (\n rawParams !== undefined &&\n (typeof rawParams !== \"object\" ||\n rawParams === null ||\n Array.isArray(rawParams))\n ) {\n throw new ValidationError(\"params must be a plain object\");\n }\n\n const jobConfig = this.jobConfigs[jobKey];\n if (jobConfig?.params) {\n const result = jobConfig.params.safeParse(rawParams ?? {});\n if (!result.success) {\n throw new ValidationError(\"Invalid job parameters\");\n }\n // Pass rawParams — not result.data — to avoid double-transforming\n // when _validateAndMap calls safeParse again downstream.\n return rawParams as Record<string, unknown>;\n }\n // No schema. Either reject (no taskType) or enforce a key cap so that\n // untrusted clients can't spread arbitrarily many fields into the SDK.\n if (rawParams !== undefined) {\n if (!jobConfig?.taskType) {\n throw new ValidationError(\"This job does not accept parameters\");\n }\n const keyCount = Object.keys(rawParams as Record<string, unknown>).length;\n if (keyCount > MAX_UNVALIDATED_PARAM_KEYS) {\n throw new ValidationError(\n `Too many parameters (${keyCount}). 
Define a Zod schema to accept more than ${MAX_UNVALIDATED_PARAM_KEYS}.`,\n );\n }\n }\n return rawParams as Record<string, unknown> | undefined;\n }\n\n private async _handleRun(\n req: express.Request,\n res: express.Response,\n ): Promise<void> {\n const { jobKey } = this._resolveJob(req, res);\n if (!jobKey) return;\n\n const stream = req.query.stream === \"true\";\n\n try {\n const params = this._parseRunParams(jobKey, req.body?.params);\n const api = this.createJobAPI(jobKey);\n\n if (stream) {\n const streamSettings: StreamExecutionSettings = {\n default: JOBS_STREAM_DEFAULTS,\n };\n await this.executeStream<JobRunStatus>(\n res,\n (signal) => api.runAndWait(params, signal),\n streamSettings,\n );\n } else {\n const result = await api.runNow(params);\n if (!result.ok) {\n this._sendStatusError(res, result.status);\n return;\n }\n res.json({ runId: result.data.run_id });\n }\n } catch (error) {\n if (error instanceof ValidationError) {\n if (!res.headersSent) {\n res.status(400).json({ error: error.message, plugin: this.name });\n }\n return;\n }\n logger.error(\"Run failed for job %s: %O\", jobKey, error);\n if (!res.headersSent) {\n res.status(500).json({ error: \"Run failed\", plugin: this.name });\n }\n }\n }\n\n injectRoutes(router: IAppRouter) {\n this.route(router, {\n name: \"run\",\n method: \"post\",\n path: \"/:jobKey/run\",\n handler: (req, res) => this._handleRun(req, res),\n });\n\n // GET /:jobKey/runs\n this.route(router, {\n name: \"runs\",\n method: \"get\",\n path: \"/:jobKey/runs\",\n handler: async (req: express.Request, res: express.Response) => {\n const { jobKey } = this._resolveJob(req, res);\n if (!jobKey) return;\n\n const limit = Math.max(\n 1,\n Math.min(Number.parseInt(req.query.limit as string, 10) || 20, 100),\n );\n\n try {\n const api = this.createJobAPI(jobKey);\n const result = await api.listRuns({ limit });\n if (!result.ok) {\n this._sendStatusError(res, result.status);\n return;\n }\n res.json({ runs: result.data });\n } 
catch (error) {\n logger.error(\"List runs failed for job %s: %O\", jobKey, error);\n res\n .status(500)\n .json({ error: \"List runs failed\", plugin: this.name });\n }\n },\n });\n\n // GET /:jobKey/runs/:runId\n this.route(router, {\n name: \"run-detail\",\n method: \"get\",\n path: \"/:jobKey/runs/:runId\",\n handler: async (req: express.Request, res: express.Response) => {\n const { jobKey } = this._resolveJob(req, res);\n if (!jobKey) return;\n\n const runId = Number.parseInt(req.params.runId, 10);\n if (Number.isNaN(runId) || runId <= 0) {\n res.status(400).json({ error: \"Invalid runId\", plugin: this.name });\n return;\n }\n\n try {\n const api = this.createJobAPI(jobKey);\n const result = await api.getRun(runId);\n if (!result.ok) {\n this._sendStatusError(res, result.status);\n return;\n }\n res.json(result.data);\n } catch (error) {\n logger.error(\n \"Get run failed for job %s run %d: %O\",\n jobKey,\n runId,\n error,\n );\n res.status(500).json({ error: \"Get run failed\", plugin: this.name });\n }\n },\n });\n\n // GET /:jobKey/status\n this.route(router, {\n name: \"status\",\n method: \"get\",\n path: \"/:jobKey/status\",\n handler: async (req: express.Request, res: express.Response) => {\n const { jobKey } = this._resolveJob(req, res);\n if (!jobKey) return;\n\n try {\n const api = this.createJobAPI(jobKey);\n const result = await api.lastRun();\n if (!result.ok) {\n this._sendStatusError(res, result.status);\n return;\n }\n res.json({\n status: result.data?.state?.life_cycle_state ?? null,\n run: result.data ?? 
null,\n });\n } catch (error) {\n logger.error(\"Status check failed for job %s: %O\", jobKey, error);\n res\n .status(500)\n .json({ error: \"Status check failed\", plugin: this.name });\n }\n },\n });\n\n // DELETE /:jobKey/runs/:runId\n this.route(router, {\n name: \"cancel-run\",\n method: \"delete\",\n path: \"/:jobKey/runs/:runId\",\n handler: async (req: express.Request, res: express.Response) => {\n const { jobKey } = this._resolveJob(req, res);\n if (!jobKey) return;\n\n const runId = Number.parseInt(req.params.runId, 10);\n if (Number.isNaN(runId) || runId <= 0) {\n res.status(400).json({ error: \"Invalid runId\", plugin: this.name });\n return;\n }\n\n try {\n const api = this.createJobAPI(jobKey);\n const result = await api.cancelRun(runId);\n if (!result.ok) {\n this._sendStatusError(res, result.status);\n return;\n }\n res.status(204).end();\n } catch (error) {\n logger.error(\n \"Cancel run failed for job %s run %d: %O\",\n jobKey,\n runId,\n error,\n );\n res\n .status(500)\n .json({ error: \"Cancel run failed\", plugin: this.name });\n }\n },\n });\n }\n\n exports(): JobsExport {\n const resolveJob = (jobKey: string): JobHandle => {\n if (!this.jobKeys.includes(jobKey)) {\n throw new Error(\n `Unknown job \"${jobKey}\". Available jobs: ${this.jobKeys.join(\", \")}`,\n );\n }\n\n const spApi = this.createJobAPI(jobKey);\n\n return {\n ...spApi,\n asUser: (req: IAppRequest) => {\n const userPlugin = this.asUser(req) as JobsPlugin;\n return userPlugin.createJobAPI(jobKey);\n },\n };\n };\n\n return resolveJob as JobsExport;\n }\n\n clientConfig(): Record<string, unknown> {\n const jobs: Record<string, { params: unknown; taskType: string | null }> =\n {};\n for (const key of this.jobKeys) {\n const config = this.jobConfigs[key];\n jobs[key] = {\n params: config?.params ? toJSONSchema(config.params) : null,\n taskType: config?.taskType ?? 
null,\n };\n }\n return { jobs };\n }\n}\n\n/**\n * @internal\n */\nexport const jobs = toPlugin(JobsPlugin);\n\n/**\n * @internal\n */\nexport { JobsPlugin };\n"],"mappings":";;;;;;;;;;;;;;;;;;;;cAWqE;aACN;AAsB/D,MAAM,SAAS,aAAa,OAAO;AAEnC,MAAM,uBAAuB;AAC7B,MAAM,wBAAwB;;AAE9B,MAAM,6BAA6B;;AAGnC,SAAS,YAAY,QAAwC;AAC3D,QAAO;EACL,IAAI;EACJ;EACA,SAAS,aAAa,WAAW;EAClC;;AAGH,SAAS,mBAAmB,OAAoC;AAC9D,QACE,UAAU,gBAAgB,UAAU,aAAa,UAAU;;;AAK/D,SAAS,cACP,SACA,KACiC;CACjC,MAAM,SAAS,KAAK,KAAK,QAAQ,GAAG,KAAM;AAC1C,QAAO;EACL,OAAO,KAAK,IAAI,UAAU,QAAQ,IAAI;EACtC,MAAM,KAAK,IAAI,UAAU,KAAK,IAAI;EACnC;;AAGH,SAAS,eAAe,IAAY,QAAqC;AACvE,QAAO,IAAI,SAAe,YAAY;AACpC,MAAI,QAAQ,SAAS;AACnB,YAAS;AACT;;EAEF,MAAM,QAAQ,WAAW,SAAS,GAAG;AACrC,UAAQ,iBACN,eACM;AACJ,gBAAa,MAAM;AACnB,YAAS;KAEX,EAAE,MAAM,MAAM,CACf;GACD;;AAGJ,IAAM,aAAN,MAAM,mBAAmB,OAAO;CAC9B,OAAO,WAAWA;CAGlB,AAAQ;CACR,AAAQ,SAAiC,EAAE;CAC3C,AAAQ,aAAwC,EAAE;CAClD,AAAQ,UAAoB,EAAE;;;;;CAM9B,OAAO,aAAa,QAAgD;EAClE,MAAM,WAAW,OAAO,QAAQ,EAAE;EAClC,MAAM,aAAwC,EAAE;EAEhD,MAAM,SAAS;AACf,OAAK,MAAM,OAAO,OAAO,KAAK,QAAQ,IAAI,EAAE;AAC1C,OAAI,CAAC,IAAI,WAAW,OAAO,CAAE;AAC7B,OAAI,QAAQ,oBAAqB;GACjC,MAAM,SAAS,IAAI,MAAM,GAAc;AACvC,OAAI,CAAC,UAAU,CAAC,QAAQ,IAAI,KAAM;GAClC,MAAM,SAAS,OAAO,aAAa;AACnC,OAAI,EAAE,UAAU,UACd,YAAW,UAAU,EAAE;;AAK3B,MACE,QAAQ,IAAI,qBACZ,OAAO,KAAK,SAAS,CAAC,WAAW,KACjC,OAAO,KAAK,WAAW,CAAC,WAAW,EAEnC,YAAW,UAAU,EAAE;AAGzB,SAAO;GAAE,GAAG;GAAY,GAAG;GAAU;;;;;;CAOvC,OAAO,wBAAwB,QAA4C;EACzE,MAAM,OAAO,WAAW,aAAa,OAAO;AAC5C,SAAO,OAAO,KAAK,KAAK,CAAC,KAAK,SAAS;GACrC,MAAM,aAAa;GACnB,OAAO,OAAO;GACd,aAAa,OAAO;GACpB,aAAa,mBAAmB,IAAI;GACpC,YAAY;GACZ,QAAQ,EACN,IAAI;IACF,KACE,QAAQ,YACJ,sBACA,kBAAkB,IAAI,aAAa;IACzC,aAAa,eAAe,IAAI;IACjC,EACF;GACD,UAAU;GACX,EAAE;;CAGL,YAAY,QAAqB;AAC/B,QAAM,OAAO;AACb,OAAK,SAAS;AACd,OAAK,YAAY,IAAI,cAAc,EACjC,WAAW,OAAO,WACnB,CAAC;EAEF,MAAM,OAAO,WAAW,aAAa,OAAO;AAC5C,OAAK,UAAU,OAAO,KAAK,KAAK;AAChC,OAAK,aAAa;AAElB,OAAK,MAAM,OAAO,KAAK,SAAS;GAC9B,MAAM,SACJ,QAAQ,YACJ,sBACA,kBAAkB,IAAI,aAAa;GACzC,MAAM,WAAW,QAAQ,IAAI;AAC7B,OAAI,UAAU;IACZ,MAAM,SAAS,
OAAO,SAAS,UAAU,GAAG;AAC5C,QAAI,CAAC,OAAO,MAAM,OAAO,CACvB,MAAK,OAAO,OAAO;;;;CAM3B,MAAM,QAAQ;AACZ,SAAO,KACL,gCAAgC,KAAK,QAAQ,OAAO,WAAW,KAAK,QAAQ,KAAK,KAAK,GACvF;;CAGH,IAAY,SAAS;AACnB,SAAO,oBAAoB;;CAG7B,AAAQ,SAAS,QAAwB;EACvC,MAAM,KAAK,KAAK,OAAO;AACvB,MAAI,CAAC,IAAI;GACP,MAAM,SACJ,WAAW,YACP,sBACA,kBAAkB,OAAO,aAAa;AAC5C,SAAM,IAAI,MACR,QAAQ,OAAO,kCAAkC,OAAO,WACzD;;AAEH,SAAO;;CAGT,AAAQ,cACN,UACyB;AACzB,SAAO,EACL,SAAS;GACP,GAAG;GACH,GAAI,KAAK,OAAO,WAAW,QAAQ,EAAE,SAAS,KAAK,OAAO,SAAS;GACnE,OAAO;IAAE,GAAG,mBAAmB;IAAO;IAAU;GACjD,EACF;;CAGH,AAAQ,iBAA0C;AAChD,SAAO,EACL,SAAS;GACP,GAAG;GACH,GAAI,KAAK,OAAO,WAAW,QAAQ,EAAE,SAAS,KAAK,OAAO,SAAS;GACpE,EACF;;;;;;CAOH,AAAQ,gBACN,QACA,QACyB;EACzB,MAAM,YAAY,KAAK,WAAW;EAClC,IAAI,YAAY;AAEhB,MAAI,WAAW,QAAQ;GACrB,MAAM,SAAS,UAAU,OAAO,UAAU,UAAU,EAAE,CAAC;AACvD,OAAI,CAAC,OAAO,QACV,OAAM,IAAI,gBACR,wCAAwC,OAAO,KAAK,OAAO,MAAM,UAClE;AAEH,eAAY,OAAO;;AAGrB,SAAO,WAAW,YAAY,YAC1B,UAAU,UAAU,UAAU,UAAU,GACvC,aAAa,EAAE;;;;;;CAOtB,AAAU,aAAa,QAAwB;EAC7C,MAAM,QAAQ,KAAK,SAAS,OAAO;EACnC,MAAM,YAAY,KAAK,WAAW;EAElC,MAAM,OAAO;EAKb,MAAM,SAAS,KAAK;EACpB,MAAM,UAAU,kBAAkB;;;;;;EAOlC,MAAM,iBAAiB,OACrB,UAC2C;GAC3C,MAAM,SAAS,MAAM,KAAK,QACxB,OAAO,WACL,KAAK,UAAU,OAAO,QAAQ,EAAE,QAAQ,OAAO,EAAE,OAAO,EAC1D,KAAK,cAAc;IAAC;IAAe;IAAQ;IAAM,CAAC,EAClD,QACD;AACD,OAAI,CAAC,OAAO,GAAI,QAAO,YAAY,OAAO,OAAO;AACjD,OAAI,OAAO,KAAK,WAAW,MAAO,QAAO,YAAY,IAAI;AACzD,UAAO;;AAGT,SAAO;GACL,QAAQ,OACN,WACuD;IACvD,MAAM,YAAY,KAAK,gBAAgB,QAAQ,OAAO;IAEtD,MAAM,SAAS,MAAM,KAAK,QACxB,OAAO,WACL,KAAK,UAAU,OACb,QACA;KAAE,GAAG;KAAW,QAAQ;KAAO,EAC/B,OACD,EACH,KAAK,gBAAgB,EACrB,QACD;AACD,WAAO,OAAO,KAAK,SAAS,YAAY,OAAO,OAAO;;GAGxD,OAAO,WACL,QACA,QAC6C;IAC7C,MAAM,YAAY,KAAK,gBAAgB,QAAQ,OAAO;IAEtD,MAAM,YAAY,MAAM,KAAK,QAC3B,OAAO,WACL,KAAK,UAAU,OACb,QACA;KAAE,GAAG;KAAW,QAAQ;KAAO,EAC/B,OACD,EACH,KAAK,gBAAgB,EACrB,QACD;AAED,QAAI,CAAC,UAAU,GACb,OAAM,IAAI,eAAe,4BAA4B;IAEvD,MAAM,QAAQ,UAAU,KAAK;AAC7B,QAAI,CAAC,MACH,OAAM,IAAI,MAAM,iCAAiC;IAGnD,MAAM,mBACJ,KAAK,OAAO,kBAAkB;IAChC,MAAM,kBAAkB,mBAAmB;IAC3C,MAAM,UAAU,WAAW,eAAe;IAC1C,MAAM,YAA
Y,KAAK,KAAK;IAC5B,IAAI,kBAAkB;AAEtB,WAAO,CAAC,QAAQ,SAAS;AACvB,SAAI,KAAK,KAAK,GAAG,YAAY,QAC3B,OAAM,IAAI,MACR,WAAW,MAAM,yBAAyB,QAAQ,IACnD;KAGH,MAAM,kBAAkB,MAAM,KAAK,QACjC,OAAO,WACL,KAAK,UAAU,OAAO,QAAQ,EAAE,QAAQ,OAAO,EAAE,OAAO,EAC1D,EACE,SAAS;MACP,GAAG;MACH,OAAO,EAAE,SAAS,OAAO;MAC1B,EACF,EACD,QACD;AACD,SAAI,CAAC,gBAAgB,GACnB,OAAM,IAAI,eACR,qCAAqC,QACtC;KAEH,MAAM,MAAM,gBAAgB;KAC5B,MAAM,QAAQ,IAAI,OAAO;AAEzB,WAAM;MAAE,QAAQ;MAAO,WAAW,KAAK,KAAK;MAAE;MAAK;AAEnD,SAAI,mBAAmB,MAAM,CAAE;KAE/B,MAAM,EAAE,OAAO,SAAS,cACtB,iBACA,gBACD;AACD,uBAAkB;AAClB,WAAM,eAAe,OAAO,OAAO;;;GAIvC,SAAS,YAEJ;IACH,MAAM,SAAS,MAAM,KAAK,QACxB,OAAO,WACL,KAAK,UAAU,SACb,QACA;KAAE,QAAQ;KAAO,OAAO;KAAG,EAC3B,OACD,EACH,KAAK,cAAc,CAAC,gBAAgB,OAAO,CAAC,EAC5C,QACD;AACD,QAAI,CAAC,OAAO,GAAI,QAAO,YAAY,OAAO,OAAO;AACjD,WAAO;KAAE,IAAI;KAAM,MAAM,OAAO,KAAK;KAAI;;GAG3C,UAAU,OAAO,YAEoC;IACnD,MAAM,SAAS,MAAM,KAAK,QACxB,OAAO,WACL,KAAK,UAAU,SACb,QACA;KAAE,QAAQ;KAAO,OAAO,SAAS;KAAO,EACxC,OACD,EACH,KAAK,cAAc;KACjB;KACA;KACA,SAAS,SAAS;KACnB,CAAC,EACF,QACD;AACD,WAAO,OAAO,KAAK,SAAS,YAAY,OAAO,OAAO;;GAGxD,QAAQ,OACN,UAC4C;IAC5C,MAAM,SAAS,MAAM,KAAK,QACxB,OAAO,WACL,KAAK,UAAU,OAAO,QAAQ,EAAE,QAAQ,OAAO,EAAE,OAAO,EAC1D,KAAK,cAAc;KAAC;KAAe;KAAQ;KAAM,CAAC,EAClD,QACD;AACD,QAAI,CAAC,OAAO,GAAI,QAAO,YAAY,OAAO,OAAO;AACjD,QAAI,OAAO,KAAK,WAAW,MAAO,QAAO,YAAY,IAAI;AACzD,WAAO;;GAGT,cAAc,OACZ,UACkD;IAClD,MAAM,aAAa,MAAM,eAAe,MAAM;AAC9C,QAAI,WAAY,QAAO;IACvB,MAAM,SAAS,MAAM,KAAK,QACxB,OAAO,WACL,KAAK,UAAU,aAAa,QAAQ,EAAE,QAAQ,OAAO,EAAE,OAAO,EAChE,KAAK,cAAc;KAAC;KAAqB;KAAQ;KAAM,CAAC,EACxD,QACD;AACD,WAAO,OAAO,KAAK,SAAS,YAAY,OAAO,OAAO;;GAGxD,WAAW,OAAO,UAAkD;IAClE,MAAM,aAAa,MAAM,eAAe,MAAM;AAC9C,QAAI,WAAY,QAAO;IACvB,MAAM,SAAS,MAAM,KAAK,QACxB,OAAO,WACL,KAAK,UAAU,UAAU,QAAQ,EAAE,QAAQ,OAAO,EAAE,OAAO,EAC7D,KAAK,gBAAgB,EACrB,QACD;AACD,WAAO,OAAO,KAAK,SAAS,YAAY,OAAO,OAAO;;GAGxD,QAAQ,YAAqD;IAC3D,MAAM,SAAS,MAAM,KAAK,QACxB,OAAO,WACL,KAAK,UAAU,OAAO,QAAQ,EAAE,QAAQ,OAAO,EAAE,OAAO,EAC1D,KAAK,cAAc,CAAC,eAAe,OAAO,CAAC,EAC3C,QACD;AACD,WAAO,OAAO,KAAK,SAAS,YAAY,OAAO,OAAO;;GAEzD;;;;;;CAOH
,AAAQ,YACN,KACA,KAG0C;EAC1C,MAAM,SAAS,IAAI,OAAO;AAC1B,MAAI,CAAC,KAAK,QAAQ,SAAS,OAAO,EAAE;GAClC,MAAM,UAAU,OAAO,QAAQ,mBAAmB,GAAG;AACrD,OAAI,OAAO,IAAI,CAAC,KAAK;IACnB,OAAO,gBAAgB,QAAQ;IAC/B,QAAQ,KAAK;IACd,CAAC;AACF,UAAO;IAAE,QAAQ;IAAW,OAAO;IAAW;;EAEhD,MAAM,QAAQ,KAAK,OAAO;AAC1B,MAAI,CAAC,OAAO;AACV,OAAI,OAAO,IAAI,CAAC,KAAK;IACnB,OAAO,QAAQ,OAAO;IACtB,QAAQ,KAAK;IACd,CAAC;AACF,UAAO;IAAE,QAAQ;IAAW,OAAO;IAAW;;AAEhD,SAAO;GAAE;GAAQ;GAAO;;CAG1B,AAAQ,iBAAiB,KAAuB,QAAsB;AACpE,MAAI,OAAO,OAAO,CAAC,KAAK;GACtB,OAAO,aAAa,WAAW;GAC/B,QAAQ,KAAK;GACd,CAAC;;;;;;;CAQJ,AAAQ,gBACN,QACA,WACqC;AACrC,MACE,cAAc,WACb,OAAO,cAAc,YACpB,cAAc,QACd,MAAM,QAAQ,UAAU,EAE1B,OAAM,IAAI,gBAAgB,gCAAgC;EAG5D,MAAM,YAAY,KAAK,WAAW;AAClC,MAAI,WAAW,QAAQ;AAErB,OAAI,CADW,UAAU,OAAO,UAAU,aAAa,EAAE,CAAC,CAC9C,QACV,OAAM,IAAI,gBAAgB,yBAAyB;AAIrD,UAAO;;AAIT,MAAI,cAAc,QAAW;AAC3B,OAAI,CAAC,WAAW,SACd,OAAM,IAAI,gBAAgB,sCAAsC;GAElE,MAAM,WAAW,OAAO,KAAK,UAAqC,CAAC;AACnE,OAAI,WAAW,2BACb,OAAM,IAAI,gBACR,wBAAwB,SAAS,6CAA6C,2BAA2B,GAC1G;;AAGL,SAAO;;CAGT,MAAc,WACZ,KACA,KACe;EACf,MAAM,EAAE,WAAW,KAAK,YAAY,KAAK,IAAI;AAC7C,MAAI,CAAC,OAAQ;EAEb,MAAM,SAAS,IAAI,MAAM,WAAW;AAEpC,MAAI;GACF,MAAM,SAAS,KAAK,gBAAgB,QAAQ,IAAI,MAAM,OAAO;GAC7D,MAAM,MAAM,KAAK,aAAa,OAAO;AAErC,OAAI,QAAQ;IACV,MAAM,iBAA0C,EAC9C,SAAS,sBACV;AACD,UAAM,KAAK,cACT,MACC,WAAW,IAAI,WAAW,QAAQ,OAAO,EAC1C,eACD;UACI;IACL,MAAM,SAAS,MAAM,IAAI,OAAO,OAAO;AACvC,QAAI,CAAC,OAAO,IAAI;AACd,UAAK,iBAAiB,KAAK,OAAO,OAAO;AACzC;;AAEF,QAAI,KAAK,EAAE,OAAO,OAAO,KAAK,QAAQ,CAAC;;WAElC,OAAO;AACd,OAAI,iBAAiB,iBAAiB;AACpC,QAAI,CAAC,IAAI,YACP,KAAI,OAAO,IAAI,CAAC,KAAK;KAAE,OAAO,MAAM;KAAS,QAAQ,KAAK;KAAM,CAAC;AAEnE;;AAEF,UAAO,MAAM,6BAA6B,QAAQ,MAAM;AACxD,OAAI,CAAC,IAAI,YACP,KAAI,OAAO,IAAI,CAAC,KAAK;IAAE,OAAO;IAAc,QAAQ,KAAK;IAAM,CAAC;;;CAKtE,aAAa,QAAoB;AAC/B,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,UAAU,KAAK,QAAQ,KAAK,WAAW,KAAK,IAAI;GACjD,CAAC;AAGF,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;IAC9D,MAAM,EAAE,WAAW,KAAK,YAAY,KAAK,IAAI;AAC7C,QAAI,CAAC,OAAQ;IAEb,MAAM,QAAQ,KAAK,IACjB,GA
CA,KAAK,IAAI,OAAO,SAAS,IAAI,MAAM,OAAiB,GAAG,IAAI,IAAI,IAAI,CACpE;AAED,QAAI;KAEF,MAAM,SAAS,MADH,KAAK,aAAa,OAAO,CACZ,SAAS,EAAE,OAAO,CAAC;AAC5C,SAAI,CAAC,OAAO,IAAI;AACd,WAAK,iBAAiB,KAAK,OAAO,OAAO;AACzC;;AAEF,SAAI,KAAK,EAAE,MAAM,OAAO,MAAM,CAAC;aACxB,OAAO;AACd,YAAO,MAAM,mCAAmC,QAAQ,MAAM;AAC9D,SACG,OAAO,IAAI,CACX,KAAK;MAAE,OAAO;MAAoB,QAAQ,KAAK;MAAM,CAAC;;;GAG9D,CAAC;AAGF,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;IAC9D,MAAM,EAAE,WAAW,KAAK,YAAY,KAAK,IAAI;AAC7C,QAAI,CAAC,OAAQ;IAEb,MAAM,QAAQ,OAAO,SAAS,IAAI,OAAO,OAAO,GAAG;AACnD,QAAI,OAAO,MAAM,MAAM,IAAI,SAAS,GAAG;AACrC,SAAI,OAAO,IAAI,CAAC,KAAK;MAAE,OAAO;MAAiB,QAAQ,KAAK;MAAM,CAAC;AACnE;;AAGF,QAAI;KAEF,MAAM,SAAS,MADH,KAAK,aAAa,OAAO,CACZ,OAAO,MAAM;AACtC,SAAI,CAAC,OAAO,IAAI;AACd,WAAK,iBAAiB,KAAK,OAAO,OAAO;AACzC;;AAEF,SAAI,KAAK,OAAO,KAAK;aACd,OAAO;AACd,YAAO,MACL,wCACA,QACA,OACA,MACD;AACD,SAAI,OAAO,IAAI,CAAC,KAAK;MAAE,OAAO;MAAkB,QAAQ,KAAK;MAAM,CAAC;;;GAGzE,CAAC;AAGF,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;IAC9D,MAAM,EAAE,WAAW,KAAK,YAAY,KAAK,IAAI;AAC7C,QAAI,CAAC,OAAQ;AAEb,QAAI;KAEF,MAAM,SAAS,MADH,KAAK,aAAa,OAAO,CACZ,SAAS;AAClC,SAAI,CAAC,OAAO,IAAI;AACd,WAAK,iBAAiB,KAAK,OAAO,OAAO;AACzC;;AAEF,SAAI,KAAK;MACP,QAAQ,OAAO,MAAM,OAAO,oBAAoB;MAChD,KAAK,OAAO,QAAQ;MACrB,CAAC;aACK,OAAO;AACd,YAAO,MAAM,sCAAsC,QAAQ,MAAM;AACjE,SACG,OAAO,IAAI,CACX,KAAK;MAAE,OAAO;MAAuB,QAAQ,KAAK;MAAM,CAAC;;;GAGjE,CAAC;AAGF,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;IAC9D,MAAM,EAAE,WAAW,KAAK,YAAY,KAAK,IAAI;AAC7C,QAAI,CAAC,OAAQ;IAEb,MAAM,QAAQ,OAAO,SAAS,IAAI,OAAO,OAAO,GAAG;AACnD,QAAI,OAAO,MAAM,MAAM,IAAI,SAAS,GAAG;AACrC,SAAI,OAAO,IAAI,CAAC,KAAK;MAAE,OAAO;MAAiB,QAAQ,KAAK;MAAM,CAAC;AACnE;;AAGF,QAAI;KAEF,MAAM,SAAS,MADH,KAAK,aAAa,OAAO,CACZ,UAAU,MAAM;AACzC,SAAI,CAAC,OAAO,IAAI;AACd,WAAK,iBAAiB,KAAK,OAAO,OAAO;AACzC;;AAEF,SAAI,OAAO,IAAI,CAAC,KAAK;aACd,OAAO;AACd,YAAO,MACL,2CACA,QACA,OACA,MACD;AACD,SACG,OAAO,IAAI,CACX,KAAK;MAAE,OAAO;MAAqB,QAAQ,KAAK;MAAM,CAAC;;;GAG/D,CAAC;;CAGJ,UAAsB;EACpB,MAA
M,cAAc,WAA8B;AAChD,OAAI,CAAC,KAAK,QAAQ,SAAS,OAAO,CAChC,OAAM,IAAI,MACR,gBAAgB,OAAO,qBAAqB,KAAK,QAAQ,KAAK,KAAK,GACpE;AAKH,UAAO;IACL,GAHY,KAAK,aAAa,OAAO;IAIrC,SAAS,QAAqB;AAE5B,YADmB,KAAK,OAAO,IAAI,CACjB,aAAa,OAAO;;IAEzC;;AAGH,SAAO;;CAGT,eAAwC;EACtC,MAAM,OACJ,EAAE;AACJ,OAAK,MAAM,OAAO,KAAK,SAAS;GAC9B,MAAM,SAAS,KAAK,WAAW;AAC/B,QAAK,OAAO;IACV,QAAQ,QAAQ,SAAS,aAAa,OAAO,OAAO,GAAG;IACvD,UAAU,QAAQ,YAAY;IAC/B;;AAEH,SAAO,EAAE,MAAM;;;;;;AAOnB,MAAa,OAAO,SAAS,WAAW"}
|