@poolzin/pool-bot 2026.3.4 → 2026.3.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/CHANGELOG.md +10 -0
  2. package/assets/pool-bot-icon-dark.png +0 -0
  3. package/assets/pool-bot-logo-1.png +0 -0
  4. package/assets/pool-bot-mascot.png +0 -0
  5. package/dist/agents/pi-embedded-runner/tool-result-truncation.js +62 -7
  6. package/dist/agents/poolbot-tools.js +12 -0
  7. package/dist/agents/session-write-lock.js +93 -8
  8. package/dist/agents/tools/pdf-native-providers.js +102 -0
  9. package/dist/agents/tools/pdf-tool.helpers.js +86 -0
  10. package/dist/agents/tools/pdf-tool.js +508 -0
  11. package/dist/build-info.json +3 -3
  12. package/dist/cron/normalize.js +3 -0
  13. package/dist/cron/service/jobs.js +48 -0
  14. package/dist/gateway/protocol/schema/cron.js +3 -0
  15. package/dist/gateway/server-channels.js +99 -14
  16. package/dist/gateway/server-cron.js +89 -0
  17. package/dist/gateway/server-health-probes.js +55 -0
  18. package/dist/gateway/server-http.js +5 -0
  19. package/dist/hooks/bundled/session-memory/handler.js +8 -2
  20. package/dist/infra/abort-signal.js +12 -0
  21. package/dist/infra/boundary-file-read.js +118 -0
  22. package/dist/infra/boundary-path.js +594 -0
  23. package/dist/infra/file-identity.js +12 -0
  24. package/dist/infra/fs-safe.js +377 -12
  25. package/dist/infra/hardlink-guards.js +30 -0
  26. package/dist/infra/json-utf8-bytes.js +8 -0
  27. package/dist/infra/net/fetch-guard.js +63 -13
  28. package/dist/infra/net/proxy-env.js +17 -0
  29. package/dist/infra/net/ssrf.js +74 -272
  30. package/dist/infra/path-alias-guards.js +21 -0
  31. package/dist/infra/path-guards.js +13 -1
  32. package/dist/infra/ports-probe.js +19 -0
  33. package/dist/infra/prototype-keys.js +4 -0
  34. package/dist/infra/restart-stale-pids.js +254 -0
  35. package/dist/infra/safe-open-sync.js +71 -0
  36. package/dist/infra/secure-random.js +7 -0
  37. package/dist/media/ffmpeg-limits.js +4 -0
  38. package/dist/media/input-files.js +6 -2
  39. package/dist/media/temp-files.js +12 -0
  40. package/dist/memory/embedding-chunk-limits.js +5 -2
  41. package/dist/memory/embeddings-ollama.js +91 -138
  42. package/dist/memory/embeddings-remote-fetch.js +11 -10
  43. package/dist/memory/embeddings.js +25 -9
  44. package/dist/memory/manager-embedding-ops.js +1 -1
  45. package/dist/memory/post-json.js +23 -0
  46. package/dist/memory/qmd-manager.js +272 -77
  47. package/dist/memory/remote-http.js +33 -0
  48. package/dist/plugin-sdk/windows-spawn.js +214 -0
  49. package/dist/shared/net/ip-test-fixtures.js +1 -0
  50. package/dist/shared/net/ip.js +303 -0
  51. package/dist/shared/net/ipv4.js +8 -11
  52. package/dist/shared/pid-alive.js +59 -2
  53. package/dist/test-helpers/ssrf.js +13 -0
  54. package/dist/tui/tui.js +9 -4
  55. package/dist/utils/fetch-timeout.js +12 -1
  56. package/docs/adr/003-feature-gap-analysis.md +112 -0
  57. package/package.json +10 -4
@@ -0,0 +1,508 @@
1
+ import path from "node:path";
2
+ import { complete } from "@mariozechner/pi-ai";
3
+ import { Type } from "@sinclair/typebox";
4
+ import { resolveUserPath } from "../../utils.js";
5
+ import { getDefaultLocalRoots, loadWebMedia } from "../../web/media.js";
6
+ import { ensureAuthProfileStore, listProfilesForProvider } from "../auth-profiles.js";
7
+ import { DEFAULT_MODEL, DEFAULT_PROVIDER } from "../defaults.js";
8
+ import { getApiKeyForModel, requireApiKey, resolveEnvApiKey } from "../model-auth.js";
9
+ import { runWithImageModelFallback } from "../model-fallback.js";
10
+ import { resolveConfiguredModelRef } from "../model-selection.js";
11
+ import { ensurePoolbotModelsJson } from "../models-config.js";
12
+ import { discoverAuthStorage, discoverModels } from "../pi-model-discovery.js";
13
+ import { normalizeWorkspaceDir } from "../workspace-dir.js";
14
+ import { resolveProviderVisionModelFromConfig } from "./image-tool.helpers.js";
15
+ import { anthropicAnalyzePdf, geminiAnalyzePdf } from "./pdf-native-providers.js";
16
+ import { extractPdfContent, normalizeMimeType, resolveInputFileLimits, } from "../../media/input-files.js";
17
+ import { coercePdfAssistantText, coercePdfModelConfig, parsePageRange, providerSupportsNativePdf, resolvePdfToolMaxTokens, } from "./pdf-tool.helpers.js";
18
// Default instruction used when the caller supplies no prompt.
const DEFAULT_PROMPT = "Analyze the PDF document.";
// Preferred and backup Anthropic models for native PDF analysis.
const ANTHROPIC_PDF_PRIMARY = "anthropic/claude-opus-4-6";
const ANTHROPIC_PDF_FALLBACK = "anthropic/claude-opus-4-5";
// Hard cap on the number of PDFs accepted per tool invocation.
const DEFAULT_MAX_PDFS = 10;
// Internal helpers re-exported for unit tests only.
export const __testing = {
    coercePdfAssistantText,
    resolvePdfToolMaxTokens,
};
26
/**
 * Determine the provider/model pair to treat as the agent's primary model.
 * Falls back to the compiled-in defaults when no config is supplied.
 */
function resolveDefaultModelRef(cfg) {
    if (!cfg) {
        return { provider: DEFAULT_PROVIDER, model: DEFAULT_MODEL };
    }
    const { provider, model } = resolveConfiguredModelRef({
        cfg,
        defaultProvider: DEFAULT_PROVIDER,
        defaultModel: DEFAULT_MODEL,
    });
    return { provider, model };
}
37
/**
 * Best-effort credential check for `params.provider`: an environment API
 * key wins outright, otherwise any stored auth profile counts.
 * Never prompts the OS keychain.
 */
function hasAuthForProvider(params) {
    const envKey = resolveEnvApiKey(params.provider)?.apiKey;
    if (envKey) {
        return true;
    }
    const store = ensureAuthProfileStore(params.agentDir, {
        allowKeychainPrompt: false,
    });
    return listProfilesForProvider(store, params.provider).length > 0;
}
46
/**
 * Resolve the effective PDF model cascade for the `pdf` tool.
 *
 * Order of preference:
 * 1. Explicit config (`agents.defaults.pdfModel`) wins outright.
 * 2. Otherwise pair the agent's primary model with a PDF-capable model
 *    on the same provider, when credentials allow (best effort).
 * 3. Otherwise fall back across providers: OpenAI first, then Anthropic.
 *
 * Returns null when no provider has usable credentials.
 */
export function resolvePdfModelConfigForTool(params) {
    const explicit = coercePdfModelConfig(params.cfg);
    const hasExplicit = Boolean(explicit.primary?.trim()) || (explicit.fallbacks?.length ?? 0) > 0;
    if (hasExplicit) {
        return explicit;
    }
    const primary = resolveDefaultModelRef(params.cfg);
    const openaiOk = hasAuthForProvider({
        provider: "openai",
        agentDir: params.agentDir,
    });
    const anthropicOk = hasAuthForProvider({
        provider: "anthropic",
        agentDir: params.agentDir,
    });
    const cascade = [];
    // Append a trimmed model ref, skipping blanks and duplicates.
    const pushUnique = (modelRef) => {
        const ref = (modelRef ?? "").trim();
        if (ref && !cascade.includes(ref)) {
            cascade.push(ref);
        }
    };
    const providerVisionFromConfig = resolveProviderVisionModelFromConfig({
        cfg: params.cfg,
        provider: primary.provider,
    });
    const providerOk = hasAuthForProvider({
        provider: primary.provider,
        agentDir: params.agentDir,
    });
    // Try to stay on the primary model's provider first.
    let preferred = null;
    if (providerOk && providerVisionFromConfig) {
        preferred = providerVisionFromConfig;
    } else if (primary.provider === "openai" && openaiOk) {
        preferred = "openai/gpt-5-mini";
    } else if (primary.provider === "anthropic" && anthropicOk) {
        preferred = ANTHROPIC_PDF_PRIMARY;
    }
    if (preferred?.trim()) {
        if (openaiOk) {
            pushUnique("openai/gpt-5-mini");
        }
        if (anthropicOk) {
            pushUnique(ANTHROPIC_PDF_FALLBACK);
        }
        // Never list the primary model among its own fallbacks.
        const pruned = cascade.filter((ref) => ref !== preferred);
        return {
            primary: preferred,
            ...(pruned.length > 0 ? { fallbacks: pruned } : {}),
        };
    }
    // Cross-provider fallback when we can't pair with the primary provider.
    if (openaiOk) {
        if (anthropicOk) {
            pushUnique(ANTHROPIC_PDF_FALLBACK);
        }
        return {
            primary: "openai/gpt-5-mini",
            ...(cascade.length ? { fallbacks: cascade } : {}),
        };
    }
    if (anthropicOk) {
        return {
            primary: ANTHROPIC_PDF_PRIMARY,
            fallbacks: [ANTHROPIC_PDF_FALLBACK],
        };
    }
    return null;
}
129
/**
 * Convert a size cap in megabytes to bytes.
 *
 * Precedence: explicit tool argument, then `agents.defaults.mediaMaxMb`
 * from config. Returns undefined when neither is a finite positive number.
 */
function pickMaxBytes(cfg, maxBytesMb) {
    const toBytes = (mb) =>
        typeof mb === "number" && Number.isFinite(mb) && mb > 0
            ? Math.floor(mb * 1024 * 1024)
            : undefined;
    return toBytes(maxBytesMb) ?? toBytes(cfg?.agents?.defaults?.mediaMaxMb);
}
139
/**
 * Assemble a single-user-message chat context for the non-native PDF path:
 * the prompt (with any extracted PDF text wrapped in delimiters) followed
 * by one image block per rendered page.
 */
function buildPdfContext(prompt, extractedText, images) {
    const text = extractedText.trim()
        ? `${prompt}\n\n--- Extracted PDF Text ---\n${extractedText}\n--- End Extracted PDF Text ---`
        : prompt;
    const content = [
        { type: "text", text },
        ...images.map((img) => ({ type: "image", data: img.data, mimeType: img.mimeType })),
    ];
    return {
        messages: [
            {
                role: "user",
                content,
                timestamp: Date.now(),
            },
        ],
    };
}
160
/**
 * Map a PDF reference to a host path inside the sandbox.
 *
 * First asks the sandbox bridge to resolve the path as given (after
 * stripping any `file://` prefix). If that fails, retries with the file's
 * basename under `media/inbound/` and reports the rewrite via
 * `rewrittenFrom`. The original resolution error is rethrown when the
 * fallback candidate does not exist either.
 */
async function resolveSandboxedPdfPath(params) {
    const stripFileScheme = (p) => (p.startsWith("file://") ? p.slice("file://".length) : p);
    const filePath = stripFileScheme(params.pdfPath);
    const { bridge } = params.sandbox;
    const cwd = params.sandbox.root;
    try {
        const direct = bridge.resolvePath({ filePath, cwd });
        return { resolved: direct.hostPath };
    } catch (err) {
        const candidateRel = path.join("media", "inbound", path.basename(filePath));
        let exists = false;
        try {
            exists = Boolean(await bridge.stat({ filePath: candidateRel, cwd }));
        } catch {
            exists = false;
        }
        if (!exists) {
            // The inbound-media fallback doesn't exist; surface the original error.
            throw err;
        }
        const out = bridge.resolvePath({ filePath: candidateRel, cwd });
        return { resolved: out.hostPath, rewrittenFrom: filePath };
    }
}
192
/**
 * Execute the PDF analysis prompt against the configured model cascade.
 *
 * The PDF-specific model config is overlaid onto
 * `cfg.agents.defaults.imageModel` so that `runWithImageModelFallback`
 * walks our PDF cascade instead of the image one. Providers with native
 * document support receive raw PDF bytes; all others get extracted text
 * plus rendered page images (text only when the model cannot take images).
 */
async function runPdfPrompt(params) {
    const effectiveCfg = params.cfg
        ? {
            ...params.cfg,
            agents: {
                ...params.cfg.agents,
                defaults: {
                    ...params.cfg.agents?.defaults,
                    imageModel: params.pdfModelConfig,
                },
            },
        }
        : undefined;
    await ensurePoolbotModelsJson(effectiveCfg, params.agentDir);
    const authStorage = discoverAuthStorage(params.agentDir);
    const modelRegistry = discoverModels(authStorage, params.agentDir);
    const outcome = await runWithImageModelFallback({
        cfg: effectiveCfg,
        modelOverride: params.modelOverride,
        run: async (provider, modelId) => {
            const model = modelRegistry.find(provider, modelId);
            if (!model) {
                throw new Error(`Unknown model: ${provider}/${modelId}`);
            }
            const apiKeyInfo = await getApiKeyForModel({
                model,
                cfg: effectiveCfg,
                agentDir: params.agentDir,
            });
            const apiKey = requireApiKey(apiKeyInfo, model.provider);
            authStorage.setRuntimeApiKey(model.provider, apiKey);
            // Native path: hand raw PDF bytes to providers that accept documents.
            if (providerSupportsNativePdf(model.provider)) {
                const pdfs = params.pdfBuffers.map((buf) => ({
                    base64: buf.toString("base64"),
                }));
                if (model.provider === "anthropic") {
                    const text = await anthropicAnalyzePdf({
                        apiKey,
                        modelId: model.id,
                        prompt: params.prompt,
                        pdfs,
                        maxTokens: resolvePdfToolMaxTokens(model.maxTokens),
                        baseUrl: model.baseUrl,
                    });
                    return { text, provider: model.provider, model: model.id };
                }
                if (model.provider === "google") {
                    const text = await geminiAnalyzePdf({
                        apiKey,
                        modelId: model.id,
                        prompt: params.prompt,
                        pdfs,
                        baseUrl: model.baseUrl,
                    });
                    return { text, provider: model.provider, model: model.id };
                }
            }
            // Non-native fallback: extract text + optional page images, then
            // send via pi-ai complete().
            const limits = resolveInputFileLimits();
            let combinedText = "";
            let pageImages = [];
            for (const buf of params.pdfBuffers) {
                const extracted = await extractPdfContent({
                    buffer: buf,
                    limits,
                    pageNumbers: params.pageNumbers,
                });
                if (extracted.text) {
                    combinedText += (combinedText ? "\n\n" : "") + extracted.text;
                }
                pageImages.push(...extracted.images);
            }
            // Text-only degradation for models without image input — never error.
            if (!model.input?.includes("image")) {
                pageImages = [];
            }
            const context = buildPdfContext(params.prompt, combinedText, pageImages);
            const message = await complete(model, context, {
                apiKey,
                maxTokens: resolvePdfToolMaxTokens(model.maxTokens),
            });
            const text = coercePdfAssistantText({
                message,
                provider: model.provider,
                model: model.id,
            });
            return { text, provider: model.provider, model: model.id };
        },
    });
    return {
        text: outcome.result.text,
        provider: outcome.result.provider,
        model: outcome.result.model,
        attempts: outcome.attempts.map(({ provider, model, error }) => ({
            provider,
            model,
            error,
        })),
    };
}
296
/**
 * Build the `pdf` agent tool definition, or return null when it cannot run.
 *
 * Null is returned when no agentDir is available (unless an explicit PDF
 * model was configured, which is then a hard error) or when no provider
 * credentials exist to back a PDF-capable model cascade.
 */
export function createPdfTool(options) {
    const agentDir = options?.agentDir?.trim();
    if (!agentDir) {
        // An explicit pdfModel config without an agentDir is a misconfiguration.
        const explicit = coercePdfModelConfig(options?.config);
        if (explicit.primary?.trim() || (explicit.fallbacks?.length ?? 0) > 0) {
            throw new Error("createPdfTool requires agentDir when enabled");
        }
        return null;
    }
    const pdfModelConfig = resolvePdfModelConfigForTool({
        cfg: options?.config,
        agentDir,
    });
    if (!pdfModelConfig) {
        return null;
    }
    const description = "Analyze one or more PDF documents. Use pdf for a single path/URL, or pdfs for multiple (up to 10). Optionally specify pages to limit analysis to specific page ranges.";
    // Local filesystem roots the media loader may read from (plus workspace).
    const localRoots = (() => {
        const roots = getDefaultLocalRoots();
        const workspaceDir = normalizeWorkspaceDir(options?.workspaceDir);
        if (!workspaceDir) {
            return roots;
        }
        return Array.from(new Set([...roots, workspaceDir]));
    })();
    return {
        label: "PDF",
        name: "pdf",
        description,
        parameters: Type.Object({
            prompt: Type.Optional(Type.String()),
            pdf: Type.Optional(Type.String({ description: "Single PDF path or URL." })),
            pdfs: Type.Optional(Type.Array(Type.String(), {
                description: "Multiple PDF paths or URLs (up to 10).",
            })),
            pages: Type.Optional(Type.String({
                description: 'Page range to analyze, e.g. "1-5", "3", "1-3,7-9". Omit to analyze all pages.',
            })),
            model: Type.Optional(Type.String()),
            maxBytesMb: Type.Optional(Type.Number()),
        }),
        execute: async (_toolCallId, args) => {
            const record = args && typeof args === "object" ? args : {};
            // Collect `pdf` + `pdfs` inputs, then dedupe while preserving
            // order ("@"-prefixed references dedupe against their bare form).
            const candidates = [];
            if (typeof record.pdf === "string") {
                candidates.push(record.pdf);
            }
            if (Array.isArray(record.pdfs)) {
                candidates.push(...record.pdfs.filter((v) => typeof v === "string"));
            }
            const seen = new Set();
            const pdfInputs = [];
            for (const candidate of candidates) {
                const trimmed = candidate.trim();
                const dedupeKey = trimmed.startsWith("@") ? trimmed.slice(1).trim() : trimmed;
                if (!dedupeKey || seen.has(dedupeKey)) {
                    continue;
                }
                seen.add(dedupeKey);
                pdfInputs.push(trimmed);
            }
            if (pdfInputs.length === 0) {
                throw new Error("pdf required");
            }
            // Enforce the hardcoded cap on PDFs per call.
            if (pdfInputs.length > DEFAULT_MAX_PDFS) {
                return {
                    content: [
                        {
                            type: "text",
                            text: `Too many PDFs: ${pdfInputs.length} provided, maximum is ${DEFAULT_MAX_PDFS}. Please reduce the number of PDFs.`,
                        },
                    ],
                    details: { error: "too_many_pdfs", count: pdfInputs.length, max: DEFAULT_MAX_PDFS },
                };
            }
            // Parse optional parameters.
            const pagesRaw = typeof record.pages === "string" && record.pages.trim() ? record.pages.trim() : undefined;
            const pageNumbers = pagesRaw ? parsePageRange(pagesRaw, 9999) : undefined;
            const promptRaw = typeof record.prompt === "string" && record.prompt.trim()
                ? record.prompt.trim()
                : DEFAULT_PROMPT;
            const modelOverride = typeof record.model === "string" && record.model.trim() ? record.model.trim() : undefined;
            const maxBytesMb = typeof record.maxBytesMb === "number" ? record.maxBytesMb : undefined;
            const maxBytes = pickMaxBytes(options?.config, maxBytesMb);
            const sandboxConfig = options?.sandbox && options?.sandbox.root.trim()
                ? { root: options.sandbox.root.trim(), bridge: options.sandbox.bridge }
                : null;
            // Load and resolve each PDF.
            const loadedPdfs = [];
            for (const rawInput of pdfInputs) {
                const stripped = rawInput.trim();
                const pdfRaw = stripped.startsWith("@") ? stripped.slice(1).trim() : stripped;
                if (!pdfRaw) {
                    throw new Error("pdf required (empty string in array)");
                }
                // Validate scheme: allow file://, http(s)://, and bare paths.
                // Reject data: URLs (too large for PDFs) and unsupported schemes.
                const looksLikeWindowsDrivePath = /^[a-zA-Z]:[\\/]/.test(pdfRaw);
                const hasScheme = /^[a-z][a-z0-9+.-]*:/i.test(pdfRaw);
                const isFileUrl = /^file:/i.test(pdfRaw);
                const isHttpUrl = /^https?:\/\//i.test(pdfRaw);
                const isDataUrl = /^data:/i.test(pdfRaw);
                if (isDataUrl) {
                    return {
                        content: [
                            {
                                type: "text",
                                text: `Data URLs are not supported for PDFs. Use a file path or an http(s) URL instead.`,
                            },
                        ],
                        details: { error: "unsupported_pdf_reference", pdf: rawInput },
                    };
                }
                if (hasScheme && !looksLikeWindowsDrivePath && !isFileUrl && !isHttpUrl) {
                    return {
                        content: [
                            {
                                type: "text",
                                text: `Unsupported PDF reference: ${rawInput}. Use a file path, a file:// URL, or an http(s) URL.`,
                            },
                        ],
                        details: {
                            error: "unsupported_pdf_reference",
                            pdf: rawInput,
                        },
                    };
                }
                if (sandboxConfig && isHttpUrl) {
                    throw new Error("Sandboxed PDF tool does not allow remote URLs.");
                }
                // Outside the sandbox, expand "~" to the user's home directory.
                const resolvedPdf = (() => {
                    if (sandboxConfig) {
                        return pdfRaw;
                    }
                    if (pdfRaw.startsWith("~")) {
                        return resolveUserPath(pdfRaw);
                    }
                    return pdfRaw;
                })();
                const resolvedPathInfo = sandboxConfig
                    ? await resolveSandboxedPdfPath({
                        sandbox: sandboxConfig,
                        pdfPath: resolvedPdf,
                    })
                    : {
                        resolved: resolvedPdf.startsWith("file://")
                            ? resolvedPdf.slice("file://".length)
                            : resolvedPdf,
                    };
                const resolvedPath = resolvedPathInfo.resolved;
                // Sandboxed reads go through the bridge; otherwise use localRoots.
                const media = sandboxConfig
                    ? await loadWebMedia(resolvedPath ?? resolvedPdf, {
                        maxBytes,
                        sandboxValidated: true,
                        optimizeImages: false,
                        readFile: (filePath) => sandboxConfig.bridge.readFile({ filePath, cwd: sandboxConfig.root }),
                    })
                    : await loadWebMedia(resolvedPath ?? resolvedPdf, {
                        maxBytes,
                        localRoots,
                        optimizeImages: false,
                    });
                const mime = normalizeMimeType(("contentType" in media && media.contentType) || "");
                if (mime !== "application/pdf") {
                    throw new Error(`Expected PDF, got: ${("contentType" in media && media.contentType) || "unknown"}`);
                }
                loadedPdfs.push({
                    buffer: media.buffer,
                    resolvedPdf,
                    ...(resolvedPathInfo.rewrittenFrom
                        ? { rewrittenFrom: resolvedPathInfo.rewrittenFrom }
                        : {}),
                });
            }
            // Run the PDF prompt with all loaded PDFs.
            const result = await runPdfPrompt({
                cfg: options?.config,
                agentDir,
                pdfModelConfig,
                modelOverride,
                prompt: promptRaw,
                pdfBuffers: loadedPdfs.map((p) => p.buffer),
                pageNumbers,
            });
            // Single-PDF calls report `pdf`; multi-PDF calls report `pdfs`.
            const pdfDetails = loadedPdfs.length === 1
                ? {
                    pdf: loadedPdfs[0].resolvedPdf,
                    ...(loadedPdfs[0].rewrittenFrom
                        ? { rewrittenFrom: loadedPdfs[0].rewrittenFrom }
                        : {}),
                }
                : {
                    pdfs: loadedPdfs.map((p) => ({
                        pdf: p.resolvedPdf,
                        ...(p.rewrittenFrom ? { rewrittenFrom: p.rewrittenFrom } : {}),
                    })),
                };
            return {
                content: [{ type: "text", text: result.text }],
                details: {
                    model: `${result.provider}/${result.model}`,
                    ...pdfDetails,
                    ...(pageNumbers ? { pages: pageNumbers } : {}),
                    attempts: result.attempts,
                },
            };
        },
    };
}
@@ -1,5 +1,5 @@
1
1
  {
2
- "version": "2026.3.4",
3
- "commit": "8f59018950a00bf223a194a5e8af854aed69c342",
4
- "builtAt": "2026-03-04T11:23:48.618Z"
2
+ "version": "2026.3.6",
3
+ "commit": "def3163876b28829de26ed4eb0c2166b051434ae",
4
+ "builtAt": "2026-03-06T12:13:28.765Z"
5
5
  }
@@ -351,6 +351,9 @@ export function normalizeCronJobInput(raw, options = DEFAULT_OPTIONS) {
351
351
  if (isRecord(base.delivery)) {
352
352
  next.delivery = coerceDelivery(base.delivery);
353
353
  }
354
+ if (isRecord(base.onFailure)) {
355
+ next.onFailure = coerceDelivery(base.onFailure);
356
+ }
354
357
  if ("isolation" in next) {
355
358
  delete next.isolation;
356
359
  }
@@ -68,6 +68,22 @@ function assertDeliverySupport(job) {
68
68
  throw new Error('cron channel delivery config is only supported for sessionTarget="isolated"');
69
69
  }
70
70
  }
71
/**
 * Validate (and normalize) a job's onFailure alert config.
 *
 * Webhook mode: `onFailure.to` must normalize to a valid http(s) URL; the
 * normalized form is written back onto the job. Any other mode is only
 * supported for sessionTarget="isolated".
 */
function assertFailureAlertSupport(job) {
    const alert = job.onFailure;
    if (!alert) {
        return;
    }
    if (alert.mode === "webhook") {
        const target = normalizeHttpWebhookUrl(alert.to);
        if (!target) {
            throw new Error("cron onFailure webhook requires onFailure.to to be a valid http(s) URL");
        }
        alert.to = target;
        return;
    }
    if (job.sessionTarget !== "isolated") {
        throw new Error('cron onFailure announce config is only supported for sessionTarget="isolated"');
    }
}
71
87
  export function findJobOrThrow(state, id) {
72
88
  const job = state.store?.jobs.find((j) => j.id === id);
73
89
  if (!job) {
@@ -285,12 +301,14 @@ export function createJob(state, input) {
285
301
  wakeMode: input.wakeMode,
286
302
  payload: input.payload,
287
303
  delivery: input.delivery,
304
+ onFailure: input.onFailure,
288
305
  state: {
289
306
  ...input.state,
290
307
  },
291
308
  };
292
309
  assertSupportedJobSpec(job);
293
310
  assertDeliverySupport(job);
311
+ assertFailureAlertSupport(job);
294
312
  job.state.nextRunAtMs = computeJobNextRunAtMs(job, now);
295
313
  return job;
296
314
  }
@@ -352,6 +370,12 @@ export function applyJobPatch(job, patch) {
352
370
  if (job.sessionTarget === "main" && job.delivery?.mode !== "webhook") {
353
371
  job.delivery = undefined;
354
372
  }
373
+ if (patch.onFailure) {
374
+ job.onFailure = mergeCronFailureAlert(job.onFailure, patch.onFailure);
375
+ }
376
+ if (job.sessionTarget === "main" && job.onFailure?.mode !== "webhook") {
377
+ job.onFailure = undefined;
378
+ }
355
379
  if (patch.state) {
356
380
  job.state = { ...job.state, ...patch.state };
357
381
  }
@@ -363,6 +387,7 @@ export function applyJobPatch(job, patch) {
363
387
  }
364
388
  assertSupportedJobSpec(job);
365
389
  assertDeliverySupport(job);
390
+ assertFailureAlertSupport(job);
366
391
  }
367
392
  function mergeCronPayload(existing, patch) {
368
393
  if (patch.kind !== existing.kind) {
@@ -488,6 +513,29 @@ function mergeCronDelivery(existing, patch) {
488
513
  }
489
514
  return next;
490
515
  }
516
/**
 * Merge an onFailure patch over the existing alert config.
 *
 * Semantics: `mode` defaults to "none" and the legacy value "deliver" is
 * mapped to "announce"; `channel` and `to` are only touched when the key is
 * present in the patch, where blank/non-string values clear the field;
 * `bestEffort` is only updated when the patch value is a boolean.
 */
function mergeCronFailureAlert(existing, patch) {
    // Trim a patch value; blanks and non-strings clear the field.
    const trimmedOrUndefined = (value) => {
        const s = typeof value === "string" ? value.trim() : "";
        return s ? s : undefined;
    };
    const next = {
        mode: existing?.mode ?? "none",
        channel: existing?.channel,
        to: existing?.to,
        bestEffort: existing?.bestEffort,
    };
    if (typeof patch.mode === "string") {
        next.mode = patch.mode === "deliver" ? "announce" : patch.mode;
    }
    if ("channel" in patch) {
        next.channel = trimmedOrUndefined(patch.channel);
    }
    if ("to" in patch) {
        next.to = trimmedOrUndefined(patch.to);
    }
    if (typeof patch.bestEffort === "boolean") {
        next.bestEffort = patch.bestEffort;
    }
    return next;
}
491
539
  export function isJobDue(job, nowMs, opts) {
492
540
  if (!job.state) {
493
541
  job.state = {};
@@ -99,6 +99,7 @@ export const CronJobSchema = Type.Object({
99
99
  wakeMode: Type.Union([Type.Literal("next-heartbeat"), Type.Literal("now")]),
100
100
  payload: CronPayloadSchema,
101
101
  delivery: Type.Optional(CronDeliverySchema),
102
+ onFailure: Type.Optional(CronDeliverySchema),
102
103
  state: CronJobStateSchema,
103
104
  }, { additionalProperties: false });
104
105
  export const CronListParamsSchema = Type.Object({
@@ -117,6 +118,7 @@ export const CronAddParamsSchema = Type.Object({
117
118
  wakeMode: Type.Union([Type.Literal("next-heartbeat"), Type.Literal("now")]),
118
119
  payload: CronPayloadSchema,
119
120
  delivery: Type.Optional(CronDeliverySchema),
121
+ onFailure: Type.Optional(CronDeliverySchema),
120
122
  }, { additionalProperties: false });
121
123
  export const CronJobPatchSchema = Type.Object({
122
124
  name: Type.Optional(NonEmptyString),
@@ -130,6 +132,7 @@ export const CronJobPatchSchema = Type.Object({
130
132
  wakeMode: Type.Optional(Type.Union([Type.Literal("next-heartbeat"), Type.Literal("now")])),
131
133
  payload: Type.Optional(CronPayloadPatchSchema),
132
134
  delivery: Type.Optional(CronDeliveryPatchSchema),
135
+ onFailure: Type.Optional(CronDeliveryPatchSchema),
133
136
  state: Type.Optional(Type.Partial(CronJobStateSchema)),
134
137
  }, { additionalProperties: false });
135
138
  export const CronUpdateParamsSchema = Type.Union([