@probelabs/visor 0.1.171 → 0.1.172-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80) hide show
  1. package/dist/agent-protocol/task-store.d.ts +8 -0
  2. package/dist/agent-protocol/task-store.d.ts.map +1 -1
  3. package/dist/agent-protocol/tasks-cli-handler.d.ts.map +1 -1
  4. package/dist/cli-main.d.ts.map +1 -1
  5. package/dist/index.js +2033 -99
  6. package/dist/sdk/{a2a-frontend-GUEGI5SX.mjs → a2a-frontend-N4VU3PHW.mjs} +36 -1
  7. package/dist/sdk/a2a-frontend-N4VU3PHW.mjs.map +1 -0
  8. package/dist/sdk/{check-provider-registry-YTI4PU5F.mjs → check-provider-registry-4KW2YPZW.mjs} +3 -3
  9. package/dist/sdk/{check-provider-registry-ZUU7KSKR.mjs → check-provider-registry-H5GQGT2X.mjs} +3 -3
  10. package/dist/sdk/{chunk-CXA3WUOB.mjs → chunk-CNZ7XHLN.mjs} +9 -9
  11. package/dist/sdk/{chunk-2VDUNKIP.mjs → chunk-ONN7TBQM.mjs} +14 -14
  12. package/dist/sdk/{chunk-XUQSI5SR.mjs.map → chunk-ONN7TBQM.mjs.map} +1 -1
  13. package/dist/sdk/{chunk-AVMMKGLQ.mjs → chunk-PDQTEBOJ.mjs} +18 -18
  14. package/dist/sdk/chunk-PDQTEBOJ.mjs.map +1 -0
  15. package/dist/sdk/{host-A7UNRBQU.mjs → host-DOJQVREK.mjs} +3 -3
  16. package/dist/sdk/{host-A4GGQVEN.mjs → host-YATAT2B4.mjs} +4 -4
  17. package/dist/sdk/knex-store-CRORFJE6.mjs +527 -0
  18. package/dist/sdk/knex-store-CRORFJE6.mjs.map +1 -0
  19. package/dist/sdk/loader-QMJFFST6.mjs +89 -0
  20. package/dist/sdk/loader-QMJFFST6.mjs.map +1 -0
  21. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
  22. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
  23. package/dist/sdk/{schedule-tool-DGVJDHJM.mjs → schedule-tool-HCJUIF4H.mjs} +3 -3
  24. package/dist/sdk/{schedule-tool-4MTFIHCA.mjs → schedule-tool-KUGCECKZ.mjs} +3 -3
  25. package/dist/sdk/{schedule-tool-handler-LMXQ4BZQ.mjs → schedule-tool-handler-DCMMLWLB.mjs} +3 -3
  26. package/dist/sdk/{schedule-tool-handler-XLCSBU3E.mjs → schedule-tool-handler-IZU43FC2.mjs} +3 -3
  27. package/dist/sdk/sdk.js +1700 -291
  28. package/dist/sdk/sdk.js.map +1 -1
  29. package/dist/sdk/sdk.mjs +5 -5
  30. package/dist/sdk/{slack-frontend-QO7LW5BH.mjs → slack-frontend-7T5UISCX.mjs} +2 -2
  31. package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
  32. package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
  33. package/dist/sdk/{workflow-check-provider-KQNLEQEY.mjs → workflow-check-provider-LO2X6XOJ.mjs} +3 -3
  34. package/dist/sdk/{workflow-check-provider-X5EMAJUZ.mjs → workflow-check-provider-O2SIH3PC.mjs} +3 -3
  35. package/dist/slack/markdown.d.ts.map +1 -1
  36. package/package.json +1 -1
  37. package/dist/output/traces/run-2026-03-08T07-55-35-120Z.ndjson +0 -138
  38. package/dist/output/traces/run-2026-03-08T07-56-13-035Z.ndjson +0 -2266
  39. package/dist/sdk/a2a-frontend-FGJ3UBHX.mjs +0 -1622
  40. package/dist/sdk/a2a-frontend-FGJ3UBHX.mjs.map +0 -1
  41. package/dist/sdk/a2a-frontend-GUEGI5SX.mjs.map +0 -1
  42. package/dist/sdk/check-provider-registry-PVTV5G5R.mjs +0 -30
  43. package/dist/sdk/chunk-2VDUNKIP.mjs.map +0 -1
  44. package/dist/sdk/chunk-6FDBLSGV.mjs +0 -739
  45. package/dist/sdk/chunk-6FDBLSGV.mjs.map +0 -1
  46. package/dist/sdk/chunk-AJK3FAA2.mjs +0 -1502
  47. package/dist/sdk/chunk-AJK3FAA2.mjs.map +0 -1
  48. package/dist/sdk/chunk-AVMMKGLQ.mjs.map +0 -1
  49. package/dist/sdk/chunk-O72J3ORS.mjs +0 -449
  50. package/dist/sdk/chunk-O72J3ORS.mjs.map +0 -1
  51. package/dist/sdk/chunk-XUQSI5SR.mjs +0 -44810
  52. package/dist/sdk/failure-condition-evaluator-EFMCQVAK.mjs +0 -18
  53. package/dist/sdk/github-frontend-XG55VJ4R.mjs +0 -1386
  54. package/dist/sdk/github-frontend-XG55VJ4R.mjs.map +0 -1
  55. package/dist/sdk/routing-BVEHVZHK.mjs +0 -26
  56. package/dist/sdk/schedule-tool-PHSF5U2B.mjs +0 -36
  57. package/dist/sdk/schedule-tool-handler-EFNCZNS7.mjs +0 -40
  58. package/dist/sdk/schedule-tool-handler-EFNCZNS7.mjs.map +0 -1
  59. package/dist/sdk/schedule-tool-handler-LMXQ4BZQ.mjs.map +0 -1
  60. package/dist/sdk/schedule-tool-handler-XLCSBU3E.mjs.map +0 -1
  61. package/dist/sdk/trace-helpers-P5L4COO4.mjs +0 -26
  62. package/dist/sdk/trace-helpers-P5L4COO4.mjs.map +0 -1
  63. package/dist/sdk/workflow-check-provider-KQNLEQEY.mjs.map +0 -1
  64. package/dist/sdk/workflow-check-provider-WLMTCFRA.mjs +0 -30
  65. package/dist/sdk/workflow-check-provider-WLMTCFRA.mjs.map +0 -1
  66. package/dist/sdk/workflow-check-provider-X5EMAJUZ.mjs.map +0 -1
  67. package/dist/traces/run-2026-03-08T07-55-35-120Z.ndjson +0 -138
  68. package/dist/traces/run-2026-03-08T07-56-13-035Z.ndjson +0 -2266
  69. /package/dist/sdk/{check-provider-registry-PVTV5G5R.mjs.map → check-provider-registry-4KW2YPZW.mjs.map} +0 -0
  70. /package/dist/sdk/{check-provider-registry-YTI4PU5F.mjs.map → check-provider-registry-H5GQGT2X.mjs.map} +0 -0
  71. /package/dist/sdk/{chunk-CXA3WUOB.mjs.map → chunk-CNZ7XHLN.mjs.map} +0 -0
  72. /package/dist/sdk/{host-A4GGQVEN.mjs.map → host-DOJQVREK.mjs.map} +0 -0
  73. /package/dist/sdk/{host-A7UNRBQU.mjs.map → host-YATAT2B4.mjs.map} +0 -0
  74. /package/dist/sdk/{check-provider-registry-ZUU7KSKR.mjs.map → schedule-tool-HCJUIF4H.mjs.map} +0 -0
  75. /package/dist/sdk/{failure-condition-evaluator-EFMCQVAK.mjs.map → schedule-tool-KUGCECKZ.mjs.map} +0 -0
  76. /package/dist/sdk/{routing-BVEHVZHK.mjs.map → schedule-tool-handler-DCMMLWLB.mjs.map} +0 -0
  77. /package/dist/sdk/{schedule-tool-4MTFIHCA.mjs.map → schedule-tool-handler-IZU43FC2.mjs.map} +0 -0
  78. /package/dist/sdk/{slack-frontend-QO7LW5BH.mjs.map → slack-frontend-7T5UISCX.mjs.map} +0 -0
  79. /package/dist/sdk/{schedule-tool-DGVJDHJM.mjs.map → workflow-check-provider-LO2X6XOJ.mjs.map} +0 -0
  80. /package/dist/sdk/{schedule-tool-PHSF5U2B.mjs.map → workflow-check-provider-O2SIH3PC.mjs.map} +0 -0
@@ -109,28 +109,27 @@ function replaceMermaidBlocks(text, diagrams, replacement = "_(See diagram above
109
109
  }
110
110
  function markdownToSlack(text) {
111
111
  if (!text || typeof text !== "string") return "";
112
- let out = text;
113
- out = out.replace(
114
- /!\[([^\]]*)\]\(([^)\s]+)(?:\s+"[^"]*")?\)/g,
115
- (_m, alt, url) => `<${url}|${alt || "image"}>`
116
- );
117
- out = out.replace(
118
- /\[([^\]]+)\]\(([^)\s]+)(?:\s+"[^"]*")?\)/g,
119
- (_m, label, url) => `<${url}|${label}>`
120
- );
121
- out = out.replace(/\*\*([^*]+)\*\*/g, (_m, inner) => `*${inner}*`);
122
- out = out.replace(/__([^_]+)__/g, (_m, inner) => `*${inner}*`);
123
- const lines = out.split(/\r?\n/);
112
+ const lines = text.split(/\r?\n/);
124
113
  let inCodeBlock = false;
125
114
  for (let i = 0; i < lines.length; i++) {
126
- const line = lines[i];
127
- const trimmed = line.trimStart();
115
+ const trimmed = lines[i].trimStart();
128
116
  if (/^```/.test(trimmed)) {
129
117
  inCodeBlock = !inCodeBlock;
130
118
  continue;
131
119
  }
132
120
  if (inCodeBlock) continue;
133
- const headerMatch = /^(#{1,6})\s+(.+)$/.exec(trimmed);
121
+ let line = lines[i];
122
+ line = line.replace(
123
+ /!\[([^\]]*)\]\(([^)\s]+)(?:\s+"[^"]*")?\)/g,
124
+ (_m, alt, url) => `<${url}|${alt || "image"}>`
125
+ );
126
+ line = line.replace(
127
+ /\[([^\]]+)\]\(([^)\s]+)(?:\s+"[^"]*")?\)/g,
128
+ (_m, label, url) => `<${url}|${label}>`
129
+ );
130
+ line = line.replace(/\*\*([^*]+)\*\*/g, (_m, inner) => `*${inner}*`);
131
+ line = line.replace(/__([^_]+)__/g, (_m, inner) => `*${inner}*`);
132
+ const headerMatch = /^(#{1,6})\s+(.+)$/.exec(line.trimStart());
134
133
  if (headerMatch) {
135
134
  const [, hashes, headerText] = headerMatch;
136
135
  const prevLine = i > 0 ? lines[i - 1].trim() : "";
@@ -147,10 +146,11 @@ function markdownToSlack(text) {
147
146
  if (bulletMatch) {
148
147
  const [, indent, , rest] = bulletMatch;
149
148
  lines[i] = `${indent}\u2022 ${rest}`;
149
+ continue;
150
150
  }
151
+ lines[i] = line;
151
152
  }
152
- out = lines.join("\n");
153
- return out;
153
+ return lines.join("\n");
154
154
  }
155
155
  function extractFileSections(text) {
156
156
  const sections = [];
@@ -210,4 +210,4 @@ export {
210
210
  formatSlackText,
211
211
  init_markdown
212
212
  };
213
- //# sourceMappingURL=chunk-AVMMKGLQ.mjs.map
213
+ //# sourceMappingURL=chunk-PDQTEBOJ.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/slack/markdown.ts"],"sourcesContent":["// Lightweight Markdown → Slack mrkdwn formatter.\n// The goal is to make common Markdown output from AI steps look natural in Slack\n// without pulling in a full Markdown parser.\n//\n// Supported conversions:\n// - # Header / ## Header → *Header* (bold with visual separation)\n// - **bold** / __bold__ → *bold*\n// - [label](url) → <url|label>\n// - ![alt](url) → <url|alt>\n// - *italic* (inline) → _italic_\n// - ```mermaid blocks → rendered to PNG and uploaded to Slack\n//\n// Everything else is passed through unchanged; Slack will still render many\n// Markdown-like constructs (lists, code fences, etc.) natively.\n\nimport { spawn } from 'child_process';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport * as os from 'os';\n\n/**\n * Represents an extracted mermaid diagram\n */\nexport interface MermaidDiagram {\n /** The full match including ```mermaid and ``` */\n fullMatch: string;\n /** The mermaid code content */\n code: string;\n /** Start index in the original text */\n startIndex: number;\n /** End index in the original text */\n endIndex: number;\n}\n\n/**\n * Extract all mermaid code blocks from text\n */\nexport function extractMermaidDiagrams(text: string): MermaidDiagram[] {\n const diagrams: MermaidDiagram[] = [];\n // Match ```mermaid followed by newline, content, and closing ```\n const regex = /```mermaid\\s*\\n([\\s\\S]*?)```/g;\n let match;\n while ((match = regex.exec(text)) !== null) {\n diagrams.push({\n fullMatch: match[0],\n code: match[1].trim(),\n startIndex: match.index,\n endIndex: match.index + match[0].length,\n });\n }\n return diagrams;\n}\n\n/**\n * Render a mermaid diagram to PNG using mmdc CLI (@mermaid-js/mermaid-cli).\n *\n * Requirements:\n * - Node.js and npx must be available in PATH\n * - Network access on first run (npx downloads the package)\n * - Puppeteer/Chromium dependencies (mermaid-cli uses headless browser)\n *\n * On 
Linux, you may need to install chromium dependencies:\n * apt-get install -y chromium-browser libatk-bridge2.0-0 libgtk-3-0\n *\n * On Docker/CI, consider using a base image with puppeteer support or\n * pre-installing @mermaid-js/mermaid-cli globally.\n *\n * @param mermaidCode The mermaid diagram code\n * @returns Buffer containing PNG data, or null if rendering failed\n */\nexport async function renderMermaidToPng(mermaidCode: string): Promise<Buffer | null> {\n // Create temp files for input and output\n const tmpDir = os.tmpdir();\n const inputFile = path.join(\n tmpDir,\n `mermaid-${Date.now()}-${Math.random().toString(36).slice(2)}.mmd`\n );\n const outputFile = path.join(\n tmpDir,\n `mermaid-${Date.now()}-${Math.random().toString(36).slice(2)}.png`\n );\n\n try {\n // Write mermaid code to temp file\n fs.writeFileSync(inputFile, mermaidCode, 'utf-8');\n\n // Detect system chromium for puppeteer (mermaid-cli dependency)\n // Without this, puppeteer may hang trying to download its own chromium\n const chromiumPaths = [\n '/usr/bin/chromium',\n '/usr/bin/chromium-browser',\n '/usr/bin/google-chrome',\n '/usr/bin/chrome',\n ];\n let chromiumPath: string | undefined;\n for (const p of chromiumPaths) {\n if (fs.existsSync(p)) {\n chromiumPath = p;\n break;\n }\n }\n\n // Build environment with chromium path if found\n const env = { ...process.env };\n if (chromiumPath) {\n env.PUPPETEER_EXECUTABLE_PATH = chromiumPath;\n }\n\n // Run mmdc to render PNG\n const result = await new Promise<{ success: boolean; error?: string }>(resolve => {\n const proc = spawn(\n 'npx',\n [\n '--yes',\n '@mermaid-js/mermaid-cli',\n '-i',\n inputFile,\n '-o',\n outputFile,\n '-e',\n 'png',\n '-b',\n 'white',\n '-w',\n '1200',\n ],\n {\n timeout: 60000, // 60 second timeout (first run may download packages)\n stdio: ['pipe', 'pipe', 'pipe'],\n env,\n }\n );\n\n let stderr = '';\n proc.stderr?.on('data', data => {\n stderr += data.toString();\n });\n\n proc.on('close', code => {\n if 
(code === 0) {\n resolve({ success: true });\n } else {\n resolve({ success: false, error: stderr || `Exit code ${code}` });\n }\n });\n\n proc.on('error', err => {\n resolve({ success: false, error: err.message });\n });\n });\n\n if (!result.success) {\n console.warn(`Mermaid rendering failed: ${result.error}`);\n return null;\n }\n\n // Read the output PNG\n if (!fs.existsSync(outputFile)) {\n console.warn('Mermaid output file not created');\n return null;\n }\n\n const pngBuffer = fs.readFileSync(outputFile);\n return pngBuffer;\n } catch (e) {\n console.warn(`Mermaid rendering error: ${e instanceof Error ? e.message : String(e)}`);\n return null;\n } finally {\n // Cleanup temp files\n try {\n if (fs.existsSync(inputFile)) fs.unlinkSync(inputFile);\n if (fs.existsSync(outputFile)) fs.unlinkSync(outputFile);\n } catch {\n // Ignore cleanup errors\n }\n }\n}\n\n/**\n * Replace mermaid blocks in text with a placeholder message\n * @param text Original text\n * @param diagrams Extracted diagrams\n * @param replacement Text to replace each diagram with (or a function that returns replacement for each index)\n */\nexport function replaceMermaidBlocks(\n text: string,\n diagrams: MermaidDiagram[],\n replacement: string | ((index: number) => string) = '_(See diagram above)_'\n): string {\n if (diagrams.length === 0) return text;\n\n // Sort by start index descending to replace from end to start (preserves indices)\n const sorted = [...diagrams].sort((a, b) => b.startIndex - a.startIndex);\n\n let result = text;\n sorted.forEach((diagram, sortedIndex) => {\n // Calculate original index (since we sorted in reverse)\n const originalIndex = diagrams.length - 1 - sortedIndex;\n const rep = typeof replacement === 'function' ? 
replacement(originalIndex) : replacement;\n result = result.slice(0, diagram.startIndex) + rep + result.slice(diagram.endIndex);\n });\n\n return result;\n}\n\nexport function markdownToSlack(text: string): string {\n if (!text || typeof text !== 'string') return '';\n\n // Process line-by-line so ALL transformations respect code blocks.\n const lines = text.split(/\\r?\\n/);\n let inCodeBlock = false;\n for (let i = 0; i < lines.length; i++) {\n const trimmed = lines[i].trimStart();\n\n // Track fenced code blocks — skip all transformations inside them\n if (/^```/.test(trimmed)) {\n inCodeBlock = !inCodeBlock;\n continue;\n }\n if (inCodeBlock) continue;\n\n let line = lines[i];\n\n // Images: ![alt](url) → <url|alt>\n line = line.replace(\n /!\\[([^\\]]*)\\]\\(([^)\\s]+)(?:\\s+\"[^\"]*\")?\\)/g,\n (_m, alt: string, url: string) => `<${url}|${alt || 'image'}>`\n );\n\n // Links: [label](url) → <url|label>\n line = line.replace(\n /\\[([^\\]]+)\\]\\(([^)\\s]+)(?:\\s+\"[^\"]*\")?\\)/g,\n (_m, label: string, url: string) => `<${url}|${label}>`\n );\n\n // Bold: **text** or __text__ → *text*\n line = line.replace(/\\*\\*([^*]+)\\*\\*/g, (_m, inner: string) => `*${inner}*`);\n line = line.replace(/__([^_]+)__/g, (_m, inner: string) => `*${inner}*`);\n\n // Headers: # Header → *Header* (Slack doesn't have native headers)\n const headerMatch = /^(#{1,6})\\s+(.+)$/.exec(line.trimStart());\n if (headerMatch) {\n const [, hashes, headerText] = headerMatch;\n // For h1/h2, add extra emphasis with newline before (if not first line\n // and previous line is not empty/header/code-fence)\n const prevLine = i > 0 ? 
lines[i - 1].trim() : '';\n const prevIsHeaderOrFence =\n /^#{1,6}\\s+/.test(prevLine) || /^\\*[^*]+\\*$/.test(prevLine) || /^```/.test(prevLine);\n if (hashes.length <= 2 && i > 0 && prevLine !== '' && !prevIsHeaderOrFence) {\n lines[i] = `\\n*${headerText.trim()}*`;\n } else {\n lines[i] = `*${headerText.trim()}*`;\n }\n continue;\n }\n\n // Bullet lists: \"- item\" or \"* item\" → \"• item\" (preserve indentation)\n const bulletMatch = /^(\\s*)([-*])\\s+(.+)$/.exec(line);\n if (bulletMatch) {\n const [, indent, , rest] = bulletMatch;\n lines[i] = `${indent}• ${rest}`;\n continue;\n }\n\n lines[i] = line;\n }\n\n return lines.join('\\n');\n}\n\n/**\n * Represents an extracted file section delimited by --- filename.ext ---\n */\nexport interface FileSection {\n /** Full match including delimiter(s) and content */\n fullMatch: string;\n /** Extracted filename (e.g., \"report.csv\") */\n filename: string;\n /** Content after the opening delimiter (trimmed) */\n content: string;\n /** Start index in the original text */\n startIndex: number;\n /** End index in the original text */\n endIndex: number;\n}\n\n/**\n * Extract all file sections delimited by --- filename.ext --- from text.\n *\n * A section starts at a `--- filename.ext ---` line. It ends at:\n * 1. A closing delimiter with the same filename (optional, backward-compatible)\n * 2. The next `--- other.ext ---` delimiter (starts a new section)\n * 3. 
End of text\n */\nexport function extractFileSections(text: string): FileSection[] {\n const sections: FileSection[] = [];\n\n // Find all --- filename.ext --- delimiter lines\n const delimRegex = /^--- ([\\w][\\w.\\-]*\\.\\w+) ---$/gm;\n const delimiters: { filename: string; start: number; end: number }[] = [];\n let m;\n while ((m = delimRegex.exec(text)) !== null) {\n delimiters.push({\n filename: m[1],\n start: m.index,\n end: m.index + m[0].length,\n });\n }\n\n if (delimiters.length === 0) return sections;\n\n for (let i = 0; i < delimiters.length; i++) {\n const open = delimiters[i];\n\n // Content starts after the newline following the opening delimiter\n const contentStart =\n open.end < text.length && text[open.end] === '\\n' ? open.end + 1 : open.end;\n\n // Section extends to the next delimiter or end of text\n const sectionEnd = i + 1 < delimiters.length ? delimiters[i + 1].start : text.length;\n const content = text.substring(contentStart, sectionEnd).trim();\n if (content.length > 0) {\n sections.push({\n fullMatch: text.substring(open.start, sectionEnd),\n filename: open.filename,\n content,\n startIndex: open.start,\n endIndex: sectionEnd,\n });\n }\n }\n\n return sections;\n}\n\n/**\n * Replace file sections in text with placeholder messages.\n * Uses back-to-front replacement to preserve indices (same as replaceMermaidBlocks).\n */\nexport function replaceFileSections(\n text: string,\n sections: FileSection[],\n replacement: string | ((index: number) => string) = idx =>\n `_(See file: ${sections[idx].filename} above)_`\n): string {\n if (sections.length === 0) return text;\n\n const sorted = [...sections].sort((a, b) => b.startIndex - a.startIndex);\n\n let result = text;\n sorted.forEach((section, sortedIndex) => {\n const originalIndex = sections.length - 1 - sortedIndex;\n const rep = typeof replacement === 'function' ? 
replacement(originalIndex) : replacement;\n result = result.slice(0, section.startIndex) + rep + result.slice(section.endIndex);\n });\n\n return result;\n}\n\nexport function formatSlackText(text: string): string {\n return markdownToSlack(text);\n}\n"],"mappings":";;;;;AAeA,SAAS,aAAa;AACtB,YAAY,QAAQ;AACpB,YAAY,UAAU;AACtB,YAAY,QAAQ;AAoDpB,eAAsB,mBAAmB,aAA6C;AAEpF,QAAM,SAAY,UAAO;AACzB,QAAM,YAAiB;AAAA,IACrB;AAAA,IACA,WAAW,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,MAAM,CAAC,CAAC;AAAA,EAC9D;AACA,QAAM,aAAkB;AAAA,IACtB;AAAA,IACA,WAAW,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,MAAM,CAAC,CAAC;AAAA,EAC9D;AAEA,MAAI;AAEF,IAAG,iBAAc,WAAW,aAAa,OAAO;AAIhD,UAAM,gBAAgB;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,QAAI;AACJ,eAAW,KAAK,eAAe;AAC7B,UAAO,cAAW,CAAC,GAAG;AACpB,uBAAe;AACf;AAAA,MACF;AAAA,IACF;AAGA,UAAM,MAAM,EAAE,GAAG,QAAQ,IAAI;AAC7B,QAAI,cAAc;AAChB,UAAI,4BAA4B;AAAA,IAClC;AAGA,UAAM,SAAS,MAAM,IAAI,QAA8C,aAAW;AAChF,YAAM,OAAO;AAAA,QACX;AAAA,QACA;AAAA,UACE;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,QACA;AAAA,UACE,SAAS;AAAA;AAAA,UACT,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AAEA,UAAI,SAAS;AACb,WAAK,QAAQ,GAAG,QAAQ,UAAQ;AAC9B,kBAAU,KAAK,SAAS;AAAA,MAC1B,CAAC;AAED,WAAK,GAAG,SAAS,UAAQ;AACvB,YAAI,SAAS,GAAG;AACd,kBAAQ,EAAE,SAAS,KAAK,CAAC;AAAA,QAC3B,OAAO;AACL,kBAAQ,EAAE,SAAS,OAAO,OAAO,UAAU,aAAa,IAAI,GAAG,CAAC;AAAA,QAClE;AAAA,MACF,CAAC;AAED,WAAK,GAAG,SAAS,SAAO;AACtB,gBAAQ,EAAE,SAAS,OAAO,OAAO,IAAI,QAAQ,CAAC;AAAA,MAChD,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,OAAO,SAAS;AACnB,cAAQ,KAAK,6BAA6B,OAAO,KAAK,EAAE;AACxD,aAAO;AAAA,IACT;AAGA,QAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,cAAQ,KAAK,iCAAiC;AAC9C,aAAO;AAAA,IACT;AAEA,UAAM,YAAe,gBAAa,UAAU;AAC5C,WAAO;AAAA,EACT,SAAS,GAAG;AACV,YAAQ,KAAK,4BAA4B,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC,EAAE;AACrF,WAAO;AAAA,EACT,UAAE;AAEA,QAAI;AACF,UAAO,cAAW,SAAS,EAAG,CAAG,cAAW,SAAS;AACrD,UAAO,cAAW,UAAU,EAAG,CAAG,cAAW,UAAU;AAAA,IACzD,QAAQ;AAAA,IAER;AAAA,EACF;AACF;AAQO
,SAAS,qBACd,MACA,UACA,cAAoD,yBAC5C;AACR,MAAI,SAAS,WAAW,EAAG,QAAO;AAGlC,QAAM,SAAS,CAAC,GAAG,QAAQ,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAEvE,MAAI,SAAS;AACb,SAAO,QAAQ,CAAC,SAAS,gBAAgB;AAEvC,UAAM,gBAAgB,SAAS,SAAS,IAAI;AAC5C,UAAM,MAAM,OAAO,gBAAgB,aAAa,YAAY,aAAa,IAAI;AAC7E,aAAS,OAAO,MAAM,GAAG,QAAQ,UAAU,IAAI,MAAM,OAAO,MAAM,QAAQ,QAAQ;AAAA,EACpF,CAAC;AAED,SAAO;AACT;AAEO,SAAS,gBAAgB,MAAsB;AACpD,MAAI,CAAC,QAAQ,OAAO,SAAS,SAAU,QAAO;AAG9C,QAAM,QAAQ,KAAK,MAAM,OAAO;AAChC,MAAI,cAAc;AAClB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,UAAU,MAAM,CAAC,EAAE,UAAU;AAGnC,QAAI,OAAO,KAAK,OAAO,GAAG;AACxB,oBAAc,CAAC;AACf;AAAA,IACF;AACA,QAAI,YAAa;AAEjB,QAAI,OAAO,MAAM,CAAC;AAGlB,WAAO,KAAK;AAAA,MACV;AAAA,MACA,CAAC,IAAI,KAAa,QAAgB,IAAI,GAAG,IAAI,OAAO,OAAO;AAAA,IAC7D;AAGA,WAAO,KAAK;AAAA,MACV;AAAA,MACA,CAAC,IAAI,OAAe,QAAgB,IAAI,GAAG,IAAI,KAAK;AAAA,IACtD;AAGA,WAAO,KAAK,QAAQ,oBAAoB,CAAC,IAAI,UAAkB,IAAI,KAAK,GAAG;AAC3E,WAAO,KAAK,QAAQ,gBAAgB,CAAC,IAAI,UAAkB,IAAI,KAAK,GAAG;AAGvE,UAAM,cAAc,oBAAoB,KAAK,KAAK,UAAU,CAAC;AAC7D,QAAI,aAAa;AACf,YAAM,CAAC,EAAE,QAAQ,UAAU,IAAI;AAG/B,YAAM,WAAW,IAAI,IAAI,MAAM,IAAI,CAAC,EAAE,KAAK,IAAI;AAC/C,YAAM,sBACJ,aAAa,KAAK,QAAQ,KAAK,cAAc,KAAK,QAAQ,KAAK,OAAO,KAAK,QAAQ;AACrF,UAAI,OAAO,UAAU,KAAK,IAAI,KAAK,aAAa,MAAM,CAAC,qBAAqB;AAC1E,cAAM,CAAC,IAAI;AAAA,GAAM,WAAW,KAAK,CAAC;AAAA,MACpC,OAAO;AACL,cAAM,CAAC,IAAI,IAAI,WAAW,KAAK,CAAC;AAAA,MAClC;AACA;AAAA,IACF;AAGA,UAAM,cAAc,uBAAuB,KAAK,IAAI;AACpD,QAAI,aAAa;AACf,YAAM,CAAC,EAAE,QAAQ,EAAE,IAAI,IAAI;AAC3B,YAAM,CAAC,IAAI,GAAG,MAAM,UAAK,IAAI;AAC7B;AAAA,IACF;AAEA,UAAM,CAAC,IAAI;AAAA,EACb;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AA0BO,SAAS,oBAAoB,MAA6B;AAC/D,QAAM,WAA0B,CAAC;AAGjC,QAAM,aAAa;AACnB,QAAM,aAAiE,CAAC;AACxE,MAAI;AACJ,UAAQ,IAAI,WAAW,KAAK,IAAI,OAAO,MAAM;AAC3C,eAAW,KAAK;AAAA,MACd,UAAU,EAAE,CAAC;AAAA,MACb,OAAO,EAAE;AAAA,MACT,KAAK,EAAE,QAAQ,EAAE,CAAC,EAAE;AAAA,IACtB,CAAC;AAAA,EACH;AAEA,MAAI,WAAW,WAAW,EAAG,QAAO;AAEpC,WAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,UAAM,OAAO,WAAW,CAAC;AAGzB,UAAM,eACJ,KAAK,MAAM,KAAK,UAAU,KAAK,KAAK,GAAG,MAAM,OAAO,KAAK,MAAM,
IAAI,KAAK;AAG1E,UAAM,aAAa,IAAI,IAAI,WAAW,SAAS,WAAW,IAAI,CAAC,EAAE,QAAQ,KAAK;AAC9E,UAAM,UAAU,KAAK,UAAU,cAAc,UAAU,EAAE,KAAK;AAC9D,QAAI,QAAQ,SAAS,GAAG;AACtB,eAAS,KAAK;AAAA,QACZ,WAAW,KAAK,UAAU,KAAK,OAAO,UAAU;AAAA,QAChD,UAAU,KAAK;AAAA,QACf;AAAA,QACA,YAAY,KAAK;AAAA,QACjB,UAAU;AAAA,MACZ,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;AAMO,SAAS,oBACd,MACA,UACA,cAAoD,SAClD,eAAe,SAAS,GAAG,EAAE,QAAQ,YAC/B;AACR,MAAI,SAAS,WAAW,EAAG,QAAO;AAElC,QAAM,SAAS,CAAC,GAAG,QAAQ,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAEvE,MAAI,SAAS;AACb,SAAO,QAAQ,CAAC,SAAS,gBAAgB;AACvC,UAAM,gBAAgB,SAAS,SAAS,IAAI;AAC5C,UAAM,MAAM,OAAO,gBAAgB,aAAa,YAAY,aAAa,IAAI;AAC7E,aAAS,OAAO,MAAM,GAAG,QAAQ,UAAU,IAAI,MAAM,OAAO,MAAM,QAAQ,QAAQ;AAAA,EACpF,CAAC;AAED,SAAO;AACT;AAEO,SAAS,gBAAgB,MAAsB;AACpD,SAAO,gBAAgB,IAAI;AAC7B;AAzWA;AAAA;AAAA;AAAA;AAAA;","names":[]}
@@ -27,13 +27,13 @@ var init_host = __esm({
27
27
  const { GitHubFrontend } = await import("./github-frontend-ZZRU6P43.mjs");
28
28
  this.frontends.push(new GitHubFrontend());
29
29
  } else if (spec.name === "slack") {
30
- const { SlackFrontend } = await import("./slack-frontend-QO7LW5BH.mjs");
30
+ const { SlackFrontend } = await import("./slack-frontend-7T5UISCX.mjs");
31
31
  this.frontends.push(new SlackFrontend(spec.config));
32
32
  } else if (spec.name === "tui") {
33
33
  const { TuiFrontend } = await import("./tui-frontend-T56PZB67.mjs");
34
34
  this.frontends.push(new TuiFrontend(spec.config));
35
35
  } else if (spec.name === "a2a") {
36
- const { A2AFrontend } = await import("./a2a-frontend-GUEGI5SX.mjs");
36
+ const { A2AFrontend } = await import("./a2a-frontend-N4VU3PHW.mjs");
37
37
  this.frontends.push(new A2AFrontend(spec.config));
38
38
  } else {
39
39
  this.log.warn(`[FrontendsHost] Unknown frontend '${spec.name}', skipping`);
@@ -72,4 +72,4 @@ export {
72
72
  FrontendsHost,
73
73
  isActiveFrontend
74
74
  };
75
- //# sourceMappingURL=host-A7UNRBQU.mjs.map
75
+ //# sourceMappingURL=host-DOJQVREK.mjs.map
@@ -24,16 +24,16 @@ var init_host = __esm({
24
24
  const { NdjsonSink } = await import("./ndjson-sink-FD2PSXGD.mjs");
25
25
  this.frontends.push(new NdjsonSink(spec.config));
26
26
  } else if (spec.name === "github") {
27
- const { GitHubFrontend } = await import("./github-frontend-XG55VJ4R.mjs");
27
+ const { GitHubFrontend } = await import("./github-frontend-ZZRU6P43.mjs");
28
28
  this.frontends.push(new GitHubFrontend());
29
29
  } else if (spec.name === "slack") {
30
- const { SlackFrontend } = await import("./slack-frontend-QO7LW5BH.mjs");
30
+ const { SlackFrontend } = await import("./slack-frontend-7T5UISCX.mjs");
31
31
  this.frontends.push(new SlackFrontend(spec.config));
32
32
  } else if (spec.name === "tui") {
33
33
  const { TuiFrontend } = await import("./tui-frontend-T56PZB67.mjs");
34
34
  this.frontends.push(new TuiFrontend(spec.config));
35
35
  } else if (spec.name === "a2a") {
36
- const { A2AFrontend } = await import("./a2a-frontend-FGJ3UBHX.mjs");
36
+ const { A2AFrontend } = await import("./a2a-frontend-N4VU3PHW.mjs");
37
37
  this.frontends.push(new A2AFrontend(spec.config));
38
38
  } else {
39
39
  this.log.warn(`[FrontendsHost] Unknown frontend '${spec.name}', skipping`);
@@ -72,4 +72,4 @@ export {
72
72
  FrontendsHost,
73
73
  isActiveFrontend
74
74
  };
75
- //# sourceMappingURL=host-A4GGQVEN.mjs.map
75
+ //# sourceMappingURL=host-YATAT2B4.mjs.map
@@ -0,0 +1,527 @@
1
+ import {
2
+ init_logger,
3
+ logger
4
+ } from "./chunk-SZXICFQ3.mjs";
5
+ import "./chunk-UCMJJ3IM.mjs";
6
+ import {
7
+ __esm,
8
+ __require
9
+ } from "./chunk-J7LXIPZS.mjs";
10
+
11
+ // src/enterprise/scheduler/knex-store.ts
12
+ import * as fs from "fs";
13
+ import * as path from "path";
14
+ import { v4 as uuidv4 } from "uuid";
15
+ function toNum(val) {
16
+ if (val === null || val === void 0) return void 0;
17
+ return typeof val === "string" ? parseInt(val, 10) : val;
18
+ }
19
+ function safeJsonParse(value) {
20
+ if (!value) return void 0;
21
+ try {
22
+ return JSON.parse(value);
23
+ } catch {
24
+ return void 0;
25
+ }
26
+ }
27
+ function fromTriggerRow(row) {
28
+ return {
29
+ id: row.id,
30
+ creatorId: row.creator_id,
31
+ creatorContext: row.creator_context ?? void 0,
32
+ creatorName: row.creator_name ?? void 0,
33
+ description: row.description ?? void 0,
34
+ channels: safeJsonParse(row.channels),
35
+ fromUsers: safeJsonParse(row.from_users),
36
+ fromBots: row.from_bots === true || row.from_bots === 1,
37
+ contains: safeJsonParse(row.contains),
38
+ matchPattern: row.match_pattern ?? void 0,
39
+ threads: row.threads,
40
+ workflow: row.workflow,
41
+ inputs: safeJsonParse(row.inputs),
42
+ outputContext: safeJsonParse(row.output_context),
43
+ status: row.status,
44
+ enabled: row.enabled === true || row.enabled === 1,
45
+ createdAt: toNum(row.created_at)
46
+ };
47
+ }
48
+ function toTriggerInsertRow(trigger) {
49
+ return {
50
+ id: trigger.id,
51
+ creator_id: trigger.creatorId,
52
+ creator_context: trigger.creatorContext ?? null,
53
+ creator_name: trigger.creatorName ?? null,
54
+ description: trigger.description ?? null,
55
+ channels: trigger.channels ? JSON.stringify(trigger.channels) : null,
56
+ from_users: trigger.fromUsers ? JSON.stringify(trigger.fromUsers) : null,
57
+ from_bots: trigger.fromBots,
58
+ contains: trigger.contains ? JSON.stringify(trigger.contains) : null,
59
+ match_pattern: trigger.matchPattern ?? null,
60
+ threads: trigger.threads,
61
+ workflow: trigger.workflow,
62
+ inputs: trigger.inputs ? JSON.stringify(trigger.inputs) : null,
63
+ output_context: trigger.outputContext ? JSON.stringify(trigger.outputContext) : null,
64
+ status: trigger.status,
65
+ enabled: trigger.enabled,
66
+ created_at: trigger.createdAt
67
+ };
68
+ }
69
+ function fromDbRow(row) {
70
+ return {
71
+ id: row.id,
72
+ creatorId: row.creator_id,
73
+ creatorContext: row.creator_context ?? void 0,
74
+ creatorName: row.creator_name ?? void 0,
75
+ timezone: row.timezone,
76
+ schedule: row.schedule_expr,
77
+ runAt: toNum(row.run_at),
78
+ isRecurring: row.is_recurring === true || row.is_recurring === 1,
79
+ originalExpression: row.original_expression,
80
+ workflow: row.workflow ?? void 0,
81
+ workflowInputs: safeJsonParse(row.workflow_inputs),
82
+ outputContext: safeJsonParse(row.output_context),
83
+ status: row.status,
84
+ createdAt: toNum(row.created_at),
85
+ lastRunAt: toNum(row.last_run_at),
86
+ nextRunAt: toNum(row.next_run_at),
87
+ runCount: row.run_count,
88
+ failureCount: row.failure_count,
89
+ lastError: row.last_error ?? void 0,
90
+ previousResponse: row.previous_response ?? void 0
91
+ };
92
+ }
93
+ function toInsertRow(schedule) {
94
+ return {
95
+ id: schedule.id,
96
+ creator_id: schedule.creatorId,
97
+ creator_context: schedule.creatorContext ?? null,
98
+ creator_name: schedule.creatorName ?? null,
99
+ timezone: schedule.timezone,
100
+ schedule_expr: schedule.schedule,
101
+ run_at: schedule.runAt ?? null,
102
+ is_recurring: schedule.isRecurring,
103
+ original_expression: schedule.originalExpression,
104
+ workflow: schedule.workflow ?? null,
105
+ workflow_inputs: schedule.workflowInputs ? JSON.stringify(schedule.workflowInputs) : null,
106
+ output_context: schedule.outputContext ? JSON.stringify(schedule.outputContext) : null,
107
+ status: schedule.status,
108
+ created_at: schedule.createdAt,
109
+ last_run_at: schedule.lastRunAt ?? null,
110
+ next_run_at: schedule.nextRunAt ?? null,
111
+ run_count: schedule.runCount,
112
+ failure_count: schedule.failureCount,
113
+ last_error: schedule.lastError ?? null,
114
+ previous_response: schedule.previousResponse ?? null
115
+ };
116
+ }
117
var KnexStoreBackend;
// Bundler __esm lazy-init wrapper: calling init_knex_store() runs the module
// body once, which defines KnexStoreBackend.
var init_knex_store = __esm({
  "src/enterprise/scheduler/knex-store.ts"() {
    init_logger();
    // Schedule / message-trigger / distributed-lock storage backend built on
    // knex, supporting PostgreSQL, MySQL and MSSQL dialects.
    KnexStoreBackend = class {
      // knex instance; null until initialize() succeeds and after shutdown().
      knex = null;
      // one of "postgresql" | "mysql" | "mssql" (the keys of clientMap in initialize()).
      driver;
      // connection settings blob (host/port/user/password/ssl/pool/connection_string/...).
      connection;
      constructor(driver, storageConfig, _haConfig) {
        this.driver = driver;
        this.connection = storageConfig.connection || {};
      }
      // Loads the optional "knex" dependency at runtime, builds the
      // dialect-appropriate connection config, creates the knex pool and
      // ensures the schema exists. Throws a friendly install hint when knex
      // itself is missing; rethrows any other load error unchanged.
      async initialize() {
        const { createRequire } = __require("module");
        const runtimeRequire = createRequire(__filename);
        let knexFactory;
        try {
          knexFactory = runtimeRequire("knex");
        } catch (err) {
          const code = err?.code;
          if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
            throw new Error(
              "knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
            );
          }
          throw err;
        }
        // Map our driver names onto the corresponding knex client adapters.
        const clientMap = {
          postgresql: "pg",
          mysql: "mysql2",
          mssql: "tedious"
        };
        const client = clientMap[this.driver];
        let connection;
        if (this.connection.connection_string) {
          // A raw connection string takes precedence over discrete fields.
          connection = this.connection.connection_string;
        } else if (this.driver === "mssql") {
          connection = this.buildMssqlConnection();
        } else {
          connection = this.buildStandardConnection();
        }
        this.knex = knexFactory({
          client,
          connection,
          pool: {
            min: this.connection.pool?.min ?? 0,
            max: this.connection.pool?.max ?? 10
          }
        });
        await this.migrateSchema();
        logger.info(`[KnexStore] Initialized (${this.driver})`);
      }
      // Connection object for the pg / mysql2 clients.
      buildStandardConnection() {
        return {
          host: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          ssl: this.resolveSslConfig()
        };
      }
      // Connection object for tedious (MSSQL), which uses `server` plus
      // options.encrypt/trustServerCertificate instead of an `ssl` field.
      buildMssqlConnection() {
        const ssl = this.connection.ssl;
        const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
        return {
          server: this.connection.host || "localhost",
          port: this.connection.port,
          database: this.connection.database || "visor",
          user: this.connection.user,
          password: this.connection.password,
          options: {
            encrypt: sslEnabled,
            trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
          }
        };
      }
      // Normalizes the configured ssl value into what pg/mysql2 expect:
      // false, or { rejectUnauthorized, ca?, cert?, key? } with the PEM file
      // contents read in eagerly.
      resolveSslConfig() {
        const ssl = this.connection.ssl;
        if (ssl === false || ssl === void 0) return false;
        if (ssl === true) return { rejectUnauthorized: true };
        if (ssl.enabled === false) return false;
        const result = {
          // reject_unauthorized defaults to true unless explicitly set false.
          rejectUnauthorized: ssl.reject_unauthorized !== false
        };
        if (ssl.ca) {
          const caPath = this.validateSslPath(ssl.ca, "CA certificate");
          result.ca = fs.readFileSync(caPath, "utf8");
        }
        if (ssl.cert) {
          const certPath = this.validateSslPath(ssl.cert, "client certificate");
          result.cert = fs.readFileSync(certPath, "utf8");
        }
        if (ssl.key) {
          const keyPath = this.validateSslPath(ssl.key, "client key");
          result.key = fs.readFileSync(keyPath, "utf8");
        }
        return result;
      }
      // Resolves an SSL material path and requires that the file exists.
      // NOTE(review): path.resolve() already returns a normalized path, so the
      // normalize() comparison below should never differ and the "invalid
      // sequences" branch looks unreachable — confirm what this guard was
      // intended to reject.
      validateSslPath(filePath, label) {
        const resolved = path.resolve(filePath);
        if (resolved !== path.normalize(resolved)) {
          throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
        }
        if (!fs.existsSync(resolved)) {
          throw new Error(`SSL ${label} not found: ${filePath}`);
        }
        return resolved;
      }
      // Tears down the knex pool; safe to call when never initialized.
      async shutdown() {
        if (this.knex) {
          await this.knex.destroy();
          this.knex = null;
        }
      }
      // Creates the three tables (schedules, message_triggers, scheduler_locks)
      // when absent. Timestamps are epoch-millis bigints; booleans use the
      // dialect's native boolean type (surfaces as 0/1 on MSSQL — see the
      // driver checks in the query methods below).
      async migrateSchema() {
        const knex = this.getKnex();
        const exists = await knex.schema.hasTable("schedules");
        if (!exists) {
          await knex.schema.createTable("schedules", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.string("timezone", 64).notNullable().defaultTo("UTC");
            table.string("schedule_expr", 255);
            table.bigInteger("run_at");
            table.boolean("is_recurring").notNullable();
            table.text("original_expression");
            table.string("workflow", 255);
            table.text("workflow_inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().index();
            table.bigInteger("created_at").notNullable();
            table.bigInteger("last_run_at");
            table.bigInteger("next_run_at");
            table.integer("run_count").notNullable().defaultTo(0);
            table.integer("failure_count").notNullable().defaultTo(0);
            table.text("last_error");
            table.text("previous_response");
            // Composite index serving the getDueSchedules() scan.
            table.index(["status", "next_run_at"]);
          });
        }
        const triggersExist = await knex.schema.hasTable("message_triggers");
        if (!triggersExist) {
          await knex.schema.createTable("message_triggers", (table) => {
            table.string("id", 36).primary();
            table.string("creator_id", 255).notNullable().index();
            table.string("creator_context", 255);
            table.string("creator_name", 255);
            table.text("description");
            table.text("channels");
            table.text("from_users");
            table.boolean("from_bots").notNullable().defaultTo(false);
            table.text("contains");
            table.text("match_pattern");
            table.string("threads", 20).notNullable().defaultTo("any");
            table.string("workflow", 255).notNullable();
            table.text("inputs");
            table.text("output_context");
            table.string("status", 20).notNullable().defaultTo("active").index();
            table.boolean("enabled").notNullable().defaultTo(true);
            table.bigInteger("created_at").notNullable();
          });
        }
        const locksExist = await knex.schema.hasTable("scheduler_locks");
        if (!locksExist) {
          await knex.schema.createTable("scheduler_locks", (table) => {
            table.string("lock_id", 255).primary();
            table.string("node_id", 255).notNullable();
            table.string("lock_token", 36).notNullable();
            table.bigInteger("acquired_at").notNullable();
            table.bigInteger("expires_at").notNullable();
          });
        }
      }
      // Guarded accessor: all query methods go through here so that use
      // before initialize() (or after shutdown()) fails loudly.
      getKnex() {
        if (!this.knex) {
          throw new Error("[KnexStore] Not initialized. Call initialize() first.");
        }
        return this.knex;
      }
      // --- CRUD ---
      // Inserts a new schedule with a fresh id and server-side defaults
      // (createdAt=now, counters zeroed, status "active"); returns the full record.
      async create(schedule) {
        const knex = this.getKnex();
        const newSchedule = {
          ...schedule,
          id: uuidv4(),
          createdAt: Date.now(),
          runCount: 0,
          failureCount: 0,
          status: "active"
        };
        await knex("schedules").insert(toInsertRow(newSchedule));
        logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
        return newSchedule;
      }
      // Idempotent import: preserves the caller-supplied id, no-op when a row
      // with that id already exists.
      async importSchedule(schedule) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", schedule.id).first();
        if (existing) return;
        await knex("schedules").insert(toInsertRow(schedule));
      }
      // Fetches one schedule by id; undefined when not found.
      async get(id) {
        const knex = this.getKnex();
        const row = await knex("schedules").where("id", id).first();
        return row ? fromDbRow(row) : void 0;
      }
      // Read-modify-write patch of a schedule. The id is pinned to the
      // existing record (and removed from the UPDATE set) so a patch can
      // never reassign it. Returns the merged record, or undefined if absent.
      async update(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("schedules").where("id", id).first();
        if (!existing) return void 0;
        const current = fromDbRow(existing);
        const updated = { ...current, ...patch, id: current.id };
        const row = toInsertRow(updated);
        delete row.id;
        await knex("schedules").where("id", id).update(row);
        return updated;
      }
      // Deletes by id; true when a row was removed.
      async delete(id) {
        const knex = this.getKnex();
        const deleted = await knex("schedules").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted schedule ${id}`);
          return true;
        }
        return false;
      }
      // --- Queries ---
      async getByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("creator_id", creatorId);
        return rows.map((r) => fromDbRow(r));
      }
      async getActiveSchedules() {
        const knex = this.getKnex();
        const rows = await knex("schedules").where("status", "active");
        return rows.map((r) => fromDbRow(r));
      }
      // Active schedules whose fire time has passed: one-shot rows
      // (is_recurring false, run_at <= now) OR recurring rows
      // (is_recurring true, next_run_at <= now). MSSQL has no boolean
      // literal, so compare against 0/1 there.
      async getDueSchedules(now) {
        const ts = now ?? Date.now();
        const knex = this.getKnex();
        const bFalse = this.driver === "mssql" ? 0 : false;
        const bTrue = this.driver === "mssql" ? 1 : true;
        const rows = await knex("schedules").where("status", "active").andWhere(function() {
          this.where(function() {
            this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
          }).orWhere(function() {
            this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
          });
        });
        return rows.map((r) => fromDbRow(r));
      }
      // Case-insensitive substring match on the workflow column, limited to
      // the creator's active schedules. LIKE wildcards (% _ \) in the search
      // term are backslash-escaped before interpolation into the pattern.
      async findByWorkflow(creatorId, workflowName) {
        const knex = this.getKnex();
        const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
        const pattern = `%${escaped}%`;
        const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
        return rows.map((r) => fromDbRow(r));
      }
      async getAll() {
        const knex = this.getKnex();
        const rows = await knex("schedules");
        return rows.map((r) => fromDbRow(r));
      }
      // Single-query aggregate counts by status plus recurring/one-time split.
      // Boolean literals are inlined per dialect (1/0 on MSSQL, true/false
      // elsewhere); only these fixed strings reach the raw SQL.
      async getStats() {
        const knex = this.getKnex();
        const boolTrue = this.driver === "mssql" ? "1" : "true";
        const boolFalse = this.driver === "mssql" ? "0" : "false";
        const result = await knex("schedules").select(
          knex.raw("COUNT(*) as total"),
          knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
          knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
          knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
          knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
          knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
        ).first();
        return {
          total: Number(result.total) || 0,
          active: Number(result.active) || 0,
          paused: Number(result.paused) || 0,
          completed: Number(result.completed) || 0,
          failed: Number(result.failed) || 0,
          recurring: Number(result.recurring) || 0,
          oneTime: Number(result.one_time) || 0
        };
      }
      // Enforces configured quota limits before a create; throws a
      // user-facing Error when a limit would be exceeded. Checks are
      // count-then-insert (not atomic), so concurrent creates could slip
      // past a limit by a small margin.
      async validateLimits(creatorId, isRecurring, limits) {
        const knex = this.getKnex();
        if (limits.maxGlobal) {
          const result = await knex("schedules").count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxGlobal) {
            throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
          }
        }
        if (limits.maxPerUser) {
          const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxPerUser) {
            throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
          }
        }
        if (isRecurring && limits.maxRecurringPerUser) {
          const bTrue = this.driver === "mssql" ? 1 : true;
          const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
          if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
            throw new Error(
              `You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
            );
          }
        }
      }
      // --- HA Distributed Locking (via scheduler_locks table) ---
      // Tries to take the lock: first by stealing an expired row (UPDATE
      // guarded by expires_at < now), then by inserting a fresh row. A
      // failed INSERT (e.g. primary-key conflict because another node holds
      // the lock) is treated as "not acquired" and returns null; on success
      // returns the fencing token for release/renew.
      async tryAcquireLock(lockId, nodeId, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const token = uuidv4();
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
          node_id: nodeId,
          lock_token: token,
          acquired_at: now,
          expires_at: expiresAt
        });
        if (updated > 0) return token;
        try {
          await knex("scheduler_locks").insert({
            lock_id: lockId,
            node_id: nodeId,
            lock_token: token,
            acquired_at: now,
            expires_at: expiresAt
          });
          return token;
        } catch {
          return null;
        }
      }
      // Deletes the lock row only when the caller still holds the token.
      async releaseLock(lockId, lockToken) {
        const knex = this.getKnex();
        await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
      }
      // Extends the lease; false when the token no longer matches (lock lost).
      async renewLock(lockId, lockToken, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1e3;
        const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
        return updated > 0;
      }
      // No-op: every write above is committed immediately, nothing to flush.
      async flush() {
      }
      // --- Message Trigger CRUD ---
      // Inserts a new trigger with a fresh id and createdAt=now.
      async createTrigger(trigger) {
        const knex = this.getKnex();
        const newTrigger = {
          ...trigger,
          id: uuidv4(),
          createdAt: Date.now()
        };
        await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
        logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
        return newTrigger;
      }
      async getTrigger(id) {
        const knex = this.getKnex();
        const row = await knex("message_triggers").where("id", id).first();
        return row ? fromTriggerRow(row) : void 0;
      }
      // Read-modify-write patch; id and createdAt are pinned to the existing
      // record so a patch cannot overwrite them.
      async updateTrigger(id, patch) {
        const knex = this.getKnex();
        const existing = await knex("message_triggers").where("id", id).first();
        if (!existing) return void 0;
        const current = fromTriggerRow(existing);
        const updated = {
          ...current,
          ...patch,
          id: current.id,
          createdAt: current.createdAt
        };
        const row = toTriggerInsertRow(updated);
        delete row.id;
        await knex("message_triggers").where("id", id).update(row);
        return updated;
      }
      async deleteTrigger(id) {
        const knex = this.getKnex();
        const deleted = await knex("message_triggers").where("id", id).del();
        if (deleted > 0) {
          logger.info(`[KnexStore] Deleted trigger ${id}`);
          return true;
        }
        return false;
      }
      async getTriggersByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("creator_id", creatorId);
        return rows.map((r) => fromTriggerRow(r));
      }
      // Triggers that are both status="active" and enabled (0/1 on MSSQL).
      async getActiveTriggers() {
        const knex = this.getKnex();
        const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
        return rows.map((r) => fromTriggerRow(r));
      }
    };
  }
});
523
// Run the lazy initializer eagerly so KnexStoreBackend is defined for the export below.
init_knex_store();
export {
  KnexStoreBackend
};
//# sourceMappingURL=knex-store-CRORFJE6.mjs.map