@aitne/daemon 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/adapters/composite-dashboard-stream.d.ts +42 -0
- package/dist/adapters/composite-dashboard-stream.d.ts.map +1 -0
- package/dist/adapters/composite-dashboard-stream.js +49 -0
- package/dist/adapters/composite-dashboard-stream.js.map +1 -0
- package/dist/adapters/dashboard-adapter.d.ts +104 -0
- package/dist/adapters/dashboard-adapter.d.ts.map +1 -0
- package/dist/adapters/dashboard-adapter.js +216 -0
- package/dist/adapters/dashboard-adapter.js.map +1 -0
- package/dist/adapters/discord.d.ts +77 -0
- package/dist/adapters/discord.d.ts.map +1 -0
- package/dist/adapters/discord.js +339 -0
- package/dist/adapters/discord.js.map +1 -0
- package/dist/adapters/docs-qa-adapter.d.ts +123 -0
- package/dist/adapters/docs-qa-adapter.d.ts.map +1 -0
- package/dist/adapters/docs-qa-adapter.js +218 -0
- package/dist/adapters/docs-qa-adapter.js.map +1 -0
- package/dist/adapters/message-hub.d.ts +70 -0
- package/dist/adapters/message-hub.d.ts.map +1 -0
- package/dist/adapters/message-hub.js +359 -0
- package/dist/adapters/message-hub.js.map +1 -0
- package/dist/adapters/notification-manager.d.ts +99 -0
- package/dist/adapters/notification-manager.d.ts.map +1 -0
- package/dist/adapters/notification-manager.js +498 -0
- package/dist/adapters/notification-manager.js.map +1 -0
- package/dist/adapters/outbound-text.d.ts +28 -0
- package/dist/adapters/outbound-text.d.ts.map +1 -0
- package/dist/adapters/outbound-text.js +58 -0
- package/dist/adapters/outbound-text.js.map +1 -0
- package/dist/adapters/slack-adapter.d.ts +82 -0
- package/dist/adapters/slack-adapter.d.ts.map +1 -0
- package/dist/adapters/slack-adapter.js +359 -0
- package/dist/adapters/slack-adapter.js.map +1 -0
- package/dist/adapters/telegram-adapter.d.ts +107 -0
- package/dist/adapters/telegram-adapter.d.ts.map +1 -0
- package/dist/adapters/telegram-adapter.js +477 -0
- package/dist/adapters/telegram-adapter.js.map +1 -0
- package/dist/adapters/types.d.ts +92 -0
- package/dist/adapters/types.d.ts.map +1 -0
- package/dist/adapters/types.js +2 -0
- package/dist/adapters/types.js.map +1 -0
- package/dist/adapters/whatsapp-adapter.d.ts +213 -0
- package/dist/adapters/whatsapp-adapter.d.ts.map +1 -0
- package/dist/adapters/whatsapp-adapter.js +1216 -0
- package/dist/adapters/whatsapp-adapter.js.map +1 -0
- package/dist/api/chat-binding-query.d.ts +36 -0
- package/dist/api/chat-binding-query.d.ts.map +1 -0
- package/dist/api/chat-binding-query.js +63 -0
- package/dist/api/chat-binding-query.js.map +1 -0
- package/dist/api/chat-session-resume.d.ts +12 -0
- package/dist/api/chat-session-resume.d.ts.map +1 -0
- package/dist/api/chat-session-resume.js +21 -0
- package/dist/api/chat-session-resume.js.map +1 -0
- package/dist/api/delegated-proxy-helper.d.ts +33 -0
- package/dist/api/delegated-proxy-helper.d.ts.map +1 -0
- package/dist/api/delegated-proxy-helper.js +54 -0
- package/dist/api/delegated-proxy-helper.js.map +1 -0
- package/dist/api/directory-picker.d.ts +38 -0
- package/dist/api/directory-picker.d.ts.map +1 -0
- package/dist/api/directory-picker.js +278 -0
- package/dist/api/directory-picker.js.map +1 -0
- package/dist/api/env-writer.d.ts +25 -0
- package/dist/api/env-writer.d.ts.map +1 -0
- package/dist/api/env-writer.js +421 -0
- package/dist/api/env-writer.js.map +1 -0
- package/dist/api/integration-route-gate.d.ts +60 -0
- package/dist/api/integration-route-gate.d.ts.map +1 -0
- package/dist/api/integration-route-gate.js +83 -0
- package/dist/api/integration-route-gate.js.map +1 -0
- package/dist/api/json-body.d.ts +29 -0
- package/dist/api/json-body.d.ts.map +1 -0
- package/dist/api/json-body.js +87 -0
- package/dist/api/json-body.js.map +1 -0
- package/dist/api/routes/activity-sources.d.ts +20 -0
- package/dist/api/routes/activity-sources.d.ts.map +1 -0
- package/dist/api/routes/activity-sources.js +18 -0
- package/dist/api/routes/activity-sources.js.map +1 -0
- package/dist/api/routes/agent.d.ts +4 -0
- package/dist/api/routes/agent.d.ts.map +1 -0
- package/dist/api/routes/agent.js +619 -0
- package/dist/api/routes/agent.js.map +1 -0
- package/dist/api/routes/apple-calendar.d.ts +31 -0
- package/dist/api/routes/apple-calendar.d.ts.map +1 -0
- package/dist/api/routes/apple-calendar.js +310 -0
- package/dist/api/routes/apple-calendar.js.map +1 -0
- package/dist/api/routes/attachments.d.ts +36 -0
- package/dist/api/routes/attachments.d.ts.map +1 -0
- package/dist/api/routes/attachments.js +305 -0
- package/dist/api/routes/attachments.js.map +1 -0
- package/dist/api/routes/backends.d.ts +4 -0
- package/dist/api/routes/backends.d.ts.map +1 -0
- package/dist/api/routes/backends.js +1132 -0
- package/dist/api/routes/backends.js.map +1 -0
- package/dist/api/routes/books.d.ts +63 -0
- package/dist/api/routes/books.d.ts.map +1 -0
- package/dist/api/routes/books.js +467 -0
- package/dist/api/routes/books.js.map +1 -0
- package/dist/api/routes/calendar.d.ts +36 -0
- package/dist/api/routes/calendar.d.ts.map +1 -0
- package/dist/api/routes/calendar.js +351 -0
- package/dist/api/routes/calendar.js.map +1 -0
- package/dist/api/routes/commands.d.ts +4 -0
- package/dist/api/routes/commands.d.ts.map +1 -0
- package/dist/api/routes/commands.js +251 -0
- package/dist/api/routes/commands.js.map +1 -0
- package/dist/api/routes/context.d.ts +57 -0
- package/dist/api/routes/context.d.ts.map +1 -0
- package/dist/api/routes/context.js +1765 -0
- package/dist/api/routes/context.js.map +1 -0
- package/dist/api/routes/dashboard.d.ts +29 -0
- package/dist/api/routes/dashboard.d.ts.map +1 -0
- package/dist/api/routes/dashboard.js +2062 -0
- package/dist/api/routes/dashboard.js.map +1 -0
- package/dist/api/routes/delegated-sync.d.ts +4 -0
- package/dist/api/routes/delegated-sync.d.ts.map +1 -0
- package/dist/api/routes/delegated-sync.js +192 -0
- package/dist/api/routes/delegated-sync.js.map +1 -0
- package/dist/api/routes/delegated.d.ts +42 -0
- package/dist/api/routes/delegated.d.ts.map +1 -0
- package/dist/api/routes/delegated.js +250 -0
- package/dist/api/routes/delegated.js.map +1 -0
- package/dist/api/routes/docs.d.ts +34 -0
- package/dist/api/routes/docs.d.ts.map +1 -0
- package/dist/api/routes/docs.js +580 -0
- package/dist/api/routes/docs.js.map +1 -0
- package/dist/api/routes/entities.d.ts +9 -0
- package/dist/api/routes/entities.d.ts.map +1 -0
- package/dist/api/routes/entities.js +176 -0
- package/dist/api/routes/entities.js.map +1 -0
- package/dist/api/routes/git-accounts.d.ts +23 -0
- package/dist/api/routes/git-accounts.d.ts.map +1 -0
- package/dist/api/routes/git-accounts.js +227 -0
- package/dist/api/routes/git-accounts.js.map +1 -0
- package/dist/api/routes/git-templates.d.ts +50 -0
- package/dist/api/routes/git-templates.d.ts.map +1 -0
- package/dist/api/routes/git-templates.js +276 -0
- package/dist/api/routes/git-templates.js.map +1 -0
- package/dist/api/routes/git.d.ts +34 -0
- package/dist/api/routes/git.d.ts.map +1 -0
- package/dist/api/routes/git.js +126 -0
- package/dist/api/routes/git.js.map +1 -0
- package/dist/api/routes/github.d.ts +34 -0
- package/dist/api/routes/github.d.ts.map +1 -0
- package/dist/api/routes/github.js +465 -0
- package/dist/api/routes/github.js.map +1 -0
- package/dist/api/routes/health.d.ts +4 -0
- package/dist/api/routes/health.d.ts.map +1 -0
- package/dist/api/routes/health.js +257 -0
- package/dist/api/routes/health.js.map +1 -0
- package/dist/api/routes/integrations-reconcile.d.ts +33 -0
- package/dist/api/routes/integrations-reconcile.d.ts.map +1 -0
- package/dist/api/routes/integrations-reconcile.js +463 -0
- package/dist/api/routes/integrations-reconcile.js.map +1 -0
- package/dist/api/routes/integrations.d.ts +19 -0
- package/dist/api/routes/integrations.d.ts.map +1 -0
- package/dist/api/routes/integrations.js +1384 -0
- package/dist/api/routes/integrations.js.map +1 -0
- package/dist/api/routes/knowledge.d.ts +4 -0
- package/dist/api/routes/knowledge.d.ts.map +1 -0
- package/dist/api/routes/knowledge.js +224 -0
- package/dist/api/routes/knowledge.js.map +1 -0
- package/dist/api/routes/mail.d.ts +39 -0
- package/dist/api/routes/mail.d.ts.map +1 -0
- package/dist/api/routes/mail.js +1406 -0
- package/dist/api/routes/mail.js.map +1 -0
- package/dist/api/routes/managed-tasks.d.ts +48 -0
- package/dist/api/routes/managed-tasks.d.ts.map +1 -0
- package/dist/api/routes/managed-tasks.js +844 -0
- package/dist/api/routes/managed-tasks.js.map +1 -0
- package/dist/api/routes/mcp.d.ts +50 -0
- package/dist/api/routes/mcp.d.ts.map +1 -0
- package/dist/api/routes/mcp.js +470 -0
- package/dist/api/routes/mcp.js.map +1 -0
- package/dist/api/routes/metrics.d.ts +13 -0
- package/dist/api/routes/metrics.d.ts.map +1 -0
- package/dist/api/routes/metrics.js +117 -0
- package/dist/api/routes/metrics.js.map +1 -0
- package/dist/api/routes/notion.d.ts +35 -0
- package/dist/api/routes/notion.d.ts.map +1 -0
- package/dist/api/routes/notion.js +442 -0
- package/dist/api/routes/notion.js.map +1 -0
- package/dist/api/routes/observations.d.ts +4 -0
- package/dist/api/routes/observations.d.ts.map +1 -0
- package/dist/api/routes/observations.js +177 -0
- package/dist/api/routes/observations.js.map +1 -0
- package/dist/api/routes/obsidian.d.ts +16 -0
- package/dist/api/routes/obsidian.d.ts.map +1 -0
- package/dist/api/routes/obsidian.js +321 -0
- package/dist/api/routes/obsidian.js.map +1 -0
- package/dist/api/routes/profile-questions.d.ts +17 -0
- package/dist/api/routes/profile-questions.d.ts.map +1 -0
- package/dist/api/routes/profile-questions.js +115 -0
- package/dist/api/routes/profile-questions.js.map +1 -0
- package/dist/api/routes/receipts.d.ts +4 -0
- package/dist/api/routes/receipts.d.ts.map +1 -0
- package/dist/api/routes/receipts.js +155 -0
- package/dist/api/routes/receipts.js.map +1 -0
- package/dist/api/routes/recurring-schedules.d.ts +4 -0
- package/dist/api/routes/recurring-schedules.d.ts.map +1 -0
- package/dist/api/routes/recurring-schedules.js +137 -0
- package/dist/api/routes/recurring-schedules.js.map +1 -0
- package/dist/api/routes/repositories.d.ts +40 -0
- package/dist/api/routes/repositories.d.ts.map +1 -0
- package/dist/api/routes/repositories.js +857 -0
- package/dist/api/routes/repositories.js.map +1 -0
- package/dist/api/routes/setup-migrate.d.ts +74 -0
- package/dist/api/routes/setup-migrate.d.ts.map +1 -0
- package/dist/api/routes/setup-migrate.js +944 -0
- package/dist/api/routes/setup-migrate.js.map +1 -0
- package/dist/api/routes/setup.d.ts +4 -0
- package/dist/api/routes/setup.d.ts.map +1 -0
- package/dist/api/routes/setup.js +443 -0
- package/dist/api/routes/setup.js.map +1 -0
- package/dist/api/routes/skill-curation.d.ts +5 -0
- package/dist/api/routes/skill-curation.d.ts.map +1 -0
- package/dist/api/routes/skill-curation.js +728 -0
- package/dist/api/routes/skill-curation.js.map +1 -0
- package/dist/api/routes/skills.d.ts +52 -0
- package/dist/api/routes/skills.d.ts.map +1 -0
- package/dist/api/routes/skills.js +429 -0
- package/dist/api/routes/skills.js.map +1 -0
- package/dist/api/routes/sot-bindings.d.ts +20 -0
- package/dist/api/routes/sot-bindings.d.ts.map +1 -0
- package/dist/api/routes/sot-bindings.js +163 -0
- package/dist/api/routes/sot-bindings.js.map +1 -0
- package/dist/api/routes/sse.d.ts +86 -0
- package/dist/api/routes/sse.d.ts.map +1 -0
- package/dist/api/routes/sse.js +378 -0
- package/dist/api/routes/sse.js.map +1 -0
- package/dist/api/routes/system.d.ts +4 -0
- package/dist/api/routes/system.d.ts.map +1 -0
- package/dist/api/routes/system.js +207 -0
- package/dist/api/routes/system.js.map +1 -0
- package/dist/api/routes/task-flows.d.ts +30 -0
- package/dist/api/routes/task-flows.d.ts.map +1 -0
- package/dist/api/routes/task-flows.js +155 -0
- package/dist/api/routes/task-flows.js.map +1 -0
- package/dist/api/routes/travel-bookings.d.ts +4 -0
- package/dist/api/routes/travel-bookings.d.ts.map +1 -0
- package/dist/api/routes/travel-bookings.js +142 -0
- package/dist/api/routes/travel-bookings.js.map +1 -0
- package/dist/api/routes/travel-time.d.ts +8 -0
- package/dist/api/routes/travel-time.d.ts.map +1 -0
- package/dist/api/routes/travel-time.js +87 -0
- package/dist/api/routes/travel-time.js.map +1 -0
- package/dist/api/routes/triggers.d.ts +4 -0
- package/dist/api/routes/triggers.d.ts.map +1 -0
- package/dist/api/routes/triggers.js +101 -0
- package/dist/api/routes/triggers.js.map +1 -0
- package/dist/api/routes/voice.d.ts +48 -0
- package/dist/api/routes/voice.d.ts.map +1 -0
- package/dist/api/routes/voice.js +232 -0
- package/dist/api/routes/voice.js.map +1 -0
- package/dist/api/server.d.ts +428 -0
- package/dist/api/server.d.ts.map +1 -0
- package/dist/api/server.js +558 -0
- package/dist/api/server.js.map +1 -0
- package/dist/config.d.ts +136 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +699 -0
- package/dist/config.js.map +1 -0
- package/dist/core/agent-core.d.ts +517 -0
- package/dist/core/agent-core.d.ts.map +1 -0
- package/dist/core/agent-core.js +102 -0
- package/dist/core/agent-core.js.map +1 -0
- package/dist/core/alerts.d.ts +86 -0
- package/dist/core/alerts.d.ts.map +1 -0
- package/dist/core/alerts.js +304 -0
- package/dist/core/alerts.js.map +1 -0
- package/dist/core/atomic-write.d.ts +51 -0
- package/dist/core/atomic-write.d.ts.map +1 -0
- package/dist/core/atomic-write.js +135 -0
- package/dist/core/atomic-write.js.map +1 -0
- package/dist/core/backends/api-key-probe.d.ts +40 -0
- package/dist/core/backends/api-key-probe.d.ts.map +1 -0
- package/dist/core/backends/api-key-probe.js +116 -0
- package/dist/core/backends/api-key-probe.js.map +1 -0
- package/dist/core/backends/auth-health-monitor.d.ts +373 -0
- package/dist/core/backends/auth-health-monitor.d.ts.map +1 -0
- package/dist/core/backends/auth-health-monitor.js +950 -0
- package/dist/core/backends/auth-health-monitor.js.map +1 -0
- package/dist/core/backends/auth-recovery.d.ts +263 -0
- package/dist/core/backends/auth-recovery.d.ts.map +1 -0
- package/dist/core/backends/auth-recovery.js +1086 -0
- package/dist/core/backends/auth-recovery.js.map +1 -0
- package/dist/core/backends/auth-telemetry.d.ts +81 -0
- package/dist/core/backends/auth-telemetry.d.ts.map +1 -0
- package/dist/core/backends/auth-telemetry.js +108 -0
- package/dist/core/backends/auth-telemetry.js.map +1 -0
- package/dist/core/backends/backend-router.d.ts +272 -0
- package/dist/core/backends/backend-router.d.ts.map +1 -0
- package/dist/core/backends/backend-router.js +759 -0
- package/dist/core/backends/backend-router.js.map +1 -0
- package/dist/core/backends/claude-code-core.d.ts +299 -0
- package/dist/core/backends/claude-code-core.d.ts.map +1 -0
- package/dist/core/backends/claude-code-core.js +2541 -0
- package/dist/core/backends/claude-code-core.js.map +1 -0
- package/dist/core/backends/claude-credentials-store.d.ts +83 -0
- package/dist/core/backends/claude-credentials-store.d.ts.map +1 -0
- package/dist/core/backends/claude-credentials-store.js +243 -0
- package/dist/core/backends/claude-credentials-store.js.map +1 -0
- package/dist/core/backends/cli-utils.d.ts +95 -0
- package/dist/core/backends/cli-utils.d.ts.map +1 -0
- package/dist/core/backends/cli-utils.js +464 -0
- package/dist/core/backends/cli-utils.js.map +1 -0
- package/dist/core/backends/codex-core.d.ts +127 -0
- package/dist/core/backends/codex-core.d.ts.map +1 -0
- package/dist/core/backends/codex-core.js +1693 -0
- package/dist/core/backends/codex-core.js.map +1 -0
- package/dist/core/backends/gemini-cli-core.d.ts +367 -0
- package/dist/core/backends/gemini-cli-core.d.ts.map +1 -0
- package/dist/core/backends/gemini-cli-core.js +2331 -0
- package/dist/core/backends/gemini-cli-core.js.map +1 -0
- package/dist/core/backends/idle-watchdog.d.ts +77 -0
- package/dist/core/backends/idle-watchdog.d.ts.map +1 -0
- package/dist/core/backends/idle-watchdog.js +94 -0
- package/dist/core/backends/idle-watchdog.js.map +1 -0
- package/dist/core/backends/install-methods.d.ts +93 -0
- package/dist/core/backends/install-methods.d.ts.map +1 -0
- package/dist/core/backends/install-methods.js +267 -0
- package/dist/core/backends/install-methods.js.map +1 -0
- package/dist/core/backends/model-registry.d.ts +58 -0
- package/dist/core/backends/model-registry.d.ts.map +1 -0
- package/dist/core/backends/model-registry.js +539 -0
- package/dist/core/backends/model-registry.js.map +1 -0
- package/dist/core/backends/plan-presets.d.ts +123 -0
- package/dist/core/backends/plan-presets.d.ts.map +1 -0
- package/dist/core/backends/plan-presets.js +235 -0
- package/dist/core/backends/plan-presets.js.map +1 -0
- package/dist/core/backends/price-fetcher.d.ts +48 -0
- package/dist/core/backends/price-fetcher.d.ts.map +1 -0
- package/dist/core/backends/price-fetcher.js +248 -0
- package/dist/core/backends/price-fetcher.js.map +1 -0
- package/dist/core/backends/process-config-cascade.d.ts +68 -0
- package/dist/core/backends/process-config-cascade.d.ts.map +1 -0
- package/dist/core/backends/process-config-cascade.js +173 -0
- package/dist/core/backends/process-config-cascade.js.map +1 -0
- package/dist/core/backends/prompt-utils.d.ts +6 -0
- package/dist/core/backends/prompt-utils.d.ts.map +1 -0
- package/dist/core/backends/prompt-utils.js +80 -0
- package/dist/core/backends/prompt-utils.js.map +1 -0
- package/dist/core/backends/proxy-model-registry.d.ts +110 -0
- package/dist/core/backends/proxy-model-registry.d.ts.map +1 -0
- package/dist/core/backends/proxy-model-registry.js +195 -0
- package/dist/core/backends/proxy-model-registry.js.map +1 -0
- package/dist/core/backends/silent-api-error-detector.d.ts +31 -0
- package/dist/core/backends/silent-api-error-detector.d.ts.map +1 -0
- package/dist/core/backends/silent-api-error-detector.js +44 -0
- package/dist/core/backends/silent-api-error-detector.js.map +1 -0
- package/dist/core/bang-commands/commands-cost.d.ts +13 -0
- package/dist/core/bang-commands/commands-cost.d.ts.map +1 -0
- package/dist/core/bang-commands/commands-cost.js +91 -0
- package/dist/core/bang-commands/commands-cost.js.map +1 -0
- package/dist/core/bang-commands/commands-report.d.ts +18 -0
- package/dist/core/bang-commands/commands-report.d.ts.map +1 -0
- package/dist/core/bang-commands/commands-report.js +105 -0
- package/dist/core/bang-commands/commands-report.js.map +1 -0
- package/dist/core/bang-commands/commands-stop-start.d.ts +4 -0
- package/dist/core/bang-commands/commands-stop-start.d.ts.map +1 -0
- package/dist/core/bang-commands/commands-stop-start.js +88 -0
- package/dist/core/bang-commands/commands-stop-start.js.map +1 -0
- package/dist/core/bang-commands/format-utils.d.ts +34 -0
- package/dist/core/bang-commands/format-utils.d.ts.map +1 -0
- package/dist/core/bang-commands/format-utils.js +118 -0
- package/dist/core/bang-commands/format-utils.js.map +1 -0
- package/dist/core/bang-commands/index.d.ts +20 -0
- package/dist/core/bang-commands/index.d.ts.map +1 -0
- package/dist/core/bang-commands/index.js +31 -0
- package/dist/core/bang-commands/index.js.map +1 -0
- package/dist/core/bang-commands/registry.d.ts +72 -0
- package/dist/core/bang-commands/registry.d.ts.map +1 -0
- package/dist/core/bang-commands/registry.js +174 -0
- package/dist/core/bang-commands/registry.js.map +1 -0
- package/dist/core/bang-commands/user-commands.d.ts +86 -0
- package/dist/core/bang-commands/user-commands.d.ts.map +1 -0
- package/dist/core/bang-commands/user-commands.js +212 -0
- package/dist/core/bang-commands/user-commands.js.map +1 -0
- package/dist/core/channel-timeline.d.ts +28 -0
- package/dist/core/channel-timeline.d.ts.map +1 -0
- package/dist/core/channel-timeline.js +117 -0
- package/dist/core/channel-timeline.js.map +1 -0
- package/dist/core/character-block.d.ts +37 -0
- package/dist/core/character-block.d.ts.map +1 -0
- package/dist/core/character-block.js +162 -0
- package/dist/core/character-block.js.map +1 -0
- package/dist/core/context/activity-sources.d.ts +37 -0
- package/dist/core/context/activity-sources.d.ts.map +1 -0
- package/dist/core/context/activity-sources.js +69 -0
- package/dist/core/context/activity-sources.js.map +1 -0
- package/dist/core/context/activity-view-reconciler.d.ts +110 -0
- package/dist/core/context/activity-view-reconciler.d.ts.map +1 -0
- package/dist/core/context/activity-view-reconciler.js +252 -0
- package/dist/core/context/activity-view-reconciler.js.map +1 -0
- package/dist/core/context/activity-view-runner.d.ts +38 -0
- package/dist/core/context/activity-view-runner.d.ts.map +1 -0
- package/dist/core/context/activity-view-runner.js +402 -0
- package/dist/core/context/activity-view-runner.js.map +1 -0
- package/dist/core/context/default-schedules-reconciler.d.ts +85 -0
- package/dist/core/context/default-schedules-reconciler.d.ts.map +1 -0
- package/dist/core/context/default-schedules-reconciler.js +153 -0
- package/dist/core/context/default-schedules-reconciler.js.map +1 -0
- package/dist/core/context/default-schedules-runner.d.ts +40 -0
- package/dist/core/context/default-schedules-runner.d.ts.map +1 -0
- package/dist/core/context/default-schedules-runner.js +233 -0
- package/dist/core/context/default-schedules-runner.js.map +1 -0
- package/dist/core/context/domain-index-reconciler.d.ts +81 -0
- package/dist/core/context/domain-index-reconciler.d.ts.map +1 -0
- package/dist/core/context/domain-index-reconciler.js +199 -0
- package/dist/core/context/domain-index-reconciler.js.map +1 -0
- package/dist/core/context/domain-index-runner.d.ts +35 -0
- package/dist/core/context/domain-index-runner.d.ts.map +1 -0
- package/dist/core/context/domain-index-runner.js +223 -0
- package/dist/core/context/domain-index-runner.js.map +1 -0
- package/dist/core/context/entity-mirror.d.ts +227 -0
- package/dist/core/context/entity-mirror.d.ts.map +1 -0
- package/dist/core/context/entity-mirror.js +629 -0
- package/dist/core/context/entity-mirror.js.map +1 -0
- package/dist/core/context/entity-source-rename.d.ts +61 -0
- package/dist/core/context/entity-source-rename.d.ts.map +1 -0
- package/dist/core/context/entity-source-rename.js +237 -0
- package/dist/core/context/entity-source-rename.js.map +1 -0
- package/dist/core/context/index-reconciler.d.ts +61 -0
- package/dist/core/context/index-reconciler.d.ts.map +1 -0
- package/dist/core/context/index-reconciler.js +329 -0
- package/dist/core/context/index-reconciler.js.map +1 -0
- package/dist/core/context/policy-index-reconciler.d.ts +102 -0
- package/dist/core/context/policy-index-reconciler.d.ts.map +1 -0
- package/dist/core/context/policy-index-reconciler.js +202 -0
- package/dist/core/context/policy-index-reconciler.js.map +1 -0
- package/dist/core/context/policy-index-runner.d.ts +66 -0
- package/dist/core/context/policy-index-runner.d.ts.map +1 -0
- package/dist/core/context/policy-index-runner.js +406 -0
- package/dist/core/context/policy-index-runner.js.map +1 -0
- package/dist/core/context/reconciler-runner.d.ts +44 -0
- package/dist/core/context/reconciler-runner.d.ts.map +1 -0
- package/dist/core/context/reconciler-runner.js +273 -0
- package/dist/core/context/reconciler-runner.js.map +1 -0
- package/dist/core/context-builder.d.ts +115 -0
- package/dist/core/context-builder.d.ts.map +1 -0
- package/dist/core/context-builder.js +1148 -0
- package/dist/core/context-builder.js.map +1 -0
- package/dist/core/context-frontmatter-backfill.d.ts +33 -0
- package/dist/core/context-frontmatter-backfill.d.ts.map +1 -0
- package/dist/core/context-frontmatter-backfill.js +111 -0
- package/dist/core/context-frontmatter-backfill.js.map +1 -0
- package/dist/core/context-frontmatter.d.ts +13 -0
- package/dist/core/context-frontmatter.d.ts.map +1 -0
- package/dist/core/context-frontmatter.js +325 -0
- package/dist/core/context-frontmatter.js.map +1 -0
- package/dist/core/context-health.d.ts +51 -0
- package/dist/core/context-health.d.ts.map +1 -0
- package/dist/core/context-health.js +304 -0
- package/dist/core/context-health.js.map +1 -0
- package/dist/core/context-paths.d.ts +183 -0
- package/dist/core/context-paths.d.ts.map +1 -0
- package/dist/core/context-paths.js +241 -0
- package/dist/core/context-paths.js.map +1 -0
- package/dist/core/context-staleness.d.ts +45 -0
- package/dist/core/context-staleness.d.ts.map +1 -0
- package/dist/core/context-staleness.js +88 -0
- package/dist/core/context-staleness.js.map +1 -0
- package/dist/core/custom-routine-scheduler.d.ts +151 -0
- package/dist/core/custom-routine-scheduler.d.ts.map +1 -0
- package/dist/core/custom-routine-scheduler.js +335 -0
- package/dist/core/custom-routine-scheduler.js.map +1 -0
- package/dist/core/daemon-api-cli.d.ts +33 -0
- package/dist/core/daemon-api-cli.d.ts.map +1 -0
- package/dist/core/daemon-api-cli.js +614 -0
- package/dist/core/daemon-api-cli.js.map +1 -0
- package/dist/core/dashboard-session-cleanup.d.ts +39 -0
- package/dist/core/dashboard-session-cleanup.d.ts.map +1 -0
- package/dist/core/dashboard-session-cleanup.js +108 -0
- package/dist/core/dashboard-session-cleanup.js.map +1 -0
- package/dist/core/dashboard-session-controls.d.ts +41 -0
- package/dist/core/dashboard-session-controls.d.ts.map +1 -0
- package/dist/core/dashboard-session-controls.js +154 -0
- package/dist/core/dashboard-session-controls.js.map +1 -0
- package/dist/core/delegated-connector-health.d.ts +63 -0
- package/dist/core/delegated-connector-health.d.ts.map +1 -0
- package/dist/core/delegated-connector-health.js +157 -0
- package/dist/core/delegated-connector-health.js.map +1 -0
- package/dist/core/dispatcher.d.ts +999 -0
- package/dist/core/dispatcher.d.ts.map +1 -0
- package/dist/core/dispatcher.js +4378 -0
- package/dist/core/dispatcher.js.map +1 -0
- package/dist/core/dm-freshness-metrics.d.ts +73 -0
- package/dist/core/dm-freshness-metrics.d.ts.map +1 -0
- package/dist/core/dm-freshness-metrics.js +138 -0
- package/dist/core/dm-freshness-metrics.js.map +1 -0
- package/dist/core/docs/citation-validator.d.ts +73 -0
- package/dist/core/docs/citation-validator.d.ts.map +1 -0
- package/dist/core/docs/citation-validator.js +195 -0
- package/dist/core/docs/citation-validator.js.map +1 -0
- package/dist/core/docs/extract-terms.d.ts +78 -0
- package/dist/core/docs/extract-terms.d.ts.map +1 -0
- package/dist/core/docs/extract-terms.js +147 -0
- package/dist/core/docs/extract-terms.js.map +1 -0
- package/dist/core/docs/indexer.d.ts +104 -0
- package/dist/core/docs/indexer.d.ts.map +1 -0
- package/dist/core/docs/indexer.js +340 -0
- package/dist/core/docs/indexer.js.map +1 -0
- package/dist/core/drift-effects.d.ts +30 -0
- package/dist/core/drift-effects.d.ts.map +1 -0
- package/dist/core/drift-effects.js +384 -0
- package/dist/core/drift-effects.js.map +1 -0
- package/dist/core/event-bus.d.ts +56 -0
- package/dist/core/event-bus.d.ts.map +1 -0
- package/dist/core/event-bus.js +135 -0
- package/dist/core/event-bus.js.map +1 -0
- package/dist/core/git-project-docs.d.ts +77 -0
- package/dist/core/git-project-docs.d.ts.map +1 -0
- package/dist/core/git-project-docs.js +439 -0
- package/dist/core/git-project-docs.js.map +1 -0
- package/dist/core/health-monitor.d.ts +57 -0
- package/dist/core/health-monitor.d.ts.map +1 -0
- package/dist/core/health-monitor.js +137 -0
- package/dist/core/health-monitor.js.map +1 -0
- package/dist/core/heartbeat.d.ts +26 -0
- package/dist/core/heartbeat.d.ts.map +1 -0
- package/dist/core/heartbeat.js +48 -0
- package/dist/core/heartbeat.js.map +1 -0
- package/dist/core/integration-health.d.ts +49 -0
- package/dist/core/integration-health.d.ts.map +1 -0
- package/dist/core/integration-health.js +89 -0
- package/dist/core/integration-health.js.map +1 -0
- package/dist/core/integration-lifecycle.d.ts +79 -0
- package/dist/core/integration-lifecycle.d.ts.map +1 -0
- package/dist/core/integration-lifecycle.js +153 -0
- package/dist/core/integration-lifecycle.js.map +1 -0
- package/dist/core/integration-main-backend.d.ts +36 -0
- package/dist/core/integration-main-backend.d.ts.map +1 -0
- package/dist/core/integration-main-backend.js +59 -0
- package/dist/core/integration-main-backend.js.map +1 -0
- package/dist/core/integration-probe.d.ts +98 -0
- package/dist/core/integration-probe.d.ts.map +1 -0
- package/dist/core/integration-probe.js +152 -0
- package/dist/core/integration-probe.js.map +1 -0
- package/dist/core/management-md-write-lock.d.ts +68 -0
- package/dist/core/management-md-write-lock.d.ts.map +1 -0
- package/dist/core/management-md-write-lock.js +93 -0
- package/dist/core/management-md-write-lock.js.map +1 -0
- package/dist/core/management-md.d.ts +186 -0
- package/dist/core/management-md.d.ts.map +1 -0
- package/dist/core/management-md.js +652 -0
- package/dist/core/management-md.js.map +1 -0
- package/dist/core/management-registry.d.ts +245 -0
- package/dist/core/management-registry.d.ts.map +1 -0
- package/dist/core/management-registry.js +906 -0
- package/dist/core/management-registry.js.map +1 -0
- package/dist/core/management-telemetry.d.ts +100 -0
- package/dist/core/management-telemetry.d.ts.map +1 -0
- package/dist/core/management-telemetry.js +156 -0
- package/dist/core/management-telemetry.js.map +1 -0
- package/dist/core/message-recorder.d.ts +38 -0
- package/dist/core/message-recorder.d.ts.map +1 -0
- package/dist/core/message-recorder.js +88 -0
- package/dist/core/message-recorder.js.map +1 -0
- package/dist/core/metrics.d.ts +338 -0
- package/dist/core/metrics.d.ts.map +1 -0
- package/dist/core/metrics.js +747 -0
- package/dist/core/metrics.js.map +1 -0
- package/dist/core/migration-backup.d.ts +218 -0
- package/dist/core/migration-backup.d.ts.map +1 -0
- package/dist/core/migration-backup.js +934 -0
- package/dist/core/migration-backup.js.map +1 -0
- package/dist/core/overview-write-lock.d.ts +48 -0
- package/dist/core/overview-write-lock.d.ts.map +1 -0
- package/dist/core/overview-write-lock.js +56 -0
- package/dist/core/overview-write-lock.js.map +1 -0
- package/dist/core/path-compat.d.ts +22 -0
- package/dist/core/path-compat.d.ts.map +1 -0
- package/dist/core/path-compat.js +67 -0
- package/dist/core/path-compat.js.map +1 -0
- package/dist/core/path-rewrite.d.ts +58 -0
- package/dist/core/path-rewrite.d.ts.map +1 -0
- package/dist/core/path-rewrite.js +141 -0
- package/dist/core/path-rewrite.js.map +1 -0
- package/dist/core/policy-files.d.ts +108 -0
- package/dist/core/policy-files.d.ts.map +1 -0
- package/dist/core/policy-files.js +198 -0
- package/dist/core/policy-files.js.map +1 -0
- package/dist/core/profile-questions/seed.d.ts +44 -0
- package/dist/core/profile-questions/seed.d.ts.map +1 -0
- package/dist/core/profile-questions/seed.js +173 -0
- package/dist/core/profile-questions/seed.js.map +1 -0
- package/dist/core/profile-questions/slot-filled.d.ts +51 -0
- package/dist/core/profile-questions/slot-filled.d.ts.map +1 -0
- package/dist/core/profile-questions/slot-filled.js +118 -0
- package/dist/core/profile-questions/slot-filled.js.map +1 -0
- package/dist/core/prompts.d.ts +111 -0
- package/dist/core/prompts.d.ts.map +1 -0
- package/dist/core/prompts.js +267 -0
- package/dist/core/prompts.js.map +1 -0
- package/dist/core/quiet-hours-sync.d.ts +15 -0
- package/dist/core/quiet-hours-sync.d.ts.map +1 -0
- package/dist/core/quiet-hours-sync.js +51 -0
- package/dist/core/quiet-hours-sync.js.map +1 -0
- package/dist/core/read-sensitive-token-manager.d.ts +19 -0
- package/dist/core/read-sensitive-token-manager.d.ts.map +1 -0
- package/dist/core/read-sensitive-token-manager.js +29 -0
- package/dist/core/read-sensitive-token-manager.js.map +1 -0
- package/dist/core/recurrence.d.ts +24 -0
- package/dist/core/recurrence.d.ts.map +1 -0
- package/dist/core/recurrence.js +162 -0
- package/dist/core/recurrence.js.map +1 -0
- package/dist/core/reinstall.d.ts +107 -0
- package/dist/core/reinstall.d.ts.map +1 -0
- package/dist/core/reinstall.js +163 -0
- package/dist/core/reinstall.js.map +1 -0
- package/dist/core/release-assets.d.ts +106 -0
- package/dist/core/release-assets.d.ts.map +1 -0
- package/dist/core/release-assets.js +434 -0
- package/dist/core/release-assets.js.map +1 -0
- package/dist/core/repository-management-docs.d.ts +216 -0
- package/dist/core/repository-management-docs.d.ts.map +1 -0
- package/dist/core/repository-management-docs.js +855 -0
- package/dist/core/repository-management-docs.js.map +1 -0
- package/dist/core/retention.d.ts +164 -0
- package/dist/core/retention.d.ts.map +1 -0
- package/dist/core/retention.js +1008 -0
- package/dist/core/retention.js.map +1 -0
- package/dist/core/review-context.d.ts +48 -0
- package/dist/core/review-context.d.ts.map +1 -0
- package/dist/core/review-context.js +282 -0
- package/dist/core/review-context.js.map +1 -0
- package/dist/core/roadmap-horizon.d.ts +48 -0
- package/dist/core/roadmap-horizon.d.ts.map +1 -0
- package/dist/core/roadmap-horizon.js +213 -0
- package/dist/core/roadmap-horizon.js.map +1 -0
- package/dist/core/roadmap-ids.d.ts +57 -0
- package/dist/core/roadmap-ids.d.ts.map +1 -0
- package/dist/core/roadmap-ids.js +118 -0
- package/dist/core/roadmap-ids.js.map +1 -0
- package/dist/core/roadmap-merge.d.ts +7 -0
- package/dist/core/roadmap-merge.d.ts.map +1 -0
- package/dist/core/roadmap-merge.js +187 -0
- package/dist/core/roadmap-merge.js.map +1 -0
- package/dist/core/roadmap-refresh-triggers.d.ts +32 -0
- package/dist/core/roadmap-refresh-triggers.d.ts.map +1 -0
- package/dist/core/roadmap-refresh-triggers.js +51 -0
- package/dist/core/roadmap-refresh-triggers.js.map +1 -0
- package/dist/core/roadmap-truncate.d.ts +49 -0
- package/dist/core/roadmap-truncate.d.ts.map +1 -0
- package/dist/core/roadmap-truncate.js +152 -0
- package/dist/core/roadmap-truncate.js.map +1 -0
- package/dist/core/roadmap-validate.d.ts +31 -0
- package/dist/core/roadmap-validate.d.ts.map +1 -0
- package/dist/core/roadmap-validate.js +403 -0
- package/dist/core/roadmap-validate.js.map +1 -0
- package/dist/core/roadmap-write-lock.d.ts +53 -0
- package/dist/core/roadmap-write-lock.d.ts.map +1 -0
- package/dist/core/roadmap-write-lock.js +59 -0
- package/dist/core/roadmap-write-lock.js.map +1 -0
- package/dist/core/schedule-insert-helper.d.ts +46 -0
- package/dist/core/schedule-insert-helper.d.ts.map +1 -0
- package/dist/core/schedule-insert-helper.js +52 -0
- package/dist/core/schedule-insert-helper.js.map +1 -0
- package/dist/core/schedule-maintenance.d.ts +22 -0
- package/dist/core/schedule-maintenance.d.ts.map +1 -0
- package/dist/core/schedule-maintenance.js +57 -0
- package/dist/core/schedule-maintenance.js.map +1 -0
- package/dist/core/scheduler.d.ts +208 -0
- package/dist/core/scheduler.d.ts.map +1 -0
- package/dist/core/scheduler.js +896 -0
- package/dist/core/scheduler.js.map +1 -0
- package/dist/core/semaphore.d.ts +13 -0
- package/dist/core/semaphore.d.ts.map +1 -0
- package/dist/core/semaphore.js +31 -0
- package/dist/core/semaphore.js.map +1 -0
- package/dist/core/session-gate.d.ts +37 -0
- package/dist/core/session-gate.d.ts.map +1 -0
- package/dist/core/session-gate.js +69 -0
- package/dist/core/session-gate.js.map +1 -0
- package/dist/core/session-manager.d.ts +252 -0
- package/dist/core/session-manager.d.ts.map +1 -0
- package/dist/core/session-manager.js +716 -0
- package/dist/core/session-manager.js.map +1 -0
- package/dist/core/signal-detector.d.ts +97 -0
- package/dist/core/signal-detector.d.ts.map +1 -0
- package/dist/core/signal-detector.js +215 -0
- package/dist/core/signal-detector.js.map +1 -0
- package/dist/core/skeleton.d.ts +83 -0
- package/dist/core/skeleton.d.ts.map +1 -0
- package/dist/core/skeleton.js +255 -0
- package/dist/core/skeleton.js.map +1 -0
- package/dist/core/skill-curation/apply-proposal.d.ts +71 -0
- package/dist/core/skill-curation/apply-proposal.d.ts.map +1 -0
- package/dist/core/skill-curation/apply-proposal.js +175 -0
- package/dist/core/skill-curation/apply-proposal.js.map +1 -0
- package/dist/core/skill-curation/auto-revert.d.ts +43 -0
- package/dist/core/skill-curation/auto-revert.d.ts.map +1 -0
- package/dist/core/skill-curation/auto-revert.js +155 -0
- package/dist/core/skill-curation/auto-revert.js.map +1 -0
- package/dist/core/skill-curation/classify-diff.d.ts +27 -0
- package/dist/core/skill-curation/classify-diff.d.ts.map +1 -0
- package/dist/core/skill-curation/classify-diff.js +0 -0
- package/dist/core/skill-curation/classify-diff.js.map +1 -0
- package/dist/core/skill-curation/declarations.d.ts +32 -0
- package/dist/core/skill-curation/declarations.d.ts.map +1 -0
- package/dist/core/skill-curation/declarations.js +171 -0
- package/dist/core/skill-curation/declarations.js.map +1 -0
- package/dist/core/skill-curation/knowledge-map.d.ts +26 -0
- package/dist/core/skill-curation/knowledge-map.d.ts.map +1 -0
- package/dist/core/skill-curation/knowledge-map.js +154 -0
- package/dist/core/skill-curation/knowledge-map.js.map +1 -0
- package/dist/core/skill-curation/orphan-overlay.d.ts +35 -0
- package/dist/core/skill-curation/orphan-overlay.d.ts.map +1 -0
- package/dist/core/skill-curation/orphan-overlay.js +167 -0
- package/dist/core/skill-curation/orphan-overlay.js.map +1 -0
- package/dist/core/skill-curation/overlay-store.d.ts +41 -0
- package/dist/core/skill-curation/overlay-store.d.ts.map +1 -0
- package/dist/core/skill-curation/overlay-store.js +143 -0
- package/dist/core/skill-curation/overlay-store.js.map +1 -0
- package/dist/core/skill-curation/render/convention-notes.d.ts +4 -0
- package/dist/core/skill-curation/render/convention-notes.d.ts.map +1 -0
- package/dist/core/skill-curation/render/convention-notes.js +13 -0
- package/dist/core/skill-curation/render/convention-notes.js.map +1 -0
- package/dist/core/skill-curation/render/cross-references.d.ts +4 -0
- package/dist/core/skill-curation/render/cross-references.d.ts.map +1 -0
- package/dist/core/skill-curation/render/cross-references.js +10 -0
- package/dist/core/skill-curation/render/cross-references.js.map +1 -0
- package/dist/core/skill-curation/render/frontmatter-schema.d.ts +4 -0
- package/dist/core/skill-curation/render/frontmatter-schema.d.ts.map +1 -0
- package/dist/core/skill-curation/render/frontmatter-schema.js +25 -0
- package/dist/core/skill-curation/render/frontmatter-schema.js.map +1 -0
- package/dist/core/skill-curation/render/index.d.ts +5 -0
- package/dist/core/skill-curation/render/index.d.ts.map +1 -0
- package/dist/core/skill-curation/render/index.js +42 -0
- package/dist/core/skill-curation/render/index.js.map +1 -0
- package/dist/core/skill-curation/render/knowledge-layout.d.ts +4 -0
- package/dist/core/skill-curation/render/knowledge-layout.d.ts.map +1 -0
- package/dist/core/skill-curation/render/knowledge-layout.js +36 -0
- package/dist/core/skill-curation/render/knowledge-layout.js.map +1 -0
- package/dist/core/skill-curation/render/routing-table.d.ts +4 -0
- package/dist/core/skill-curation/render/routing-table.d.ts.map +1 -0
- package/dist/core/skill-curation/render/routing-table.js +37 -0
- package/dist/core/skill-curation/render/routing-table.js.map +1 -0
- package/dist/core/skill-curation/render/search-recipes.d.ts +4 -0
- package/dist/core/skill-curation/render/search-recipes.d.ts.map +1 -0
- package/dist/core/skill-curation/render/search-recipes.js +39 -0
- package/dist/core/skill-curation/render/search-recipes.js.map +1 -0
- package/dist/core/skill-curation/run-token.d.ts +27 -0
- package/dist/core/skill-curation/run-token.d.ts.map +1 -0
- package/dist/core/skill-curation/run-token.js +81 -0
- package/dist/core/skill-curation/run-token.js.map +1 -0
- package/dist/core/skill-curation/signals.d.ts +49 -0
- package/dist/core/skill-curation/signals.d.ts.map +1 -0
- package/dist/core/skill-curation/signals.js +149 -0
- package/dist/core/skill-curation/signals.js.map +1 -0
- package/dist/core/skill-curation/smoke-test.d.ts +39 -0
- package/dist/core/skill-curation/smoke-test.d.ts.map +1 -0
- package/dist/core/skill-curation/smoke-test.js +313 -0
- package/dist/core/skill-curation/smoke-test.js.map +1 -0
- package/dist/core/skill-curation/splicer.d.ts +16 -0
- package/dist/core/skill-curation/splicer.d.ts.map +1 -0
- package/dist/core/skill-curation/splicer.js +78 -0
- package/dist/core/skill-curation/splicer.js.map +1 -0
- package/dist/core/skill-curation/workdir.d.ts +40 -0
- package/dist/core/skill-curation/workdir.d.ts.map +1 -0
- package/dist/core/skill-curation/workdir.js +242 -0
- package/dist/core/skill-curation/workdir.js.map +1 -0
- package/dist/core/skills-compiler.d.ts +391 -0
- package/dist/core/skills-compiler.d.ts.map +1 -0
- package/dist/core/skills-compiler.js +1271 -0
- package/dist/core/skills-compiler.js.map +1 -0
- package/dist/core/skills-manifest.d.ts +8 -0
- package/dist/core/skills-manifest.d.ts.map +1 -0
- package/dist/core/skills-manifest.js +408 -0
- package/dist/core/skills-manifest.js.map +1 -0
- package/dist/core/system-reset.d.ts +268 -0
- package/dist/core/system-reset.d.ts.map +1 -0
- package/dist/core/system-reset.js +816 -0
- package/dist/core/system-reset.js.map +1 -0
- package/dist/core/template-store.d.ts +170 -0
- package/dist/core/template-store.d.ts.map +1 -0
- package/dist/core/template-store.js +388 -0
- package/dist/core/template-store.js.map +1 -0
- package/dist/core/template-versions.d.ts +95 -0
- package/dist/core/template-versions.d.ts.map +1 -0
- package/dist/core/template-versions.js +175 -0
- package/dist/core/template-versions.js.map +1 -0
- package/dist/core/today-agent-plan.d.ts +33 -0
- package/dist/core/today-agent-plan.d.ts.map +1 -0
- package/dist/core/today-agent-plan.js +120 -0
- package/dist/core/today-agent-plan.js.map +1 -0
- package/dist/core/today-direct-writer.d.ts +62 -0
- package/dist/core/today-direct-writer.d.ts.map +1 -0
- package/dist/core/today-direct-writer.js +132 -0
- package/dist/core/today-direct-writer.js.map +1 -0
- package/dist/core/today-write-lock.d.ts +89 -0
- package/dist/core/today-write-lock.d.ts.map +1 -0
- package/dist/core/today-write-lock.js +154 -0
- package/dist/core/today-write-lock.js.map +1 -0
- package/dist/core/trigger-dispatch.d.ts +31 -0
- package/dist/core/trigger-dispatch.d.ts.map +1 -0
- package/dist/core/trigger-dispatch.js +100 -0
- package/dist/core/trigger-dispatch.js.map +1 -0
- package/dist/core/trigger-evaluator.d.ts +59 -0
- package/dist/core/trigger-evaluator.d.ts.map +1 -0
- package/dist/core/trigger-evaluator.js +243 -0
- package/dist/core/trigger-evaluator.js.map +1 -0
- package/dist/core/workdir.d.ts +241 -0
- package/dist/core/workdir.d.ts.map +1 -0
- package/dist/core/workdir.js +565 -0
- package/dist/core/workdir.js.map +1 -0
- package/dist/db/automation-triggers.d.ts +90 -0
- package/dist/db/automation-triggers.d.ts.map +1 -0
- package/dist/db/automation-triggers.js +199 -0
- package/dist/db/automation-triggers.js.map +1 -0
- package/dist/db/client.d.ts +6 -0
- package/dist/db/client.d.ts.map +1 -0
- package/dist/db/client.js +47 -0
- package/dist/db/client.js.map +1 -0
- package/dist/db/entities-store.d.ts +92 -0
- package/dist/db/entities-store.d.ts.map +1 -0
- package/dist/db/entities-store.js +180 -0
- package/dist/db/entities-store.js.map +1 -0
- package/dist/db/hourly-check-signals.d.ts +78 -0
- package/dist/db/hourly-check-signals.d.ts.map +1 -0
- package/dist/db/hourly-check-signals.js +289 -0
- package/dist/db/hourly-check-signals.js.map +1 -0
- package/dist/db/integration-probe-store.d.ts +27 -0
- package/dist/db/integration-probe-store.d.ts.map +1 -0
- package/dist/db/integration-probe-store.js +75 -0
- package/dist/db/integration-probe-store.js.map +1 -0
- package/dist/db/integrations-store.d.ts +19 -0
- package/dist/db/integrations-store.d.ts.map +1 -0
- package/dist/db/integrations-store.js +85 -0
- package/dist/db/integrations-store.js.map +1 -0
- package/dist/db/managed-tasks-store.d.ts +130 -0
- package/dist/db/managed-tasks-store.d.ts.map +1 -0
- package/dist/db/managed-tasks-store.js +238 -0
- package/dist/db/managed-tasks-store.js.map +1 -0
- package/dist/db/management-parse-failures-store.d.ts +45 -0
- package/dist/db/management-parse-failures-store.d.ts.map +1 -0
- package/dist/db/management-parse-failures-store.js +36 -0
- package/dist/db/management-parse-failures-store.js.map +1 -0
- package/dist/db/observations.d.ts +145 -0
- package/dist/db/observations.d.ts.map +1 -0
- package/dist/db/observations.js +287 -0
- package/dist/db/observations.js.map +1 -0
- package/dist/db/recurring-schedules.d.ts +70 -0
- package/dist/db/recurring-schedules.d.ts.map +1 -0
- package/dist/db/recurring-schedules.js +213 -0
- package/dist/db/recurring-schedules.js.map +1 -0
- package/dist/db/repositories-store.d.ts +296 -0
- package/dist/db/repositories-store.d.ts.map +1 -0
- package/dist/db/repositories-store.js +754 -0
- package/dist/db/repositories-store.js.map +1 -0
- package/dist/db/runtime-state.d.ts +61 -0
- package/dist/db/runtime-state.d.ts.map +1 -0
- package/dist/db/runtime-state.js +104 -0
- package/dist/db/runtime-state.js.map +1 -0
- package/dist/db/schema.d.ts +4 -0
- package/dist/db/schema.d.ts.map +1 -0
- package/dist/db/schema.js +1338 -0
- package/dist/db/schema.js.map +1 -0
- package/dist/db/sot-bindings-store.d.ts +41 -0
- package/dist/db/sot-bindings-store.d.ts.map +1 -0
- package/dist/db/sot-bindings-store.js +64 -0
- package/dist/db/sot-bindings-store.js.map +1 -0
- package/dist/db/test-schemas.d.ts +23 -0
- package/dist/db/test-schemas.d.ts.map +1 -0
- package/dist/db/test-schemas.js +111 -0
- package/dist/db/test-schemas.js.map +1 -0
- package/dist/db/voice-transcripts-store.d.ts +28 -0
- package/dist/db/voice-transcripts-store.d.ts.map +1 -0
- package/dist/db/voice-transcripts-store.js +43 -0
- package/dist/db/voice-transcripts-store.js.map +1 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2913 -0
- package/dist/index.js.map +1 -0
- package/dist/init.d.ts +7 -0
- package/dist/init.d.ts.map +1 -0
- package/dist/init.js +32 -0
- package/dist/init.js.map +1 -0
- package/dist/log-buffer.d.ts +71 -0
- package/dist/log-buffer.d.ts.map +1 -0
- package/dist/log-buffer.js +201 -0
- package/dist/log-buffer.js.map +1 -0
- package/dist/logging.d.ts +5 -0
- package/dist/logging.d.ts.map +1 -0
- package/dist/logging.js +130 -0
- package/dist/logging.js.map +1 -0
- package/dist/management-rules.d.ts +2 -0
- package/dist/management-rules.d.ts.map +1 -0
- package/dist/management-rules.js +62 -0
- package/dist/management-rules.js.map +1 -0
- package/dist/messaging/constants.d.ts +33 -0
- package/dist/messaging/constants.d.ts.map +1 -0
- package/dist/messaging/constants.js +52 -0
- package/dist/messaging/constants.js.map +1 -0
- package/dist/messaging/magic-phrase.d.ts +16 -0
- package/dist/messaging/magic-phrase.d.ts.map +1 -0
- package/dist/messaging/magic-phrase.js +103 -0
- package/dist/messaging/magic-phrase.js.map +1 -0
- package/dist/messaging/owner-channels.d.ts +20 -0
- package/dist/messaging/owner-channels.d.ts.map +1 -0
- package/dist/messaging/owner-channels.js +41 -0
- package/dist/messaging/owner-channels.js.map +1 -0
- package/dist/observers/calendar-poller.d.ts +51 -0
- package/dist/observers/calendar-poller.d.ts.map +1 -0
- package/dist/observers/calendar-poller.js +128 -0
- package/dist/observers/calendar-poller.js.map +1 -0
- package/dist/observers/context-index-reconciler-observer.d.ts +72 -0
- package/dist/observers/context-index-reconciler-observer.d.ts.map +1 -0
- package/dist/observers/context-index-reconciler-observer.js +253 -0
- package/dist/observers/context-index-reconciler-observer.js.map +1 -0
- package/dist/observers/delegated-probe-observer.d.ts +83 -0
- package/dist/observers/delegated-probe-observer.d.ts.map +1 -0
- package/dist/observers/delegated-probe-observer.js +237 -0
- package/dist/observers/delegated-probe-observer.js.map +1 -0
- package/dist/observers/delegated-sync-worker.d.ts +375 -0
- package/dist/observers/delegated-sync-worker.d.ts.map +1 -0
- package/dist/observers/delegated-sync-worker.js +1087 -0
- package/dist/observers/delegated-sync-worker.js.map +1 -0
- package/dist/observers/entity-mirror-observer.d.ts +55 -0
- package/dist/observers/entity-mirror-observer.d.ts.map +1 -0
- package/dist/observers/entity-mirror-observer.js +73 -0
- package/dist/observers/entity-mirror-observer.js.map +1 -0
- package/dist/observers/git-delegated-cron.d.ts +41 -0
- package/dist/observers/git-delegated-cron.d.ts.map +1 -0
- package/dist/observers/git-delegated-cron.js +159 -0
- package/dist/observers/git-delegated-cron.js.map +1 -0
- package/dist/observers/git-event-classifier.d.ts +52 -0
- package/dist/observers/git-event-classifier.d.ts.map +1 -0
- package/dist/observers/git-event-classifier.js +70 -0
- package/dist/observers/git-event-classifier.js.map +1 -0
- package/dist/observers/git-watcher.d.ts +162 -0
- package/dist/observers/git-watcher.d.ts.map +1 -0
- package/dist/observers/git-watcher.js +768 -0
- package/dist/observers/git-watcher.js.map +1 -0
- package/dist/observers/github-poller-classifier.d.ts +101 -0
- package/dist/observers/github-poller-classifier.d.ts.map +1 -0
- package/dist/observers/github-poller-classifier.js +199 -0
- package/dist/observers/github-poller-classifier.js.map +1 -0
- package/dist/observers/github-poller.d.ts +291 -0
- package/dist/observers/github-poller.d.ts.map +1 -0
- package/dist/observers/github-poller.js +609 -0
- package/dist/observers/github-poller.js.map +1 -0
- package/dist/observers/imminent-event-scheduler.d.ts +34 -0
- package/dist/observers/imminent-event-scheduler.d.ts.map +1 -0
- package/dist/observers/imminent-event-scheduler.js +125 -0
- package/dist/observers/imminent-event-scheduler.js.map +1 -0
- package/dist/observers/mail-poller.d.ts +133 -0
- package/dist/observers/mail-poller.d.ts.map +1 -0
- package/dist/observers/mail-poller.js +563 -0
- package/dist/observers/mail-poller.js.map +1 -0
- package/dist/observers/mail-reconciliation.d.ts +87 -0
- package/dist/observers/mail-reconciliation.d.ts.map +1 -0
- package/dist/observers/mail-reconciliation.js +241 -0
- package/dist/observers/mail-reconciliation.js.map +1 -0
- package/dist/observers/manager.d.ts +67 -0
- package/dist/observers/manager.d.ts.map +1 -0
- package/dist/observers/manager.js +136 -0
- package/dist/observers/manager.js.map +1 -0
- package/dist/observers/notion-poller.d.ts +43 -0
- package/dist/observers/notion-poller.d.ts.map +1 -0
- package/dist/observers/notion-poller.js +184 -0
- package/dist/observers/notion-poller.js.map +1 -0
- package/dist/observers/observation-summarizer/index.d.ts +13 -0
- package/dist/observers/observation-summarizer/index.d.ts.map +1 -0
- package/dist/observers/observation-summarizer/index.js +13 -0
- package/dist/observers/observation-summarizer/index.js.map +1 -0
- package/dist/observers/observation-summarizer/pre-filter.d.ts +62 -0
- package/dist/observers/observation-summarizer/pre-filter.d.ts.map +1 -0
- package/dist/observers/observation-summarizer/pre-filter.js +189 -0
- package/dist/observers/observation-summarizer/pre-filter.js.map +1 -0
- package/dist/observers/observation-summarizer/response-parser.d.ts +30 -0
- package/dist/observers/observation-summarizer/response-parser.d.ts.map +1 -0
- package/dist/observers/observation-summarizer/response-parser.js +106 -0
- package/dist/observers/observation-summarizer/response-parser.js.map +1 -0
- package/dist/observers/observation-summarizer/summarizer-client.d.ts +83 -0
- package/dist/observers/observation-summarizer/summarizer-client.d.ts.map +1 -0
- package/dist/observers/observation-summarizer/summarizer-client.js +185 -0
- package/dist/observers/observation-summarizer/summarizer-client.js.map +1 -0
- package/dist/observers/observation-summarizer/summarizer-prompts.d.ts +51 -0
- package/dist/observers/observation-summarizer/summarizer-prompts.d.ts.map +1 -0
- package/dist/observers/observation-summarizer/summarizer-prompts.js +286 -0
- package/dist/observers/observation-summarizer/summarizer-prompts.js.map +1 -0
- package/dist/observers/observation-summarizer/worker.d.ts +106 -0
- package/dist/observers/observation-summarizer/worker.d.ts.map +1 -0
- package/dist/observers/observation-summarizer/worker.js +311 -0
- package/dist/observers/observation-summarizer/worker.js.map +1 -0
- package/dist/observers/obsidian-watcher.d.ts +90 -0
- package/dist/observers/obsidian-watcher.d.ts.map +1 -0
- package/dist/observers/obsidian-watcher.js +166 -0
- package/dist/observers/obsidian-watcher.js.map +1 -0
- package/dist/observers/primary-vault-watcher.d.ts +73 -0
- package/dist/observers/primary-vault-watcher.d.ts.map +1 -0
- package/dist/observers/primary-vault-watcher.js +115 -0
- package/dist/observers/primary-vault-watcher.js.map +1 -0
- package/dist/observers/repository-management-cron.d.ts +70 -0
- package/dist/observers/repository-management-cron.d.ts.map +1 -0
- package/dist/observers/repository-management-cron.js +166 -0
- package/dist/observers/repository-management-cron.js.map +1 -0
- package/dist/observers/skill-curation-walker.d.ts +33 -0
- package/dist/observers/skill-curation-walker.d.ts.map +1 -0
- package/dist/observers/skill-curation-walker.js +216 -0
- package/dist/observers/skill-curation-walker.js.map +1 -0
- package/dist/safety/absolute-block-audit.d.ts +22 -0
- package/dist/safety/absolute-block-audit.d.ts.map +1 -0
- package/dist/safety/absolute-block-audit.js +32 -0
- package/dist/safety/absolute-block-audit.js.map +1 -0
- package/dist/safety/agent-write-tracker.d.ts +42 -0
- package/dist/safety/agent-write-tracker.d.ts.map +1 -0
- package/dist/safety/agent-write-tracker.js +82 -0
- package/dist/safety/agent-write-tracker.js.map +1 -0
- package/dist/safety/always-disallowed.d.ts +66 -0
- package/dist/safety/always-disallowed.d.ts.map +1 -0
- package/dist/safety/always-disallowed.js +347 -0
- package/dist/safety/always-disallowed.js.map +1 -0
- package/dist/safety/audit.d.ts +118 -0
- package/dist/safety/audit.d.ts.map +1 -0
- package/dist/safety/audit.js +324 -0
- package/dist/safety/audit.js.map +1 -0
- package/dist/safety/integration-write-tracker.d.ts +58 -0
- package/dist/safety/integration-write-tracker.d.ts.map +1 -0
- package/dist/safety/integration-write-tracker.js +41 -0
- package/dist/safety/integration-write-tracker.js.map +1 -0
- package/dist/safety/risk-classifier.d.ts +65 -0
- package/dist/safety/risk-classifier.d.ts.map +1 -0
- package/dist/safety/risk-classifier.js +763 -0
- package/dist/safety/risk-classifier.js.map +1 -0
- package/dist/scheduler/hourly-check-gate.d.ts +73 -0
- package/dist/scheduler/hourly-check-gate.d.ts.map +1 -0
- package/dist/scheduler/hourly-check-gate.js +128 -0
- package/dist/scheduler/hourly-check-gate.js.map +1 -0
- package/dist/secrets/backend-api-key-env.d.ts +104 -0
- package/dist/secrets/backend-api-key-env.d.ts.map +1 -0
- package/dist/secrets/backend-api-key-env.js +197 -0
- package/dist/secrets/backend-api-key-env.js.map +1 -0
- package/dist/secrets/codex-home-materializer.d.ts +35 -0
- package/dist/secrets/codex-home-materializer.d.ts.map +1 -0
- package/dist/secrets/codex-home-materializer.js +76 -0
- package/dist/secrets/codex-home-materializer.js.map +1 -0
- package/dist/secrets/encrypted-blob-store.d.ts +20 -0
- package/dist/secrets/encrypted-blob-store.d.ts.map +1 -0
- package/dist/secrets/encrypted-blob-store.js +80 -0
- package/dist/secrets/encrypted-blob-store.js.map +1 -0
- package/dist/secrets/platform-secret-store.d.ts +17 -0
- package/dist/secrets/platform-secret-store.d.ts.map +1 -0
- package/dist/secrets/platform-secret-store.js +37 -0
- package/dist/secrets/platform-secret-store.js.map +1 -0
- package/dist/secrets/redaction.d.ts +2 -0
- package/dist/secrets/redaction.d.ts.map +1 -0
- package/dist/secrets/redaction.js +2 -0
- package/dist/secrets/redaction.js.map +1 -0
- package/dist/secrets/secret-broker.d.ts +61 -0
- package/dist/secrets/secret-broker.d.ts.map +1 -0
- package/dist/secrets/secret-broker.js +160 -0
- package/dist/secrets/secret-broker.js.map +1 -0
- package/dist/secrets/secret-names.d.ts +34 -0
- package/dist/secrets/secret-names.d.ts.map +1 -0
- package/dist/secrets/secret-names.js +39 -0
- package/dist/secrets/secret-names.js.map +1 -0
- package/dist/secrets/secret-store.d.ts +8 -0
- package/dist/secrets/secret-store.d.ts.map +1 -0
- package/dist/secrets/secret-store.js +2 -0
- package/dist/secrets/secret-store.js.map +1 -0
- package/dist/secrets/types.d.ts +7 -0
- package/dist/secrets/types.d.ts.map +1 -0
- package/dist/secrets/types.js +2 -0
- package/dist/secrets/types.js.map +1 -0
- package/dist/services/apple-calendar/caldav-client.d.ts +48 -0
- package/dist/services/apple-calendar/caldav-client.d.ts.map +1 -0
- package/dist/services/apple-calendar/caldav-client.js +86 -0
- package/dist/services/apple-calendar/caldav-client.js.map +1 -0
- package/dist/services/apple-calendar/caldav-codec.d.ts +67 -0
- package/dist/services/apple-calendar/caldav-codec.d.ts.map +1 -0
- package/dist/services/apple-calendar/caldav-codec.js +341 -0
- package/dist/services/apple-calendar/caldav-codec.js.map +1 -0
- package/dist/services/apple-calendar/index.d.ts +3 -0
- package/dist/services/apple-calendar/index.d.ts.map +1 -0
- package/dist/services/apple-calendar/index.js +2 -0
- package/dist/services/apple-calendar/index.js.map +1 -0
- package/dist/services/apple-calendar/service.d.ts +75 -0
- package/dist/services/apple-calendar/service.d.ts.map +1 -0
- package/dist/services/apple-calendar/service.js +374 -0
- package/dist/services/apple-calendar/service.js.map +1 -0
- package/dist/services/apple-calendar/types.d.ts +78 -0
- package/dist/services/apple-calendar/types.d.ts.map +1 -0
- package/dist/services/apple-calendar/types.js +17 -0
- package/dist/services/apple-calendar/types.js.map +1 -0
- package/dist/services/attachments/hardlink.d.ts +11 -0
- package/dist/services/attachments/hardlink.d.ts.map +1 -0
- package/dist/services/attachments/hardlink.js +56 -0
- package/dist/services/attachments/hardlink.js.map +1 -0
- package/dist/services/attachments/sanitize.d.ts +21 -0
- package/dist/services/attachments/sanitize.d.ts.map +1 -0
- package/dist/services/attachments/sanitize.js +128 -0
- package/dist/services/attachments/sanitize.js.map +1 -0
- package/dist/services/attachments/store.d.ts +146 -0
- package/dist/services/attachments/store.d.ts.map +1 -0
- package/dist/services/attachments/store.js +477 -0
- package/dist/services/attachments/store.js.map +1 -0
- package/dist/services/calendar/outlook/graph-calendar-client.d.ts +114 -0
- package/dist/services/calendar/outlook/graph-calendar-client.d.ts.map +1 -0
- package/dist/services/calendar/outlook/graph-calendar-client.js +146 -0
- package/dist/services/calendar/outlook/graph-calendar-client.js.map +1 -0
- package/dist/services/calendar.d.ts +115 -0
- package/dist/services/calendar.d.ts.map +1 -0
- package/dist/services/calendar.js +281 -0
- package/dist/services/calendar.js.map +1 -0
- package/dist/services/delegated-backend-invoker.d.ts +414 -0
- package/dist/services/delegated-backend-invoker.d.ts.map +1 -0
- package/dist/services/delegated-backend-invoker.js +2372 -0
- package/dist/services/delegated-backend-invoker.js.map +1 -0
- package/dist/services/delegated-proxy-config.d.ts +93 -0
- package/dist/services/delegated-proxy-config.d.ts.map +1 -0
- package/dist/services/delegated-proxy-config.js +98 -0
- package/dist/services/delegated-proxy-config.js.map +1 -0
- package/dist/services/delegated-task-result-cache.d.ts +176 -0
- package/dist/services/delegated-task-result-cache.d.ts.map +1 -0
- package/dist/services/delegated-task-result-cache.js +0 -0
- package/dist/services/delegated-task-result-cache.js.map +1 -0
- package/dist/services/delegated-task-runtime.d.ts +346 -0
- package/dist/services/delegated-task-runtime.d.ts.map +1 -0
- package/dist/services/delegated-task-runtime.js +589 -0
- package/dist/services/delegated-task-runtime.js.map +1 -0
- package/dist/services/delegated-task-session-pool.d.ts +182 -0
- package/dist/services/delegated-task-session-pool.d.ts.map +1 -0
- package/dist/services/delegated-task-session-pool.js +292 -0
- package/dist/services/delegated-task-session-pool.js.map +1 -0
- package/dist/services/delegated-tool-runtime.d.ts +50 -0
- package/dist/services/delegated-tool-runtime.d.ts.map +1 -0
- package/dist/services/delegated-tool-runtime.js +120 -0
- package/dist/services/delegated-tool-runtime.js.map +1 -0
- package/dist/services/fts5.d.ts +40 -0
- package/dist/services/fts5.d.ts.map +1 -0
- package/dist/services/fts5.js +54 -0
- package/dist/services/fts5.js.map +1 -0
- package/dist/services/git-account-registry.d.ts +164 -0
- package/dist/services/git-account-registry.d.ts.map +1 -0
- package/dist/services/git-account-registry.js +297 -0
- package/dist/services/git-account-registry.js.map +1 -0
- package/dist/services/github.d.ts +49 -0
- package/dist/services/github.d.ts.map +1 -0
- package/dist/services/github.js +123 -0
- package/dist/services/github.js.map +1 -0
- package/dist/services/gmail-classifier.d.ts +62 -0
- package/dist/services/gmail-classifier.d.ts.map +1 -0
- package/dist/services/gmail-classifier.js +221 -0
- package/dist/services/gmail-classifier.js.map +1 -0
- package/dist/services/gmail.d.ts +192 -0
- package/dist/services/gmail.d.ts.map +1 -0
- package/dist/services/gmail.js +678 -0
- package/dist/services/gmail.js.map +1 -0
- package/dist/services/google-auth.d.ts +16 -0
- package/dist/services/google-auth.d.ts.map +1 -0
- package/dist/services/google-auth.js +37 -0
- package/dist/services/google-auth.js.map +1 -0
- package/dist/services/google-maps.d.ts +35 -0
- package/dist/services/google-maps.d.ts.map +1 -0
- package/dist/services/google-maps.js +82 -0
- package/dist/services/google-maps.js.map +1 -0
- package/dist/services/integrations/extract-write-item-id.d.ts +64 -0
- package/dist/services/integrations/extract-write-item-id.d.ts.map +1 -0
- package/dist/services/integrations/extract-write-item-id.js +188 -0
- package/dist/services/integrations/extract-write-item-id.js.map +1 -0
- package/dist/services/integrations/reconcile.d.ts +136 -0
- package/dist/services/integrations/reconcile.d.ts.map +1 -0
- package/dist/services/integrations/reconcile.js +218 -0
- package/dist/services/integrations/reconcile.js.map +1 -0
- package/dist/services/integrations/snapshot-partitions.d.ts +40 -0
- package/dist/services/integrations/snapshot-partitions.d.ts.map +1 -0
- package/dist/services/integrations/snapshot-partitions.js +113 -0
- package/dist/services/integrations/snapshot-partitions.js.map +1 -0
- package/dist/services/journal/render.d.ts +15 -0
- package/dist/services/journal/render.d.ts.map +1 -0
- package/dist/services/journal/render.js +17 -0
- package/dist/services/journal/render.js.map +1 -0
- package/dist/services/journal/writer.d.ts +26 -0
- package/dist/services/journal/writer.d.ts.map +1 -0
- package/dist/services/journal/writer.js +50 -0
- package/dist/services/journal/writer.js.map +1 -0
- package/dist/services/mail/account-registry.d.ts +208 -0
- package/dist/services/mail/account-registry.d.ts.map +1 -0
- package/dist/services/mail/account-registry.js +554 -0
- package/dist/services/mail/account-registry.js.map +1 -0
- package/dist/services/mail/gmail/auth-failure-classifier.d.ts +24 -0
- package/dist/services/mail/gmail/auth-failure-classifier.d.ts.map +1 -0
- package/dist/services/mail/gmail/auth-failure-classifier.js +67 -0
- package/dist/services/mail/gmail/auth-failure-classifier.js.map +1 -0
- package/dist/services/mail/gmail/gmail-provider.d.ts +58 -0
- package/dist/services/mail/gmail/gmail-provider.d.ts.map +1 -0
- package/dist/services/mail/gmail/gmail-provider.js +434 -0
- package/dist/services/mail/gmail/gmail-provider.js.map +1 -0
- package/dist/services/mail/gmail/legacy-row.d.ts +24 -0
- package/dist/services/mail/gmail/legacy-row.d.ts.map +1 -0
- package/dist/services/mail/gmail/legacy-row.js +71 -0
- package/dist/services/mail/gmail/legacy-row.js.map +1 -0
- package/dist/services/mail/gmail/poll-cursor.d.ts +12 -0
- package/dist/services/mail/gmail/poll-cursor.d.ts.map +1 -0
- package/dist/services/mail/gmail/poll-cursor.js +32 -0
- package/dist/services/mail/gmail/poll-cursor.js.map +1 -0
- package/dist/services/mail/html-to-plaintext.d.ts +27 -0
- package/dist/services/mail/html-to-plaintext.d.ts.map +1 -0
- package/dist/services/mail/html-to-plaintext.js +163 -0
- package/dist/services/mail/html-to-plaintext.js.map +1 -0
- package/dist/services/mail/imap/app-password.d.ts +27 -0
- package/dist/services/mail/imap/app-password.d.ts.map +1 -0
- package/dist/services/mail/imap/app-password.js +86 -0
- package/dist/services/mail/imap/app-password.js.map +1 -0
- package/dist/services/mail/imap/auth-failure-classifier.d.ts +21 -0
- package/dist/services/mail/imap/auth-failure-classifier.d.ts.map +1 -0
- package/dist/services/mail/imap/auth-failure-classifier.js +54 -0
- package/dist/services/mail/imap/auth-failure-classifier.js.map +1 -0
- package/dist/services/mail/imap/capabilities.d.ts +30 -0
- package/dist/services/mail/imap/capabilities.d.ts.map +1 -0
- package/dist/services/mail/imap/capabilities.js +70 -0
- package/dist/services/mail/imap/capabilities.js.map +1 -0
- package/dist/services/mail/imap/client.d.ts +15 -0
- package/dist/services/mail/imap/client.d.ts.map +1 -0
- package/dist/services/mail/imap/client.js +60 -0
- package/dist/services/mail/imap/client.js.map +1 -0
- package/dist/services/mail/imap/cursor.d.ts +19 -0
- package/dist/services/mail/imap/cursor.d.ts.map +1 -0
- package/dist/services/mail/imap/cursor.js +47 -0
- package/dist/services/mail/imap/cursor.js.map +1 -0
- package/dist/services/mail/imap/folder-resolver.d.ts +24 -0
- package/dist/services/mail/imap/folder-resolver.d.ts.map +1 -0
- package/dist/services/mail/imap/folder-resolver.js +58 -0
- package/dist/services/mail/imap/folder-resolver.js.map +1 -0
- package/dist/services/mail/imap/icloud-provider.d.ts +5 -0
- package/dist/services/mail/imap/icloud-provider.d.ts.map +1 -0
- package/dist/services/mail/imap/icloud-provider.js +5 -0
- package/dist/services/mail/imap/icloud-provider.js.map +1 -0
- package/dist/services/mail/imap/imap-provider-base.d.ts +173 -0
- package/dist/services/mail/imap/imap-provider-base.d.ts.map +1 -0
- package/dist/services/mail/imap/imap-provider-base.js +1004 -0
- package/dist/services/mail/imap/imap-provider-base.js.map +1 -0
- package/dist/services/mail/imap/query-translator.d.ts +13 -0
- package/dist/services/mail/imap/query-translator.d.ts.map +1 -0
- package/dist/services/mail/imap/query-translator.js +114 -0
- package/dist/services/mail/imap/query-translator.js.map +1 -0
- package/dist/services/mail/imap/reconcile-planner.d.ts +56 -0
- package/dist/services/mail/imap/reconcile-planner.d.ts.map +1 -0
- package/dist/services/mail/imap/reconcile-planner.js +52 -0
- package/dist/services/mail/imap/reconcile-planner.js.map +1 -0
- package/dist/services/mail/imap/reply-mime.d.ts +24 -0
- package/dist/services/mail/imap/reply-mime.d.ts.map +1 -0
- package/dist/services/mail/imap/reply-mime.js +77 -0
- package/dist/services/mail/imap/reply-mime.js.map +1 -0
- package/dist/services/mail/imap/yahoo-provider.d.ts +5 -0
- package/dist/services/mail/imap/yahoo-provider.d.ts.map +1 -0
- package/dist/services/mail/imap/yahoo-provider.js +5 -0
- package/dist/services/mail/imap/yahoo-provider.js.map +1 -0
- package/dist/services/mail/mail-search.d.ts +35 -0
- package/dist/services/mail/mail-search.d.ts.map +1 -0
- package/dist/services/mail/mail-search.js +59 -0
- package/dist/services/mail/mail-search.js.map +1 -0
- package/dist/services/mail/outlook/auth-failure-classifier.d.ts +38 -0
- package/dist/services/mail/outlook/auth-failure-classifier.d.ts.map +1 -0
- package/dist/services/mail/outlook/auth-failure-classifier.js +91 -0
- package/dist/services/mail/outlook/auth-failure-classifier.js.map +1 -0
- package/dist/services/mail/outlook/client-config.d.ts +34 -0
- package/dist/services/mail/outlook/client-config.d.ts.map +1 -0
- package/dist/services/mail/outlook/client-config.js +58 -0
- package/dist/services/mail/outlook/client-config.js.map +1 -0
- package/dist/services/mail/outlook/delta-cursor.d.ts +66 -0
- package/dist/services/mail/outlook/delta-cursor.d.ts.map +1 -0
- package/dist/services/mail/outlook/delta-cursor.js +85 -0
- package/dist/services/mail/outlook/delta-cursor.js.map +1 -0
- package/dist/services/mail/outlook/graph-client.d.ts +98 -0
- package/dist/services/mail/outlook/graph-client.d.ts.map +1 -0
- package/dist/services/mail/outlook/graph-client.js +198 -0
- package/dist/services/mail/outlook/graph-client.js.map +1 -0
- package/dist/services/mail/outlook/msal-app-factory.d.ts +20 -0
- package/dist/services/mail/outlook/msal-app-factory.d.ts.map +1 -0
- package/dist/services/mail/outlook/msal-app-factory.js +62 -0
- package/dist/services/mail/outlook/msal-app-factory.js.map +1 -0
- package/dist/services/mail/outlook/msal-cache-plugin.d.ts +19 -0
- package/dist/services/mail/outlook/msal-cache-plugin.d.ts.map +1 -0
- package/dist/services/mail/outlook/msal-cache-plugin.js +30 -0
- package/dist/services/mail/outlook/msal-cache-plugin.js.map +1 -0
- package/dist/services/mail/outlook/oauth-device-code.d.ts +26 -0
- package/dist/services/mail/outlook/oauth-device-code.d.ts.map +1 -0
- package/dist/services/mail/outlook/oauth-device-code.js +32 -0
- package/dist/services/mail/outlook/oauth-device-code.js.map +1 -0
- package/dist/services/mail/outlook/oauth-loopback.d.ts +41 -0
- package/dist/services/mail/outlook/oauth-loopback.d.ts.map +1 -0
- package/dist/services/mail/outlook/oauth-loopback.js +223 -0
- package/dist/services/mail/outlook/oauth-loopback.js.map +1 -0
- package/dist/services/mail/outlook/outlook-provider.d.ts +100 -0
- package/dist/services/mail/outlook/outlook-provider.d.ts.map +1 -0
- package/dist/services/mail/outlook/outlook-provider.js +619 -0
- package/dist/services/mail/outlook/outlook-provider.js.map +1 -0
- package/dist/services/mail/outlook/query-translator.d.ts +10 -0
- package/dist/services/mail/outlook/query-translator.d.ts.map +1 -0
- package/dist/services/mail/outlook/query-translator.js +103 -0
- package/dist/services/mail/outlook/query-translator.js.map +1 -0
- package/dist/services/mail/provider.d.ts +267 -0
- package/dist/services/mail/provider.d.ts.map +1 -0
- package/dist/services/mail/provider.js +34 -0
- package/dist/services/mail/provider.js.map +1 -0
- package/dist/services/mail/query-utils.d.ts +13 -0
- package/dist/services/mail/query-utils.d.ts.map +1 -0
- package/dist/services/mail/query-utils.js +18 -0
- package/dist/services/mail/query-utils.js.map +1 -0
- package/dist/services/mail-classifier.d.ts +25 -0
- package/dist/services/mail-classifier.d.ts.map +1 -0
- package/dist/services/mail-classifier.js +52 -0
- package/dist/services/mail-classifier.js.map +1 -0
- package/dist/services/mail-ingestion.d.ts +139 -0
- package/dist/services/mail-ingestion.d.ts.map +1 -0
- package/dist/services/mail-ingestion.js +223 -0
- package/dist/services/mail-ingestion.js.map +1 -0
- package/dist/services/mcp/auto-probe.d.ts +76 -0
- package/dist/services/mcp/auto-probe.d.ts.map +1 -0
- package/dist/services/mcp/auto-probe.js +147 -0
- package/dist/services/mcp/auto-probe.js.map +1 -0
- package/dist/services/mcp/generators/claude.d.ts +18 -0
- package/dist/services/mcp/generators/claude.d.ts.map +1 -0
- package/dist/services/mcp/generators/claude.js +90 -0
- package/dist/services/mcp/generators/claude.js.map +1 -0
- package/dist/services/mcp/generators/codex.d.ts +22 -0
- package/dist/services/mcp/generators/codex.d.ts.map +1 -0
- package/dist/services/mcp/generators/codex.js +102 -0
- package/dist/services/mcp/generators/codex.js.map +1 -0
- package/dist/services/mcp/generators/gemini.d.ts +20 -0
- package/dist/services/mcp/generators/gemini.d.ts.map +1 -0
- package/dist/services/mcp/generators/gemini.js +97 -0
- package/dist/services/mcp/generators/gemini.js.map +1 -0
- package/dist/services/mcp/generators/index.d.ts +20 -0
- package/dist/services/mcp/generators/index.d.ts.map +1 -0
- package/dist/services/mcp/generators/index.js +29 -0
- package/dist/services/mcp/generators/index.js.map +1 -0
- package/dist/services/mcp/generators/types.d.ts +47 -0
- package/dist/services/mcp/generators/types.d.ts.map +1 -0
- package/dist/services/mcp/generators/types.js +40 -0
- package/dist/services/mcp/generators/types.js.map +1 -0
- package/dist/services/mcp/probe.d.ts +31 -0
- package/dist/services/mcp/probe.d.ts.map +1 -0
- package/dist/services/mcp/probe.js +437 -0
- package/dist/services/mcp/probe.js.map +1 -0
- package/dist/services/mcp/registry.d.ts +84 -0
- package/dist/services/mcp/registry.d.ts.map +1 -0
- package/dist/services/mcp/registry.js +387 -0
- package/dist/services/mcp/registry.js.map +1 -0
- package/dist/services/mcp/risk.d.ts +82 -0
- package/dist/services/mcp/risk.d.ts.map +1 -0
- package/dist/services/mcp/risk.js +126 -0
- package/dist/services/mcp/risk.js.map +1 -0
- package/dist/services/mcp/session-materializer.d.ts +123 -0
- package/dist/services/mcp/session-materializer.d.ts.map +1 -0
- package/dist/services/mcp/session-materializer.js +361 -0
- package/dist/services/mcp/session-materializer.js.map +1 -0
- package/dist/services/mcp/tool-audit.d.ts +53 -0
- package/dist/services/mcp/tool-audit.d.ts.map +1 -0
- package/dist/services/mcp/tool-audit.js +74 -0
- package/dist/services/mcp/tool-audit.js.map +1 -0
- package/dist/services/mcp/types.d.ts +88 -0
- package/dist/services/mcp/types.d.ts.map +1 -0
- package/dist/services/mcp/types.js +94 -0
- package/dist/services/mcp/types.js.map +1 -0
- package/dist/services/notion.d.ts +134 -0
- package/dist/services/notion.d.ts.map +1 -0
- package/dist/services/notion.js +350 -0
- package/dist/services/notion.js.map +1 -0
- package/dist/services/obsidian.d.ts +116 -0
- package/dist/services/obsidian.d.ts.map +1 -0
- package/dist/services/obsidian.js +305 -0
- package/dist/services/obsidian.js.map +1 -0
- package/dist/services/service-registry.d.ts +31 -0
- package/dist/services/service-registry.d.ts.map +1 -0
- package/dist/services/service-registry.js +15 -0
- package/dist/services/service-registry.js.map +1 -0
- package/dist/services/voice/transcriber-impl.d.ts +15 -0
- package/dist/services/voice/transcriber-impl.d.ts.map +1 -0
- package/dist/services/voice/transcriber-impl.js +129 -0
- package/dist/services/voice/transcriber-impl.js.map +1 -0
- package/dist/services/voice/transcriber.d.ts +117 -0
- package/dist/services/voice/transcriber.d.ts.map +1 -0
- package/dist/services/voice/transcriber.js +201 -0
- package/dist/services/voice/transcriber.js.map +1 -0
- package/dist/settings/runtime-settings.d.ts +232 -0
- package/dist/settings/runtime-settings.d.ts.map +1 -0
- package/dist/settings/runtime-settings.js +769 -0
- package/dist/settings/runtime-settings.js.map +1 -0
- package/dist/settings/settings-store.d.ts +13 -0
- package/dist/settings/settings-store.d.ts.map +1 -0
- package/dist/settings/settings-store.js +87 -0
- package/dist/settings/settings-store.js.map +1 -0
- package/package.json +85 -0
|
@@ -0,0 +1,4378 @@
|
|
|
1
|
+
import { EventPriority, createEvent, getAgentDayDateStr, getAgentDayBoundsUtc, isBackendId, isDocsQAMessage, isMessageEvent, isRoutineEvent, isAgentTaskEvent, isScheduledDmEvent, isScheduledEvent, isKnowledgeImportEvent, parseSqliteUtcMs, formatSqliteDatetime, resolveProcessKey, } from "@aitne/shared";
|
|
2
|
+
import { existsSync, mkdirSync, readFileSync, renameSync, writeFileSync, } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { CONTEXT_RELATIVE_PATHS } from "./context-paths.js";
|
|
5
|
+
import { getContextDir, isRoadmapStale } from "../config.js";
|
|
6
|
+
import { appendPolicyBlocks, createPromptInjectionBudget, } from "./policy-files.js";
|
|
7
|
+
import { appendReviewContextBlocks } from "./review-context.js";
|
|
8
|
+
import { anyMcpServerEnabled } from "../services/mcp/registry.js";
|
|
9
|
+
import { OWNER_DM_SCOPE, OWNER_SCOPE_KEY, DASHBOARD_CHAT_SCOPE, DASHBOARD_SCOPE_KEY, getConversationScope, } from "../messaging/constants.js";
|
|
10
|
+
import { upsertOwnerChannel } from "../messaging/owner-channels.js";
|
|
11
|
+
import { logInvalidCitations, validateAndRewrite, } from "./docs/citation-validator.js";
|
|
12
|
+
import { cleanupSessionWorkdir, ensureBackendMaterialized, ensureSessionWorkdir, getSessionWorkdirPath, syncAllUserSkills, buildConfiguredServices, } from "./workdir.js";
|
|
13
|
+
import { SessionGateRegistry } from "./session-gate.js";
|
|
14
|
+
import { Semaphore } from "./semaphore.js";
|
|
15
|
+
import { consumeObservations, getPendingCount, getPendingObservations } from "../db/observations.js";
|
|
16
|
+
import { computeHourlyCheckSignals } from "../db/hourly-check-signals.js";
|
|
17
|
+
import { buildGateAuditDetail, decideStage, renderGateDecisionBlock, } from "../scheduler/hourly-check-gate.js";
|
|
18
|
+
import { appendAgentLogLine } from "./today-direct-writer.js";
|
|
19
|
+
import { readIntegrations } from "../db/integrations-store.js";
|
|
20
|
+
import { getRepository, getRepositoryByLocalPath, recordManagementInitDone, recordManagementScan, selectGithubRepoSlugs, } from "../db/repositories-store.js";
|
|
21
|
+
import { runRepositoryManagementInit, runRepositoryManagementScan, } from "./repository-management-docs.js";
|
|
22
|
+
import { consultDelegatedConnectorHealth, markSignoutWarned, renderSignoutDm, } from "./delegated-connector-health.js";
|
|
23
|
+
import { isDegraded as readDegradedMode, isUserPaused, } from "../db/runtime-state.js";
|
|
24
|
+
import { finalizeRetemplate } from "./template-store.js";
|
|
25
|
+
import { countContextWritesInWindow, didRefetchTodayDuringTurn, matchesRecentActivityTrigger, } from "./dm-freshness-metrics.js";
|
|
26
|
+
import { deleteRuntimeState, readRuntimeState, writeRuntimeState, } from "../db/runtime-state.js";
|
|
27
|
+
import { BackendDecisiveFailure, BackendQuotaError, } from "./agent-core.js";
|
|
28
|
+
import { BackendRouterHandledError, } from "./backends/backend-router.js";
|
|
29
|
+
import { maybeTriggerRoadmapRefresh } from "./schedule-insert-helper.js";
|
|
30
|
+
import { flushPendingTodayRefresh } from "./drift-effects.js";
|
|
31
|
+
import { isProactiveForwardMetadata, parseMessageMetadata, } from "./channel-timeline.js";
|
|
32
|
+
import { randomUUID } from "node:crypto";
|
|
33
|
+
import { findRegisteredModel, getModelLabel } from "./backends/model-registry.js";
|
|
34
|
+
import { parseGeminiAuthCode } from "./backends/auth-recovery.js";
|
|
35
|
+
import { tryHandle as tryHandleBangCommand } from "./bang-commands/registry.js";
|
|
36
|
+
import { CUSTOM_BANG_COMMAND_SOURCE, createUserBangCommandEvent, getUserBangCommandById, resolveCommandSkillSlugs, } from "./bang-commands/user-commands.js";
|
|
37
|
+
import { createLogger } from "../logging.js";
|
|
38
|
+
// Module-scoped structured logger; every dispatcher log line is tagged "dispatcher".
const logger = createLogger("dispatcher");
|
|
39
|
+
/**
 * P22 §3.4 step 4 — the optimizer-only allowedTools envelope. Every
 * `routine.skill_curation` event runs the agent with exactly these tools
 * and nothing else. The curl glob is anchored on the daemon's loopback URL
 * so a hook-bypassed request still hits the curation API's chokepoint
 * (Zod, run-token, smoke test); `Read` is required for the agent to
 * consume the inlined data dump under the workdir's `data/` subtree.
 *
 * Kept narrow on purpose: adding any other tool here widens the optimizer's
 * blast radius. If a future signal source needs the agent to write to a
 * different surface, add a new curation API endpoint and let the curl glob
 * cover it — do NOT add `Bash(*)` or `Write` here.
 */
export const SKILL_CURATION_OPTIMIZER_ALLOWED_TOOLS = [
    "Read",
    "Bash(curl http://localhost:8321/api/skill-curation/*)",
];
|
|
56
|
+
/**
 * Unwrap the partial-run context the audit logger needs for a failed
 * event. The dispatcher owns wall-clock timing (the caller passes
 * `durationMs`); backend id / failure kind / failure code are recovered
 * from `BackendRouterHandledError` when the throw came through the
 * router, or from a bare quota / decisive-failure error. For any other
 * error we still record the duration so the dashboard "Duration" column
 * stays honest.
 *
 * @param {unknown} err - the value thrown by the agent run.
 * @param {number} durationMs - wall-clock duration measured by the caller.
 * @returns {{ durationMs: number, backendId?: string, failureKind?: string, failureCode?: string }}
 */
function buildLogErrorContext(err, durationMs) {
    if (err instanceof BackendRouterHandledError) {
        const cause = err.cause;
        // Classify the router's underlying cause; fall back to the
        // router's own mainFailure backend when the cause is opaque.
        let failure;
        if (cause instanceof BackendQuotaError) {
            failure = { backendId: cause.backendId, kind: "quota", code: cause.originalCode };
        }
        else if (cause instanceof BackendDecisiveFailure) {
            failure = { backendId: cause.backendId, kind: cause.kind };
        }
        else {
            failure = { backendId: err.mainFailure.backendId };
        }
        const context = { durationMs, backendId: failure.backendId };
        if (failure.kind) {
            context.failureKind = failure.kind;
        }
        if (failure.code) {
            context.failureCode = failure.code;
        }
        return context;
    }
    if (err instanceof BackendQuotaError) {
        return {
            durationMs,
            backendId: err.backendId,
            failureKind: "quota",
            failureCode: err.originalCode,
        };
    }
    if (err instanceof BackendDecisiveFailure) {
        return {
            durationMs,
            backendId: err.backendId,
            failureKind: err.kind,
        };
    }
    // Unknown error shape: the duration is all we can report truthfully.
    return { durationMs };
}
|
|
99
|
+
/**
 * Validate and narrow the loosely-typed task context attached to a
 * repository-run agent task event.
 *
 * @param {unknown} taskCtx - raw `taskContext` payload from the event row.
 * @returns {object | null} the normalized context, or null when any
 *   required field (triggerSource, repositoryId, slug, prompt,
 *   workdirMode) is missing or has the wrong shape.
 */
function parseRepositoryRunTaskContext(taskCtx) {
    if (!taskCtx || typeof taskCtx !== "object") {
        return null;
    }
    const ctx = taskCtx;
    // Only these three trigger sources are repository runs.
    const knownSources = ["manual", "trigger_manual_fire", "repository_trigger"];
    if (!knownSources.includes(ctx.triggerSource)) {
        return null;
    }
    const hasRequiredStrings = typeof ctx.repositoryId === "string"
        && typeof ctx.slug === "string"
        && typeof ctx.prompt === "string";
    const hasValidWorkdirMode = ctx.workdirMode === "temp" || ctx.workdirMode === "local-clone";
    if (!hasRequiredStrings || !hasValidWorkdirMode) {
        return null;
    }
    // Empty strings collapse to null for the two optional location fields.
    const asNonEmptyString = (v) => (typeof v === "string" && v.length > 0 ? v : null);
    const result = {
        triggerSource: ctx.triggerSource,
        repositoryId: ctx.repositoryId,
        slug: ctx.slug,
        localPath: asNonEmptyString(ctx.localPath),
        githubRepo: asNonEmptyString(ctx.githubRepo),
        workdirMode: ctx.workdirMode,
        prompt: ctx.prompt,
        instructionMd: typeof ctx.instructionMd === "string" ? ctx.instructionMd : null,
        timeoutMinutes: typeof ctx.timeoutMinutes === "number" ? ctx.timeoutMinutes : null,
    };
    // Trigger metadata is carried through only when present and well-typed.
    if (typeof ctx.triggerId === "string") {
        result.triggerId = ctx.triggerId;
    }
    if (typeof ctx.triggerName === "string") {
        result.triggerName = ctx.triggerName;
    }
    if (typeof ctx.triggerEventType === "string") {
        result.triggerEventType = ctx.triggerEventType;
    }
    if ("triggerEventPayload" in ctx) {
        result.triggerEventPayload = ctx.triggerEventPayload;
    }
    return result;
}
|
|
136
|
+
/**
 * Pick the instruction-file name a repository-run workdir should carry
 * for the given backend: Codex reads AGENTS.md, Gemini reads GEMINI.md,
 * and every other backend falls back to CLAUDE.md.
 */
function repositoryRunInstructionFilename(backendId) {
    switch (backendId) {
        case "codex":
            return "AGENTS.md";
        case "gemini":
            return "GEMINI.md";
        default:
            return "CLAUDE.md";
    }
}
|
|
143
|
+
/**
 * Derive a filesystem-safe directory name from a repository slug:
 * lowercase, collapse every run of illegal characters to a single "-",
 * trim dashes from both ends, and fall back to "repository" when
 * nothing survives.
 */
function safeRepositoryRunDirName(slug) {
    const lowered = slug.toLowerCase();
    const dashed = lowered.replace(/[^a-z0-9._-]+/g, "-");
    const trimmed = dashed.replace(/^-+/, "").replace(/-+$/, "");
    return trimmed.length > 0 ? trimmed : "repository";
}
|
|
150
|
+
/**
 * Split an "owner/repo" slug into its [owner, repo] pair. Returns
 * [null, null] for a missing value or anything that is not exactly two
 * non-empty segments separated by a single "/".
 */
function parseGithubRepoSlug(value) {
    if (!value) {
        return [null, null];
    }
    const [owner, repo, ...extra] = value.split("/");
    if (extra.length > 0 || !owner || !repo) {
        return [null, null];
    }
    return [owner, repo];
}
|
|
158
|
+
/**
 * Coerce an arbitrary stored value into a repository classification;
 * anything other than the literal "project" collapses to "repo-only".
 */
function normalizeRepositoryClassification(value) {
    if (value === "project") {
        return "project";
    }
    return "repo-only";
}
|
|
161
|
+
/**
 * Coerce an arbitrary stored value into one of the known repository
 * categories ("work" | "personal" | "research" | "client" | "other");
 * any unrecognized value collapses to "other".
 */
function normalizeRepositoryCategory(value) {
    const knownCategories = ["work", "personal", "research", "client", "other"];
    return knownCategories.includes(value) ? value : "other";
}
|
|
170
|
+
/**
 * cost-reduction-structural §B — extract the JSON verdict from a
 * Stage 2 triage response. The contract is strict: a single line
 * matching `{ "action": "log_only" | "escalate", ... }`. Anything else
 * — empty output, prose around the JSON, missing fields, malformed JSON
 * — falls back to `failed` so the caller cautiously escalates rather
 * than silently silencing.
 *
 * @param {string | null | undefined} output - raw triage response text.
 * @returns {"log_only" | "escalate" | "failed"}
 */
export function parseStage2Verdict(output) {
    const trimmed = (output ?? "").trim();
    if (!trimmed)
        return "failed";
    // Tolerate code fences (```json … ```) without making them mandatory.
    const stripped = trimmed
        .replace(/^```(?:json)?\s*/i, "")
        .replace(/```\s*$/i, "")
        .trim();
    // Find the FIRST balanced JSON object — agents occasionally emit
    // trailing prose after the JSON line. NOTE: a non-greedy regex
    // (`\{[\s\S]*?\}`) would stop at the first `}` and truncate any
    // verdict that contains a nested object (e.g.
    // `{"action":"log_only","signals":{...}}`), misreporting it as
    // `failed`; walk the braces with a depth counter instead.
    const candidate = extractFirstBalancedObject(stripped);
    if (!candidate)
        return "failed";
    let parsed;
    try {
        parsed = JSON.parse(candidate);
    }
    catch {
        return "failed";
    }
    if (!parsed || typeof parsed !== "object")
        return "failed";
    const action = parsed.action;
    if (action === "log_only" || action === "escalate")
        return action;
    return "failed";
}
/**
 * Return the substring spanning the first balanced `{ … }` object in
 * `text`, or null when none exists. String-literal aware: `{` / `}`
 * inside double-quoted JSON strings (including escaped quotes) do not
 * affect the depth count.
 */
function extractFirstBalancedObject(text) {
    const start = text.indexOf("{");
    if (start === -1)
        return null;
    let depth = 0;
    let inString = false;
    for (let i = start; i < text.length; i += 1) {
        const ch = text[i];
        if (inString) {
            if (ch === "\\")
                i += 1; // skip the escaped character
            else if (ch === '"')
                inString = false;
            continue;
        }
        if (ch === '"') {
            inString = true;
        }
        else if (ch === "{") {
            depth += 1;
        }
        else if (ch === "}") {
            depth -= 1;
            if (depth === 0)
                return text.slice(start, i + 1);
        }
    }
    // Unbalanced — no complete object to hand to JSON.parse.
    return null;
}
|
|
206
|
+
// runtime_state key under which the active setup mode ({ mode: "initial" | "update" }) is persisted.
const CURRENT_SETUP_MODE_STATE_KEY = "current_setup_mode";
|
|
207
|
+
// English-only patterns by CLAUDE.md convention. The disavowal tripwire
// is a numerator over a `disavowed / injected` ratio (dm-channel-timeline.md
// §C.1) — false negatives are tolerated; non-English replies that happen to
// disavow will simply not contribute to the numerator. Operators monitor the
// ratio and flip `proactiveForwardForceFreshSession` on rise; that fallback
// path is language-agnostic.
// Each regex flags a reply that appears to disavow a previously forwarded
// message (case-insensitive; matched anywhere in the reply text).
const PROACTIVE_FORWARD_DISAVOWAL_PATTERNS = [
    /\b(?:don't|do not) (?:recall|remember)\b/i,
    /\bI (?:didn't|did not) (?:say|send|mention)\b/i,
    /\breferenc(?:ing|e) what\b/i,
    /\bwhat did .* (?:say|mean|refer)\b/i,
];
|
|
219
|
+
export class EventDispatcher {
|
|
220
|
+
eventBus;
|
|
221
|
+
agentRouter;
|
|
222
|
+
contextBuilder;
|
|
223
|
+
getTaskFlow;
|
|
224
|
+
notificationMgr;
|
|
225
|
+
sessionMgr;
|
|
226
|
+
messageRecorder;
|
|
227
|
+
audit;
|
|
228
|
+
db;
|
|
229
|
+
config;
|
|
230
|
+
todayWriteLock;
|
|
231
|
+
services;
|
|
232
|
+
roadmapWriteLock;
|
|
233
|
+
writeTracker;
|
|
234
|
+
reactiveSem;
|
|
235
|
+
autonomousSem;
|
|
236
|
+
hasMessageBackendMetadataColumns;
|
|
237
|
+
shutdown = false;
|
|
238
|
+
shutdownAwaiters = new Set();
|
|
239
|
+
signalDetector = null;
|
|
240
|
+
dashboardStream = null;
|
|
241
|
+
/**
|
|
242
|
+
* Docs-QA citation lookup. Wired at startup via
|
|
243
|
+
* `setDocsCitationLookup`; null elsewhere so this module stays tree-
|
|
244
|
+
* shakable for tests that don't construct the docs indexer. The
|
|
245
|
+
* dispatcher only consults it when `isDocsQAMessage(event)` is true,
|
|
246
|
+
* so a null lookup never affects chat / DM / routine flows.
|
|
247
|
+
*/
|
|
248
|
+
docsCitationLookup = null;
|
|
249
|
+
authRecovery = null;
|
|
250
|
+
authHealthMonitor = null;
|
|
251
|
+
/**
|
|
252
|
+
* Messaging bang-commands registry — short, exact-match owner controls
|
|
253
|
+
* (`!stop` / `!start` / `!cost` / `!report`) intercepted in
|
|
254
|
+
* `handleMessage` before any agent backend is invoked. Optional so tests
|
|
255
|
+
* that build a dispatcher without the registry continue to pass; when
|
|
256
|
+
* null, all DMs flow straight to the agent path.
|
|
257
|
+
*
|
|
258
|
+
* Spec: docs/design/backlog/messaging-bang-commands.md
|
|
259
|
+
*/
|
|
260
|
+
bangCommandRegistry = null;
|
|
261
|
+
/**
|
|
262
|
+
* Current setup mode — scope-agnostic flag that survives internal
|
|
263
|
+
* direct-message session refresh (day boundary, stale flag, etc). Previously this
|
|
264
|
+
* was a `Map<sessionId, mode>` keyed by `conversation_sessions.id`, but the
|
|
265
|
+
* session row is routinely closed and recreated by `getOrCreateDm()` when
|
|
266
|
+
* a loud prompt-context change marks active DMs stale, which orphaned the
|
|
267
|
+
* map entry and silently dropped the setup flow back to the generic DM prompt.
|
|
268
|
+
* A single nullable flag is the right
|
|
269
|
+
* granularity because the dashboard owner-DM scope is singular.
|
|
270
|
+
*
|
|
271
|
+
* Persisted to runtime_state so setup mode survives daemon restart. Without
|
|
272
|
+
* persistence, an update-flow setup conversation that crashes mid-flight
|
|
273
|
+
* would re-open the gate on restart and re-introduce the stale-session race.
|
|
274
|
+
*
|
|
275
|
+
* No auto-expiry. The original 30-minute safety net turned out to be
|
|
276
|
+
* actively harmful: it fired DURING legitimate long setup conversations
|
|
277
|
+
* (which is exactly the pattern that triggered the original report) and
|
|
278
|
+
* re-opened the bug it was trying to guard. Setup mode is only cleared by
|
|
279
|
+
* explicit `clearSetupMode()` from `/setup/save-rules`.
|
|
280
|
+
*/
|
|
281
|
+
currentSetupMode = null;
|
|
282
|
+
/** Per-session FIFO gate: owner DMs share one key; thread sessions
|
|
283
|
+
* keep their own lane. SCHEDULED-DM-IMPLEMENTATION-PLAN §3.6 — also
|
|
284
|
+
* used by `scheduled.dm` to acquire BOTH owner-facing scopes in
|
|
285
|
+
* lex-sorted (deadlock-free) order. */
|
|
286
|
+
sessionGates = new SessionGateRegistry();
|
|
287
|
+
/** Dedup guard: timestamp of the last roadmap_refresh emission */
|
|
288
|
+
lastRoadmapRefreshEmitMs = 0;
|
|
289
|
+
morningRoutineInProgress = false;
|
|
290
|
+
hourlyCheckInProgress = false;
|
|
291
|
+
/**
|
|
292
|
+
* P22 §3.4 — wired by `index.ts` after the daemon's data dir + skills root
|
|
293
|
+
* are known. Returns a `{runId, runToken, workdirPath, targetSkills}` tuple
|
|
294
|
+
* the optimizer routine then runs against. Injected as a callback so the
|
|
295
|
+
* dispatcher does not import the workdir module directly (avoids a cycle
|
|
296
|
+
* with SkillsCompiler / SecretBroker).
|
|
297
|
+
*/
|
|
298
|
+
materializeOptimizerWorkdir = null;
|
|
299
|
+
teardownOptimizerWorkdir = null;
|
|
300
|
+
setSkillCurationHooks(hooks) {
|
|
301
|
+
this.materializeOptimizerWorkdir = hooks.materialize;
|
|
302
|
+
this.teardownOptimizerWorkdir = hooks.teardown;
|
|
303
|
+
}
|
|
304
|
+
/**
 * Daily autonomous-spend lookup: sums `cost_usd` over `agent_actions`
 * rows with trigger = 'autonomous' inside a half-open [start, end)
 * window. Two bound params: window start and window end (presumably
 * SQLite UTC datetime strings, matching formatSqliteDatetime — confirm
 * against the caller).
 */
static COST_CAP_SQL = `SELECT COALESCE(SUM(cost_usd), 0) as cost
FROM agent_actions
WHERE trigger = 'autonomous'
AND started_at >= ? AND started_at < ?`;
|
|
308
|
+
/** Map `turn_token → sessionId` for in-flight turns. The API layer
|
|
309
|
+
* calls `validateAttachmentTurnToken(token)` to authorise
|
|
310
|
+
* `POST /api/chat/outbound-attachments`; the entry is cleared in a
|
|
311
|
+
* `finally` so orphan tokens don't survive past the turn that
|
|
312
|
+
* spawned them. */
|
|
313
|
+
activeTurnTokens = new Map();
|
|
314
|
+
/** Injected lazily via `setAttachmentStore` — optional for tests
|
|
315
|
+
* and older code paths that don't wire the store. When null, the
|
|
316
|
+
* dispatcher skips attachment staging + outbound collection. */
|
|
317
|
+
attachmentStore = null;
|
|
318
|
+
/** Injected lazily via `setDelegatedSyncRefresh` — optional. When null,
|
|
319
|
+
* hourly check fires without first refreshing delegated-mode snapshots,
|
|
320
|
+
* matching the pre-Phase-9 behaviour. Wired in production when at
|
|
321
|
+
* least one integration is in delegated mode. See
|
|
322
|
+
* `docs/design/appendices/delegated-sync-opt-in.md` and the worker's
|
|
323
|
+
* `runDisabledCadencesForHourlyCheck` method. */
|
|
324
|
+
delegatedSyncRefresh = null;
|
|
325
|
+
/** Injected lazily via `setVoiceTranscriber` — optional. When null,
|
|
326
|
+
* inbound audio attachments fall back to the path-only prompt block
|
|
327
|
+
* exactly as they did before the local-Whisper layer landed. See
|
|
328
|
+
* `docs/design/appendices/voice-transcription.md`. */
|
|
329
|
+
voiceTranscriber = null;
|
|
330
|
+
/**
|
|
331
|
+
* Notify-dedup tracking — set of correlationIds for in-flight events
|
|
332
|
+
* that have already invoked `POST /api/notify` from inside the agent
|
|
333
|
+
* run. The `/api/notify` route calls `markEventNotified` (via api-deps)
|
|
334
|
+
* on success, and `processResult` consumes the entry with `Set.delete`.
|
|
335
|
+
* When present, the implicit "final assistant text → DM" forward is
|
|
336
|
+
* suppressed — preventing the duplicate-notification bug where the LLM
|
|
337
|
+
* sends both an explicit notify and a non-empty closing turn.
|
|
338
|
+
*
|
|
339
|
+
* In-memory only; single-daemon scope. Cleanup contract:
|
|
340
|
+
* - Success path: `processResult.delete()` removes the entry exactly
|
|
341
|
+
* once per event run (every dispatch path funnels into it).
|
|
342
|
+
* - Throw path: `handleError.delete()` is the defense-in-depth
|
|
343
|
+
* cleanup for entries left when execution threw before reaching
|
|
344
|
+
* `processResult`.
|
|
345
|
+
* - Retry path: `executeWithRetry` reuses the same correlationId
|
|
346
|
+
* across attempts, but only one `processResult` call closes the
|
|
347
|
+
* run, so the marker is consumed exactly once.
|
|
348
|
+
*
|
|
349
|
+
* Cross-event safety: each `createEvent()` mints a fresh UUID, so a
|
|
350
|
+
* stale entry surviving both cleanup paths cannot poison a later
|
|
351
|
+
* unrelated event run. Scheduler-resurrected events (scheduler.ts
|
|
352
|
+
* carries `row.correlation_id` when present) intentionally inherit
|
|
353
|
+
* the same id, which is the correct behaviour — they continue the
|
|
354
|
+
* same logical run.
|
|
355
|
+
*/
|
|
356
|
+
notifiedEvents = new Set();
|
|
357
|
+
/**
 * Wire the dispatcher's injected collaborators onto `this`, size the
 * reactive/autonomous semaphores from config, feature-detect optional
 * `messages` columns, and restore any persisted setup mode. The pragma
 * probe and the runtime_state read are the only side effects beyond
 * field assignment.
 */
constructor(eventBus, agentRouter, contextBuilder, getTaskFlow, notificationMgr, sessionMgr, messageRecorder, audit, db, config, todayWriteLock, services, roadmapWriteLock, writeTracker) {
    this.eventBus = eventBus;
    this.agentRouter = agentRouter;
    this.contextBuilder = contextBuilder;
    this.getTaskFlow = getTaskFlow;
    this.notificationMgr = notificationMgr;
    this.sessionMgr = sessionMgr;
    this.messageRecorder = messageRecorder;
    this.audit = audit;
    this.db = db;
    this.config = config;
    this.todayWriteLock = todayWriteLock;
    this.services = services;
    this.roadmapWriteLock = roadmapWriteLock;
    this.writeTracker = writeTracker;
    // Separate concurrency lanes: reactive vs autonomous sessions, each
    // capped independently from config.
    this.reactiveSem = new Semaphore(config.maxReactiveSessions);
    this.autonomousSem = new Semaphore(config.maxConcurrentSessions);
    // Feature-detect the backend/model_id columns on `messages`
    // (presumably absent on older schemas — confirm against migrations).
    const messageColumns = new Set(this.db.pragma("table_info(messages)").map((column) => column.name));
    this.hasMessageBackendMetadataColumns =
        messageColumns.has("backend") && messageColumns.has("model_id");
    // Restore setup mode from runtime_state. If the daemon crashed or was
    // restarted during a setup conversation, the in-memory flag would be
    // lost and autonomous work would resume mid-setup — re-opening the
    // exact race this gate was designed to prevent.
    this.currentSetupMode = this.loadPersistedSetupMode();
    if (this.currentSetupMode !== null) {
        logger.info({ mode: this.currentSetupMode }, "Restored setup mode from runtime_state — autonomous work remains paused");
    }
}
|
|
386
|
+
loadPersistedSetupMode() {
|
|
387
|
+
const raw = readRuntimeState(this.db, CURRENT_SETUP_MODE_STATE_KEY);
|
|
388
|
+
if (raw && (raw.mode === "initial" || raw.mode === "update")) {
|
|
389
|
+
return raw.mode;
|
|
390
|
+
}
|
|
391
|
+
return null;
|
|
392
|
+
}
|
|
393
|
+
/**
|
|
394
|
+
* B-007 §5.8 — compose the final prompt by loading the task-flow
|
|
395
|
+
* template and appending the vault policy-files block (rules/*.md,
|
|
396
|
+
* routines/<cadence>.md, custom routine file, etc.). Centralised
|
|
397
|
+
* here so every dispatch path sees the same policy bundle.
|
|
398
|
+
*/
|
|
399
|
+
assemblePrompt(eventType, processKey, backendId, flags) {
|
|
400
|
+
const integrations = readIntegrations(this.db);
|
|
401
|
+
const base = this.getTaskFlow(eventType, backendId, integrations);
|
|
402
|
+
const contextDir = this.getPromptPolicyContextDir();
|
|
403
|
+
if (!contextDir) {
|
|
404
|
+
return base;
|
|
405
|
+
}
|
|
406
|
+
// B-003 Phase 3 — the `rules/mcp.md` PolicyFileRef is registered with
|
|
407
|
+
// `injectIf: ctx.flags?.mcpEnabled === true`. Compute the flag here so
|
|
408
|
+
// every dispatch path (DM, routine, scheduled task, fallback reassembly)
|
|
409
|
+
// injects the policy without each call site having to remember it.
|
|
410
|
+
const mergedFlags = {
|
|
411
|
+
...(flags ?? {}),
|
|
412
|
+
mcpEnabled: anyMcpServerEnabled(this.db),
|
|
413
|
+
};
|
|
414
|
+
// Share a single budget across policy + review-context injection so the
|
|
415
|
+
// aggregate cap (POLICY_TOTAL_MAX_BYTES) covers both — avoids the
|
|
416
|
+
// double-accounting bug where each injector independently consumed the
|
|
417
|
+
// full cap and inflated the effective prompt-injection ceiling to 2×.
|
|
418
|
+
const budget = createPromptInjectionBudget();
|
|
419
|
+
const withPolicies = appendPolicyBlocks(base, {
|
|
420
|
+
contextDir,
|
|
421
|
+
processKey,
|
|
422
|
+
flags: mergedFlags,
|
|
423
|
+
budget,
|
|
424
|
+
});
|
|
425
|
+
return appendReviewContextBlocks(withPolicies, {
|
|
426
|
+
contextDir,
|
|
427
|
+
processKey,
|
|
428
|
+
flags: {
|
|
429
|
+
useReviewDossiers: this.config.useReviewDossiers,
|
|
430
|
+
useContextIndex: this.config.useContextIndex,
|
|
431
|
+
},
|
|
432
|
+
budget,
|
|
433
|
+
});
|
|
434
|
+
}
|
|
435
|
+
/**
|
|
436
|
+
* Policy-file prompt assembly must not fall back to `<dataDir>/context`
|
|
437
|
+
* while degraded. Reactive sessions still run so the user can repair the
|
|
438
|
+
* vault, but the prompt must not silently inject stale rulebooks from a
|
|
439
|
+
* legacy location.
|
|
440
|
+
*/
|
|
441
|
+
getPromptPolicyContextDir() {
|
|
442
|
+
if (readDegradedMode(this.db)) {
|
|
443
|
+
return null;
|
|
444
|
+
}
|
|
445
|
+
return getContextDir(this.config);
|
|
446
|
+
}
|
|
447
|
+
/** Set the SignalDetector for implicit feedback collection from user messages. */
|
|
448
|
+
setSignalDetector(detector) {
|
|
449
|
+
this.signalDetector = detector;
|
|
450
|
+
}
|
|
451
|
+
/** Set the dashboard stream adapter for real-time response streaming. */
|
|
452
|
+
setDashboardStream(adapter) {
|
|
453
|
+
this.dashboardStream = adapter;
|
|
454
|
+
}
|
|
455
|
+
/**
|
|
456
|
+
* Wire the docs-QA citation lookup. Called once at startup from
|
|
457
|
+
* `index.ts` after the docs indexer is built. The dispatcher uses it
|
|
458
|
+
* for the persistence-side `validateAndRewrite` pass on docs_qa
|
|
459
|
+
* assistant output (see DOCS_QA_B7_DESIGN.md §11.1) — chat / DM /
|
|
460
|
+
* routine paths never touch it.
|
|
461
|
+
*/
|
|
462
|
+
setDocsCitationLookup(lookup) {
|
|
463
|
+
this.docsCitationLookup = lookup;
|
|
464
|
+
}
|
|
465
|
+
/** Chat-attachments Phase 1 — inject the shared AttachmentStore. */
|
|
466
|
+
setAttachmentStore(store) {
|
|
467
|
+
this.attachmentStore = store;
|
|
468
|
+
}
|
|
469
|
+
/** Inject the local-Whisper voice transcriber. Optional — when unset,
|
|
470
|
+
* inbound audio attachments are passed to the backend with a path-only
|
|
471
|
+
* reference (the pre-feature behaviour). */
|
|
472
|
+
setVoiceTranscriber(transcriber) {
|
|
473
|
+
this.voiceTranscriber = transcriber;
|
|
474
|
+
}
|
|
475
|
+
/**
|
|
476
|
+
* Inject the delegated-sync refresh callback. Called from
|
|
477
|
+
* `triggerHourlyCheck` before the gate decision so any cadence the
|
|
478
|
+
* operator left opted-OUT (post-Phase-9 default) populates fresh
|
|
479
|
+
* Gmail / Notion observations the agent can then consume.
|
|
480
|
+
*
|
|
481
|
+
* Wired as a thunk rather than a worker reference so the dispatcher
|
|
482
|
+
* stays decoupled from the observers layer and the live worker
|
|
483
|
+
* instance can be re-registered (integration mode flips) without the
|
|
484
|
+
* dispatcher holding a stale reference.
|
|
485
|
+
*
|
|
486
|
+
* Pass `null` to detach (e.g. when no delegated integration exists).
|
|
487
|
+
* The hourly check then proceeds without a refresh — equivalent to the
|
|
488
|
+
* pre-injection behaviour.
|
|
489
|
+
*/
|
|
490
|
+
setDelegatedSyncRefresh(fn) {
|
|
491
|
+
this.delegatedSyncRefresh = fn;
|
|
492
|
+
}
|
|
493
|
+
/**
|
|
494
|
+
* Authorise an `X-Turn-Token` for POST /api/chat/outbound-attachments.
|
|
495
|
+
* Returns the DB session id bound to that token while a turn is still
|
|
496
|
+
* running, null otherwise.
|
|
497
|
+
*/
|
|
498
|
+
validateAttachmentTurnToken(token) {
|
|
499
|
+
const sessionId = this.activeTurnTokens.get(token);
|
|
500
|
+
if (sessionId === undefined)
|
|
501
|
+
return null;
|
|
502
|
+
return { sessionId };
|
|
503
|
+
}
|
|
504
|
+
/** Internal — issue a turn token bound to a session. Cleared by
|
|
505
|
+
* `releaseAttachmentTurnToken` in a `finally`. */
|
|
506
|
+
issueAttachmentTurnToken(sessionId) {
|
|
507
|
+
const token = randomUUID();
|
|
508
|
+
this.activeTurnTokens.set(token, sessionId);
|
|
509
|
+
return token;
|
|
510
|
+
}
|
|
511
|
+
releaseAttachmentTurnToken(token) {
|
|
512
|
+
this.activeTurnTokens.delete(token);
|
|
513
|
+
}
|
|
514
|
+
/**
|
|
515
|
+
* Stage inbound attachments into `<sessionDir>/_attachments/` via
|
|
516
|
+
* hard-link (or copy on EXDEV). Returns the rows that were actually
|
|
517
|
+
* staged — callers feed these into the prompt-block builder.
|
|
518
|
+
*/
|
|
519
|
+
stageInboundAttachments(event, sessionDir) {
|
|
520
|
+
if (!this.attachmentStore || !sessionDir)
|
|
521
|
+
return [];
|
|
522
|
+
if (!event.attachments || event.attachments.length === 0)
|
|
523
|
+
return [];
|
|
524
|
+
const staged = [];
|
|
525
|
+
for (const ref of event.attachments) {
|
|
526
|
+
const row = this.attachmentStore.get(ref.id);
|
|
527
|
+
if (!row)
|
|
528
|
+
continue;
|
|
529
|
+
try {
|
|
530
|
+
this.attachmentStore.stageIntoWorkdir({ row, sessionDir });
|
|
531
|
+
staged.push(row);
|
|
532
|
+
}
|
|
533
|
+
catch (err) {
|
|
534
|
+
logger.warn({ err, attachmentId: row.id }, "Failed to stage attachment");
|
|
535
|
+
}
|
|
536
|
+
}
|
|
537
|
+
return staged;
|
|
538
|
+
}
|
|
539
|
+
/**
|
|
540
|
+
* Read the `messages.id` that was just persisted for this session.
|
|
541
|
+
* Used to bind inbound attachment rows to the user message (so the
|
|
542
|
+
* history endpoint can re-serve them) and outbound attachments to the
|
|
543
|
+
* assistant message. `better-sqlite3`'s `last_insert_rowid()` is
|
|
544
|
+
* per-connection and authoritative right after `recordMessage` returns
|
|
545
|
+
* (its transaction has committed synchronously).
|
|
546
|
+
*/
|
|
547
|
+
readLastInsertedMessageId(sessionId) {
|
|
548
|
+
try {
|
|
549
|
+
const row = this.db
|
|
550
|
+
.prepare(`SELECT last_insert_rowid() AS id`)
|
|
551
|
+
.get();
|
|
552
|
+
if (!row || !Number.isFinite(row.id) || row.id <= 0)
|
|
553
|
+
return null;
|
|
554
|
+
// Guard against a completely unrelated insert racing in on the same
|
|
555
|
+
// connection (shouldn't happen — better-sqlite3 is sync — but cheap
|
|
556
|
+
// to verify). If the latest insert isn't for this session, abandon.
|
|
557
|
+
const check = this.db
|
|
558
|
+
.prepare(`SELECT session_id FROM messages WHERE id = ?`)
|
|
559
|
+
.get(row.id);
|
|
560
|
+
if (!check || check.session_id !== sessionId)
|
|
561
|
+
return null;
|
|
562
|
+
return row.id;
|
|
563
|
+
}
|
|
564
|
+
catch {
|
|
565
|
+
return null;
|
|
566
|
+
}
|
|
567
|
+
}
|
|
568
|
+
/**
|
|
569
|
+
* Resolve the `UserBangCommand` row that produced this event, when one
|
|
570
|
+
* applies. The dispatcher consults the row to apply the per-command
|
|
571
|
+
* skill set and instruction body to the session workdir before the
|
|
572
|
+
* agent runs. Returns `null` for non-bang messages, for bang events
|
|
573
|
+
* whose row was deleted between enqueue and dispatch, and for events
|
|
574
|
+
* whose `data.customBangCommand.id` is missing or malformed (defense
|
|
575
|
+
* against a future event constructor that forgets to set it).
|
|
576
|
+
*/
|
|
577
|
+
lookupCustomBangCommandForEvent(event) {
|
|
578
|
+
if (event.source !== CUSTOM_BANG_COMMAND_SOURCE)
|
|
579
|
+
return null;
|
|
580
|
+
const ref = event.data?.customBangCommand;
|
|
581
|
+
if (!ref || typeof ref !== "object")
|
|
582
|
+
return null;
|
|
583
|
+
const id = ref.id;
|
|
584
|
+
if (typeof id !== "number" || !Number.isInteger(id) || id <= 0) {
|
|
585
|
+
return null;
|
|
586
|
+
}
|
|
587
|
+
return getUserBangCommandById(this.db, id);
|
|
588
|
+
}
|
|
589
|
+
/**
|
|
590
|
+
* Run local-Whisper transcription on every audio attachment in `rows`.
|
|
591
|
+
* Cached transcripts are returned without re-running inference. Returns
|
|
592
|
+
* an empty map when the transcriber is unset, when no rows are audio,
|
|
593
|
+
* or when every transcription failed — callers always render the path
|
|
594
|
+
* even if the transcript is missing.
|
|
595
|
+
*/
|
|
596
|
+
async transcribeAttachments(rows) {
|
|
597
|
+
const transcripts = new Map();
|
|
598
|
+
if (!this.voiceTranscriber || rows.length === 0)
|
|
599
|
+
return transcripts;
|
|
600
|
+
for (const row of rows) {
|
|
601
|
+
if (!this.voiceTranscriber.isAudio(row.mimeType))
|
|
602
|
+
continue;
|
|
603
|
+
try {
|
|
604
|
+
const result = await this.voiceTranscriber.transcribe({
|
|
605
|
+
attachmentId: row.id,
|
|
606
|
+
path: row.path,
|
|
607
|
+
mimeType: row.mimeType,
|
|
608
|
+
});
|
|
609
|
+
if (result)
|
|
610
|
+
transcripts.set(row.id, result);
|
|
611
|
+
}
|
|
612
|
+
catch (err) {
|
|
613
|
+
logger.warn({ err, attachmentId: row.id }, "voice transcription threw; falling back to path-only prompt");
|
|
614
|
+
}
|
|
615
|
+
}
|
|
616
|
+
return transcripts;
|
|
617
|
+
}
|
|
618
|
+
    /**
     * Compose the "[Attached files]" prompt block that the dispatcher
     * appends to the task-flow body for turns with inbound attachments.
     * Kept in the dispatcher (not in prompts.ts) because the attachment
     * rows are local state for this turn only.
     *
     * @param rows staged attachment rows (reads safeFilename, mimeType,
     *   sizeBytes, caption, id)
     * @param transcripts optional Map of attachment id -> transcript result
     * @returns "" when there are no rows, otherwise a newline-joined block
     */
    buildAttachmentPromptBlock(rows, transcripts) {
        if (rows.length === 0)
            return "";
        const lines = [
            "",
            "[Attached files]",
            "The user attached the following files for this turn. Paths are relative",
            "to your working directory. Use the appropriate local tool for each",
            "file type: images/PDFs may be readable directly, while audio/video",
            "are staged as files for inspection, transcription, or conversion when",
            "the active backend has suitable tools.",
        ];
        const transcriberEnabled = this.voiceTranscriber?.isEnabled() ?? false;
        for (const row of rows) {
            const rel = `_attachments/${row.safeFilename}`;
            // Floor at 1 so tiny files don't render as "0 KB".
            const size = `${Math.max(1, Math.round(row.sizeBytes / 1024))} KB`;
            const captionPart = row.caption ? ` — caption: ${JSON.stringify(row.caption)}` : "";
            lines.push(`- ${rel} (${row.mimeType}, ${size})${captionPart}`);
            const transcript = transcripts?.get(row.id);
            if (transcript) {
                const langPart = transcript.language
                    ? ` (lang=${transcript.language})`
                    : "";
                const durationPart = transcript.durationSec !== null
                    ? `, ${transcript.durationSec.toFixed(1)}s`
                    : "";
                lines.push(` Voice transcript${langPart}${durationPart}: ${JSON.stringify(transcript.transcript)}`);
            }
            else if (transcriberEnabled &&
                this.voiceTranscriber?.isAudio(row.mimeType)) {
                // Audio attachment but no transcript was produced. Could be too long,
                // a decoder/inference failure, or a model still warming up. Surface a
                // marker so the agent can ask the user to retype rather than silently
                // pretending the audio was readable.
                lines.push(" (voice transcript unavailable — audio may be too long, untranscribable, or the local model is unavailable)");
            }
        }
        // Outbound-delivery contract: generated files must go through the
        // attach skill / outbound-attachments endpoint, never a bare path.
        lines.push("", "If your reply should include a generated file (md/PDF/CSV/image/etc.),", "deliver it via the `attach` skill — write the bytes to a temp path, then", "POST /api/chat/outbound-attachments with `X-Turn-Token: $PA_TURN_TOKEN`.", "`_attachments/` is the read-only inbound staging area, not an output", "location. Never write a filesystem path into your reply and claim you", "created a file unless you actually uploaded it through that endpoint.");
        return lines.join("\n");
    }
|
|
664
|
+
/**
|
|
665
|
+
* Phase 5: set the auth recovery manager so owner DMs like `/auth fix codex`
|
|
666
|
+
* can be intercepted before reaching the agent backend.
|
|
667
|
+
*/
|
|
668
|
+
setAuthRecovery(recovery) {
|
|
669
|
+
this.authRecovery = recovery;
|
|
670
|
+
}
|
|
671
|
+
/**
|
|
672
|
+
* Phase 5 (M2 fix): set the AuthHealthMonitor so `/auth status` can
|
|
673
|
+
* render the full summary in-DM instead of a pointer to the dashboard.
|
|
674
|
+
*/
|
|
675
|
+
setAuthHealthMonitor(monitor) {
|
|
676
|
+
this.authHealthMonitor = monitor;
|
|
677
|
+
}
|
|
678
|
+
/**
|
|
679
|
+
* Wire the bang-command registry so owner DMs are intercepted before the
|
|
680
|
+
* agent path. See docs/design/backlog/messaging-bang-commands.md.
|
|
681
|
+
*/
|
|
682
|
+
setBangCommandRegistry(registry) {
|
|
683
|
+
this.bangCommandRegistry = registry;
|
|
684
|
+
}
|
|
685
|
+
/** Main event processing loop */
|
|
686
|
+
async run() {
|
|
687
|
+
while (!this.shutdown) {
|
|
688
|
+
const event = await this.eventBus.get();
|
|
689
|
+
if (!event)
|
|
690
|
+
break; // EventBus was closed
|
|
691
|
+
void this.handleEvent(event); // fire-and-forget
|
|
692
|
+
}
|
|
693
|
+
}
|
|
694
|
+
stop() {
|
|
695
|
+
this.shutdown = true;
|
|
696
|
+
this.eventBus.close();
|
|
697
|
+
for (const onShutdown of this.shutdownAwaiters) {
|
|
698
|
+
try {
|
|
699
|
+
onShutdown();
|
|
700
|
+
}
|
|
701
|
+
catch {
|
|
702
|
+
// Awaiter callbacks just resolve a promise — never throw — but keep
|
|
703
|
+
// the loop defensive so one bad callback can't strand the rest.
|
|
704
|
+
}
|
|
705
|
+
}
|
|
706
|
+
this.shutdownAwaiters.clear();
|
|
707
|
+
}
|
|
708
|
+
/**
|
|
709
|
+
* Enter setup mode. Called from `POST /setup/start` so the warm gate
|
|
710
|
+
* engages the moment the user opens the dashboard setup flow — before any
|
|
711
|
+
* agent turn runs — so concurrent hourly_check / morning routine / scheduled
|
|
712
|
+
* wake work cannot race with the setup conversation. Persisted to
|
|
713
|
+
* `runtime_state` so the flag survives daemon restart.
|
|
714
|
+
*/
|
|
715
|
+
beginSetupMode(mode) {
|
|
716
|
+
if (this.currentSetupMode !== null && this.currentSetupMode !== mode) {
|
|
717
|
+
logger.warn({ previous: this.currentSetupMode, next: mode }, "Setup mode replaced with a different mode while one was already active");
|
|
718
|
+
}
|
|
719
|
+
this.currentSetupMode = mode;
|
|
720
|
+
try {
|
|
721
|
+
writeRuntimeState(this.db, CURRENT_SETUP_MODE_STATE_KEY, { mode });
|
|
722
|
+
}
|
|
723
|
+
catch (err) {
|
|
724
|
+
// Non-fatal: in-memory state still protects the current process.
|
|
725
|
+
logger.warn({ err, mode }, "Failed to persist setup mode to runtime_state");
|
|
726
|
+
}
|
|
727
|
+
logger.info({ mode }, "Setup mode engaged — autonomous work paused");
|
|
728
|
+
}
|
|
729
|
+
/**
|
|
730
|
+
* Exit setup mode. Called from `POST /setup/save-rules` on success.
|
|
731
|
+
* Idempotent.
|
|
732
|
+
*/
|
|
733
|
+
clearSetupMode() {
|
|
734
|
+
if (this.currentSetupMode === null) {
|
|
735
|
+
// Still attempt a best-effort runtime_state cleanup so any stray row
|
|
736
|
+
// (e.g., from a previous run that crashed before clearing) is removed.
|
|
737
|
+
try {
|
|
738
|
+
deleteRuntimeState(this.db, CURRENT_SETUP_MODE_STATE_KEY);
|
|
739
|
+
}
|
|
740
|
+
catch {
|
|
741
|
+
// ignore
|
|
742
|
+
}
|
|
743
|
+
return;
|
|
744
|
+
}
|
|
745
|
+
const mode = this.currentSetupMode;
|
|
746
|
+
this.currentSetupMode = null;
|
|
747
|
+
try {
|
|
748
|
+
deleteRuntimeState(this.db, CURRENT_SETUP_MODE_STATE_KEY);
|
|
749
|
+
}
|
|
750
|
+
catch (err) {
|
|
751
|
+
logger.warn({ err }, "Failed to clear setup mode from runtime_state (in-memory state cleared)");
|
|
752
|
+
}
|
|
753
|
+
logger.info({ mode }, "Setup mode cleared — autonomous work resumed");
|
|
754
|
+
}
|
|
755
|
+
    /**
     * Observable getter, primarily for tests and the onPromptContextChanged
     * gate. Returns "initial" | "update" while a setup conversation is
     * active, null otherwise.
     */
    getCurrentSetupMode() {
        return this.currentSetupMode;
    }
|
|
759
|
+
/**
|
|
760
|
+
* Management Mode Phase 2 — expose in-flight executions so
|
|
761
|
+
* `/api/setup/migrate-context` can refuse to start while real work is
|
|
762
|
+
* still running, not just while sessions remain marked active.
|
|
763
|
+
*/
|
|
764
|
+
getInFlightExecutions() {
|
|
765
|
+
const executions = [];
|
|
766
|
+
for (const key of this.sessionGates.activeKeys()) {
|
|
767
|
+
executions.push({ kind: "session_chain", key });
|
|
768
|
+
}
|
|
769
|
+
if (this.morningRoutineInProgress) {
|
|
770
|
+
executions.push({ kind: "routine", key: "morning_routine" });
|
|
771
|
+
}
|
|
772
|
+
if (this.hourlyCheckInProgress) {
|
|
773
|
+
executions.push({ kind: "routine", key: "hourly_check" });
|
|
774
|
+
}
|
|
775
|
+
const runningTasks = this.db
|
|
776
|
+
.prepare(`SELECT id, task_type, task_description
|
|
777
|
+
FROM agent_schedule
|
|
778
|
+
WHERE status = 'running'`)
|
|
779
|
+
.all();
|
|
780
|
+
for (const task of runningTasks) {
|
|
781
|
+
executions.push({
|
|
782
|
+
kind: "scheduled_task",
|
|
783
|
+
id: task.id,
|
|
784
|
+
taskType: task.task_type,
|
|
785
|
+
detail: task.task_description,
|
|
786
|
+
});
|
|
787
|
+
}
|
|
788
|
+
return executions;
|
|
789
|
+
}
|
|
790
|
+
/**
|
|
791
|
+
* Gate for autonomous background work (cron routines, hourly_check,
|
|
792
|
+
* scheduled wake tasks, startup catchup, calendar-poller reactive events).
|
|
793
|
+
*
|
|
794
|
+
* Two layers:
|
|
795
|
+
* - **Cold gate**: `rules/management.md` must exist. Before initial setup
|
|
796
|
+
* there is no policy document, no user/profile.md, no today.md — running
|
|
797
|
+
* routines would produce garbage AND, crucially, any loud prompt-context
|
|
798
|
+
* write from such a routine can trigger
|
|
799
|
+
* `onPromptContextChanged -> markActiveDmSessionsStale`, which destroys
|
|
800
|
+
* the in-flight setup conversation on the next user turn.
|
|
801
|
+
* - **Warm gate**: while a setup conversation is active (initial OR
|
|
802
|
+
* update), pause autonomous work even though the file exists. This
|
|
803
|
+
* covers the update flow where the rules file is present but the same
|
|
804
|
+
* race still applies.
|
|
805
|
+
*
|
|
806
|
+
* Returns `null` when allowed, or a string reason when blocked.
|
|
807
|
+
*/
|
|
808
|
+
isAutonomousAllowed() {
|
|
809
|
+
// Management Mode (plan §5.4): schedulers and observer-driven routines
|
|
810
|
+
// must skip ticks while the primary vault is unreachable. Reactive DM
|
|
811
|
+
// sessions still run — the user may be messaging the agent precisely
|
|
812
|
+
// to ask about the broken vault. Writes still hit the context 503 gate.
|
|
813
|
+
if (readDegradedMode(this.db)) {
|
|
814
|
+
return "vault_degraded";
|
|
815
|
+
}
|
|
816
|
+
// Owner-initiated pause via `!stop` (docs/design/backlog/messaging-
|
|
817
|
+
// bang-commands.md). Distinct from setup gates so the dashboard banner
|
|
818
|
+
// and audit rows can surface it independently. Cron callbacks consult
|
|
819
|
+
// this via `setAutonomousGate(() => dispatcher.isAutonomousAllowed())`.
|
|
820
|
+
if (isUserPaused(this.db)) {
|
|
821
|
+
return "user_paused";
|
|
822
|
+
}
|
|
823
|
+
const rulesPath = join(getContextDir(this.config, this.db), CONTEXT_RELATIVE_PATHS.rules.management);
|
|
824
|
+
if (!existsSync(rulesPath)) {
|
|
825
|
+
return "setup_incomplete";
|
|
826
|
+
}
|
|
827
|
+
if (this.currentSetupMode !== null) {
|
|
828
|
+
return "setup_in_progress";
|
|
829
|
+
}
|
|
830
|
+
return null;
|
|
831
|
+
}
|
|
832
|
+
/**
|
|
833
|
+
* Process a catchup or bootstrap event synchronously without going through
|
|
834
|
+
* the EventBus loop. Uses the same semaphore and error-handling path as the
|
|
835
|
+
* normal dispatcher.
|
|
836
|
+
*/
|
|
837
|
+
async processInline(event) {
|
|
838
|
+
await this.handleEventInner(event);
|
|
839
|
+
}
|
|
840
|
+
/** Get configured services set, rebuilding when ServiceRegistry changes. */
|
|
841
|
+
getConfiguredServices() {
|
|
842
|
+
// ServiceRegistry is mutable (services come online after OAuth etc.), so
|
|
843
|
+
// rebuild each call. buildConfiguredServices is a cheap set construction.
|
|
844
|
+
// GitHub is sourced from the unified `repositories` table — also live so
|
|
845
|
+
// a row added via /api/repositories shows up on the next session
|
|
846
|
+
// materialization without a daemon restart.
|
|
847
|
+
const hasGithub = selectGithubRepoSlugs(this.db).length > 0;
|
|
848
|
+
if (this.services) {
|
|
849
|
+
return buildConfiguredServices(this.config, {
|
|
850
|
+
...this.services,
|
|
851
|
+
github: hasGithub,
|
|
852
|
+
});
|
|
853
|
+
}
|
|
854
|
+
// Test fallback (no ServiceRegistry). Cannot cache when `hasGithub`
|
|
855
|
+
// changes between calls, so just rebuild — the construction is cheap.
|
|
856
|
+
return buildConfiguredServices(this.config, { github: hasGithub });
|
|
857
|
+
}
|
|
858
|
+
/** Snapshot active mail accounts (§Phase 5 accounts.md materialization). */
|
|
859
|
+
getActiveMailAccounts() {
|
|
860
|
+
return this.services?.mail?.listActiveAccounts() ?? [];
|
|
861
|
+
}
|
|
862
|
+
isReactive(event) {
|
|
863
|
+
if (isMessageEvent(event) && (event.isDm || event.isMention))
|
|
864
|
+
return true;
|
|
865
|
+
if (event.priority === EventPriority.CRITICAL)
|
|
866
|
+
return true;
|
|
867
|
+
// Dashboard-triggered tasks are user-initiated — treat as reactive.
|
|
868
|
+
// Both the regenerate button and the Knowledge upload form fire while
|
|
869
|
+
// the user is on the dashboard waiting for a response, so neither
|
|
870
|
+
// should be gated by setup mode or the autonomous cost cap.
|
|
871
|
+
if (event.source === "dashboard_regenerate")
|
|
872
|
+
return true;
|
|
873
|
+
if (isKnowledgeImportEvent(event))
|
|
874
|
+
return true;
|
|
875
|
+
return false;
|
|
876
|
+
}
|
|
877
|
+
/**
|
|
878
|
+
* Check whether this autonomous event should be skipped because the daily
|
|
879
|
+
* autonomous cost cap has been exceeded. Uses priority-based degradation:
|
|
880
|
+
* hourly_check (lowest priority, skipped first) → roadmap_refresh →
|
|
881
|
+
* evening_review → morning_routine (highest, last to be cut).
|
|
882
|
+
*
|
|
883
|
+
* Lower-priority events are skipped at 100% of cap; higher-priority events
|
|
884
|
+
* only at 150%+, giving headroom for the morning briefing.
|
|
885
|
+
*/
|
|
886
|
+
shouldSkipForCostCap(event) {
|
|
887
|
+
const cap = this.config.autonomousDailyCostCapUsd;
|
|
888
|
+
if (cap == null)
|
|
889
|
+
return false;
|
|
890
|
+
const tz = this.config.timezone || undefined;
|
|
891
|
+
const bounds = getAgentDayBoundsUtc(tz, this.config.dayBoundaryHour);
|
|
892
|
+
// better-sqlite3 caches prepared statements internally by SQL string,
|
|
893
|
+
// so this.db.prepare() with a static string is effectively free.
|
|
894
|
+
const row = this.db
|
|
895
|
+
.prepare(EventDispatcher.COST_CAP_SQL)
|
|
896
|
+
.get(bounds.start, bounds.end);
|
|
897
|
+
const todayCost = row.cost;
|
|
898
|
+
if (todayCost < cap)
|
|
899
|
+
return false;
|
|
900
|
+
// Priority-based degradation: assign each routine a threshold multiplier.
|
|
901
|
+
// Lower multiplier = skipped sooner.
|
|
902
|
+
const routine = isRoutineEvent(event)
|
|
903
|
+
? event.routine
|
|
904
|
+
: null;
|
|
905
|
+
const thresholds = {
|
|
906
|
+
hourly_check: 1.0, // skipped first (at 100% of cap)
|
|
907
|
+
roadmap_refresh: 1.2, // skipped at 120%
|
|
908
|
+
evening_review: 1.5, // skipped at 150%
|
|
909
|
+
morning_routine: 2.0, // last to be cut (only at 200%)
|
|
910
|
+
};
|
|
911
|
+
const threshold = routine ? (thresholds[routine] ?? 1.0) : 1.0;
|
|
912
|
+
return todayCost >= cap * threshold;
|
|
913
|
+
}
|
|
914
|
+
async handleEvent(event) {
|
|
915
|
+
try {
|
|
916
|
+
await this.handleEventInner(event);
|
|
917
|
+
}
|
|
918
|
+
catch (err) {
|
|
919
|
+
// Top-level catch prevents unhandled promise rejections from crashing the process
|
|
920
|
+
// (handleEvent is called with `void` — fire-and-forget — so rejections are unhandled)
|
|
921
|
+
logger.error({ err, eventType: event.type, source: event.source }, "Unhandled error in event processing");
|
|
922
|
+
}
|
|
923
|
+
}
|
|
924
|
+
async handleEventInner(event) {
|
|
925
|
+
const sem = this.isReactive(event) ? this.reactiveSem : this.autonomousSem;
|
|
926
|
+
await sem.acquire();
|
|
927
|
+
try {
|
|
928
|
+
await this.dispatchSafe(event);
|
|
929
|
+
}
|
|
930
|
+
finally {
|
|
931
|
+
sem.release();
|
|
932
|
+
}
|
|
933
|
+
}
|
|
934
|
+
async triggerHourlyCheck(source, options = {}) {
|
|
935
|
+
const forced = options.force === true;
|
|
936
|
+
const minObservations = this.config.hourlyCheckMinObservations;
|
|
937
|
+
// C1 fix: atomic check-and-set on hourlyCheckInProgress BEFORE any await
|
|
938
|
+
// boundary. Previously `await this.isMorningRoutineActive()` yielded to
|
|
939
|
+
// the microtask queue, allowing cron + /api/agent/run-now arriving in
|
|
940
|
+
// the same tick to both observe `hourlyCheckInProgress === false` and
|
|
941
|
+
// both enqueue. Because Node is single-threaded and better-sqlite3 is
|
|
942
|
+
// synchronous, doing set-first + sync checks + rollback-on-skip is now
|
|
943
|
+
// race-free.
|
|
944
|
+
if (this.hourlyCheckInProgress) {
|
|
945
|
+
logger.info({ source }, "Hourly check skipped — previous hourly check is still running");
|
|
946
|
+
return {
|
|
947
|
+
status: "skipped",
|
|
948
|
+
reason: "hourly_check_in_progress",
|
|
949
|
+
minObservations,
|
|
950
|
+
forced,
|
|
951
|
+
};
|
|
952
|
+
}
|
|
953
|
+
this.hourlyCheckInProgress = true;
|
|
954
|
+
// Rollback flag unless we actually enqueue the event or land on a
|
|
955
|
+
// silent path that owns its own reset.
|
|
956
|
+
let enqueued = false;
|
|
957
|
+
let silentPathOwnsReset = false;
|
|
958
|
+
try {
|
|
959
|
+
const setupBlock = this.isAutonomousAllowed();
|
|
960
|
+
if (setupBlock !== null) {
|
|
961
|
+
logger.info({ source, reason: setupBlock }, "Hourly check skipped — autonomous work paused for setup");
|
|
962
|
+
return {
|
|
963
|
+
status: "skipped",
|
|
964
|
+
reason: setupBlock,
|
|
965
|
+
minObservations,
|
|
966
|
+
forced,
|
|
967
|
+
};
|
|
968
|
+
}
|
|
969
|
+
if (this.isMorningRoutineActive()) {
|
|
970
|
+
logger.info({ source }, "Hourly check skipped — morning routine is active");
|
|
971
|
+
return {
|
|
972
|
+
status: "skipped",
|
|
973
|
+
reason: "morning_routine_active",
|
|
974
|
+
minObservations,
|
|
975
|
+
forced,
|
|
976
|
+
};
|
|
977
|
+
}
|
|
978
|
+
// Refresh delegated-sync snapshots for any cadence the operator
|
|
979
|
+
// left opted-OUT (the post-Phase-9 default). Without this, Gmail /
|
|
980
|
+
// Notion observations would dry up entirely in delegated mode and
|
|
981
|
+
// the routine.hourly_check.delegated.* task flow's Step 0a / 0c
|
|
982
|
+
// would have nothing to consume — Step 1's `/api/observations`
|
|
983
|
+
// call would return only Obsidian / Git rows. Calendar's Step 0b
|
|
984
|
+
// already fetches actively via `/reconcile`, so the gap is
|
|
985
|
+
// specific to gmail / notion. See `docs/design/appendices/
|
|
986
|
+
// delegated-sync-opt-in.md` and the worker's
|
|
987
|
+
// `runDisabledCadencesForHourlyCheck` doc-comment for the full
|
|
988
|
+
// reasoning. Failures are logged but do NOT block the check —
|
|
989
|
+
// a stuck cadence cannot starve the entire hourly loop.
|
|
990
|
+
if (this.delegatedSyncRefresh) {
|
|
991
|
+
try {
|
|
992
|
+
await this.delegatedSyncRefresh();
|
|
993
|
+
}
|
|
994
|
+
catch (err) {
|
|
995
|
+
logger.warn({ err, source }, "Pre-hourly-check delegated sync refresh failed; proceeding with stale snapshot");
|
|
996
|
+
}
|
|
997
|
+
}
|
|
998
|
+
const pendingCount = getPendingCount(this.db, { actorFilter: "user" });
|
|
999
|
+
// cost-reduction-structural §B — three-stage gate.
|
|
1000
|
+
// Mode `off` falls through to the legacy min-observations gate +
|
|
1001
|
+
// straight enqueue (rollback path for the gate); `shadow`/`live`
|
|
1002
|
+
// compute the gate verdict before any other branch fires.
|
|
1003
|
+
const gateMode = (this.config.hourlyCheckGateMode
|
|
1004
|
+
?? "shadow");
|
|
1005
|
+
if (gateMode === "off") {
|
|
1006
|
+
if (!forced && pendingCount < minObservations) {
|
|
1007
|
+
logger.debug({ source, pendingCount, minObservations }, "Hourly check skipped — not enough pending observations");
|
|
1008
|
+
return {
|
|
1009
|
+
status: "skipped",
|
|
1010
|
+
reason: "below_threshold",
|
|
1011
|
+
pendingCount,
|
|
1012
|
+
minObservations,
|
|
1013
|
+
forced,
|
|
1014
|
+
};
|
|
1015
|
+
}
|
|
1016
|
+
await this.eventBus.put({
|
|
1017
|
+
...createEvent({
|
|
1018
|
+
type: "routine.hourly_check",
|
|
1019
|
+
source,
|
|
1020
|
+
priority: EventPriority.NORMAL,
|
|
1021
|
+
}),
|
|
1022
|
+
routine: "hourly_check",
|
|
1023
|
+
data: { pendingCount, forced },
|
|
1024
|
+
...(options.requestedModel ? { requestedModel: options.requestedModel } : {}),
|
|
1025
|
+
});
|
|
1026
|
+
enqueued = true;
|
|
1027
|
+
return {
|
|
1028
|
+
status: "queued",
|
|
1029
|
+
pendingCount,
|
|
1030
|
+
minObservations,
|
|
1031
|
+
forced,
|
|
1032
|
+
gateMode: "off",
|
|
1033
|
+
};
|
|
1034
|
+
}
|
|
1035
|
+
const decision = this.computeHourlyCheckGateDecision();
|
|
1036
|
+
if (gateMode === "shadow") {
|
|
1037
|
+
// Shadow mode: log the gate verdict, then proceed to Stage 3
|
|
1038
|
+
// exactly as before so the existing pipeline is uncovered.
|
|
1039
|
+
this.logGateAuditRow(decision, {
|
|
1040
|
+
mode: "shadow",
|
|
1041
|
+
appliedDecision: "stage3_shadow",
|
|
1042
|
+
forced,
|
|
1043
|
+
});
|
|
1044
|
+
if (!forced && pendingCount < minObservations) {
|
|
1045
|
+
logger.debug({ source, pendingCount, minObservations, gateStage: decision.stage }, "Hourly check skipped (shadow) — not enough pending observations");
|
|
1046
|
+
return {
|
|
1047
|
+
status: "skipped",
|
|
1048
|
+
reason: "below_threshold",
|
|
1049
|
+
pendingCount,
|
|
1050
|
+
minObservations,
|
|
1051
|
+
forced,
|
|
1052
|
+
gateMode: "shadow",
|
|
1053
|
+
gateStage: decision.stage,
|
|
1054
|
+
gateReason: decision.reason,
|
|
1055
|
+
appliedStage: "stage3_shadow",
|
|
1056
|
+
};
|
|
1057
|
+
}
|
|
1058
|
+
await this.enqueueStage3HourlyCheck(source, { ...decision }, { mode: "shadow", forced, pendingCount, requestedModel: options.requestedModel });
|
|
1059
|
+
enqueued = true;
|
|
1060
|
+
return {
|
|
1061
|
+
status: "queued",
|
|
1062
|
+
pendingCount,
|
|
1063
|
+
minObservations,
|
|
1064
|
+
forced,
|
|
1065
|
+
gateMode: "shadow",
|
|
1066
|
+
gateStage: decision.stage,
|
|
1067
|
+
gateReason: decision.reason,
|
|
1068
|
+
appliedStage: "stage3_shadow",
|
|
1069
|
+
};
|
|
1070
|
+
}
|
|
1071
|
+
// gateMode === 'live'
|
|
1072
|
+
// Honour the legacy min-observations floor only when the gate
|
|
1073
|
+
// would have proceeded to Stage 3 anyway. The silent gate path
|
|
1074
|
+
// already short-circuits the noisy "1 obs, no signal" case below
|
|
1075
|
+
// it, so keeping the floor active there would just suppress the
|
|
1076
|
+
// gate's telemetry.
|
|
1077
|
+
if (!forced
|
|
1078
|
+
&& decision.stage === "stage3"
|
|
1079
|
+
&& pendingCount < minObservations) {
|
|
1080
|
+
this.logGateAuditRow(decision, {
|
|
1081
|
+
mode: "live",
|
|
1082
|
+
appliedDecision: "stage3",
|
|
1083
|
+
forced,
|
|
1084
|
+
// Mark the row as a skip even though the gate wanted Stage 3 —
|
|
1085
|
+
// the legacy min-observations floor short-circuited it. Without
|
|
1086
|
+
// this, every `below_threshold` skip would persist as a phantom
|
|
1087
|
+
// `result='success'` row in the audit feed.
|
|
1088
|
+
resultOverride: "skipped",
|
|
1089
|
+
extra: { skipped: "below_threshold" },
|
|
1090
|
+
});
|
|
1091
|
+
return {
|
|
1092
|
+
status: "skipped",
|
|
1093
|
+
reason: "below_threshold",
|
|
1094
|
+
pendingCount,
|
|
1095
|
+
minObservations,
|
|
1096
|
+
forced,
|
|
1097
|
+
gateMode: "live",
|
|
1098
|
+
gateStage: decision.stage,
|
|
1099
|
+
gateReason: decision.reason,
|
|
1100
|
+
};
|
|
1101
|
+
}
|
|
1102
|
+
if (decision.stage === "stage0_silent") {
|
|
1103
|
+
const silentResult = this.runSilentHourlyCheckPath(decision, "stage0_silent", {
|
|
1104
|
+
source,
|
|
1105
|
+
forced,
|
|
1106
|
+
});
|
|
1107
|
+
silentPathOwnsReset = true;
|
|
1108
|
+
return {
|
|
1109
|
+
...silentResult,
|
|
1110
|
+
minObservations,
|
|
1111
|
+
gateMode: "live",
|
|
1112
|
+
gateStage: decision.stage,
|
|
1113
|
+
gateReason: decision.reason,
|
|
1114
|
+
appliedStage: "stage0_silent",
|
|
1115
|
+
};
|
|
1116
|
+
}
|
|
1117
|
+
if (decision.stage === "stage2") {
|
|
1118
|
+
const verdict = await this.runStage2Triage(decision, source);
|
|
1119
|
+
if (verdict === "log_only") {
|
|
1120
|
+
const silentResult = this.runSilentHourlyCheckPath(decision, "stage2_log_only", { source, forced });
|
|
1121
|
+
silentPathOwnsReset = true;
|
|
1122
|
+
return {
|
|
1123
|
+
...silentResult,
|
|
1124
|
+
minObservations,
|
|
1125
|
+
gateMode: "live",
|
|
1126
|
+
gateStage: decision.stage,
|
|
1127
|
+
gateReason: decision.reason,
|
|
1128
|
+
appliedStage: "stage2_log_only",
|
|
1129
|
+
};
|
|
1130
|
+
}
|
|
1131
|
+
// verdict === 'escalate' OR 'failed' (failed → cautious escalate
|
|
1132
|
+
// since a malformed JSON should not silently skip a hour's worth
|
|
1133
|
+
// of signals; matches the prompt contract's stated default).
|
|
1134
|
+
await this.enqueueStage3HourlyCheck(source, decision, {
|
|
1135
|
+
mode: "live",
|
|
1136
|
+
forced,
|
|
1137
|
+
pendingCount,
|
|
1138
|
+
requestedModel: options.requestedModel,
|
|
1139
|
+
stage2Verdict: verdict,
|
|
1140
|
+
});
|
|
1141
|
+
enqueued = true;
|
|
1142
|
+
return {
|
|
1143
|
+
status: "queued",
|
|
1144
|
+
pendingCount,
|
|
1145
|
+
minObservations,
|
|
1146
|
+
forced,
|
|
1147
|
+
gateMode: "live",
|
|
1148
|
+
gateStage: decision.stage,
|
|
1149
|
+
gateReason: decision.reason,
|
|
1150
|
+
appliedStage: "stage3",
|
|
1151
|
+
};
|
|
1152
|
+
}
|
|
1153
|
+
// decision.stage === 'stage3'
|
|
1154
|
+
await this.enqueueStage3HourlyCheck(source, decision, { mode: "live", forced, pendingCount, requestedModel: options.requestedModel });
|
|
1155
|
+
enqueued = true;
|
|
1156
|
+
return {
|
|
1157
|
+
status: "queued",
|
|
1158
|
+
pendingCount,
|
|
1159
|
+
minObservations,
|
|
1160
|
+
forced,
|
|
1161
|
+
gateMode: "live",
|
|
1162
|
+
gateStage: decision.stage,
|
|
1163
|
+
gateReason: decision.reason,
|
|
1164
|
+
appliedStage: "stage3",
|
|
1165
|
+
};
|
|
1166
|
+
}
|
|
1167
|
+
finally {
|
|
1168
|
+
// Flag is only left true when we successfully enqueued OR the
|
|
1169
|
+
// silent path explicitly opted out of resetting (it resets at
|
|
1170
|
+
// the end of its own helper). The event loop's dispatchSafe()
|
|
1171
|
+
// finally block clears the flag when an enqueued routine event
|
|
1172
|
+
// finishes processing.
|
|
1173
|
+
if (!enqueued && !silentPathOwnsReset) {
|
|
1174
|
+
this.hourlyCheckInProgress = false;
|
|
1175
|
+
}
|
|
1176
|
+
}
|
|
1177
|
+
}
|
|
1178
|
+
/**
|
|
1179
|
+
* cost-reduction-structural §B — pull a fresh signal snapshot and run
|
|
1180
|
+
* the deterministic gate. Helper so the dispatcher's call site stays
|
|
1181
|
+
* compact and tests can spy on the boundary.
|
|
1182
|
+
*/
|
|
1183
|
+
computeHourlyCheckGateDecision() {
|
|
1184
|
+
const todayMd = this.readTodayMdSafe();
|
|
1185
|
+
const signals = computeHourlyCheckSignals(this.db, {
|
|
1186
|
+
vipMailSenders: this.config.vipMailSenders ?? [],
|
|
1187
|
+
todayMd,
|
|
1188
|
+
// Pass the configured agent timezone so `agentPlanOverdueCount`
|
|
1189
|
+
// compares HH:MM rows in the right zone. Falls back to the
|
|
1190
|
+
// engine's local TZ inside `computeHourlyCheckSignals` when this
|
|
1191
|
+
// config field is empty (the common single-user case).
|
|
1192
|
+
...(this.config.timezone
|
|
1193
|
+
? { agentTimezone: this.config.timezone }
|
|
1194
|
+
: {}),
|
|
1195
|
+
});
|
|
1196
|
+
return decideStage(signals, {
|
|
1197
|
+
heartbeatHours: this.config.hourlyCheckHeartbeatHours ?? 4,
|
|
1198
|
+
stage2Enabled: this.config.hourlyCheckStage2Enabled ?? false,
|
|
1199
|
+
pendingObsLowSignalCeiling: this.config.hourlyCheckLowSignalPendingCeiling ?? 0,
|
|
1200
|
+
});
|
|
1201
|
+
}
|
|
1202
|
+
readTodayMdSafe() {
|
|
1203
|
+
try {
|
|
1204
|
+
const path = join(getContextDir(this.config, this.db), CONTEXT_RELATIVE_PATHS.today);
|
|
1205
|
+
if (!existsSync(path))
|
|
1206
|
+
return null;
|
|
1207
|
+
return readFileSync(path, "utf-8");
|
|
1208
|
+
}
|
|
1209
|
+
catch (err) {
|
|
1210
|
+
logger.warn({ err }, "Failed to read today.md for hourly_check signals");
|
|
1211
|
+
return null;
|
|
1212
|
+
}
|
|
1213
|
+
}
|
|
1214
|
+
/**
|
|
1215
|
+
* cost-reduction-structural §B — daemon-direct silent path. Used by
|
|
1216
|
+
* Stage 0 and Stage 2 log-only verdicts. Consumes pending user
|
|
1217
|
+
* observations + appends a single Agent Log line + records the gate
|
|
1218
|
+
* verdict to `agent_actions`. The flag is reset before return.
|
|
1219
|
+
*/
|
|
1220
|
+
runSilentHourlyCheckPath(decision, appliedDecision, ctx) {
|
|
1221
|
+
const reason = appliedDecision === "stage0_silent"
|
|
1222
|
+
? "gate_stage0_silent"
|
|
1223
|
+
: "gate_stage2_log_only";
|
|
1224
|
+
let pendingCount = 0;
|
|
1225
|
+
try {
|
|
1226
|
+
pendingCount = decision.signals.pendingObsCount;
|
|
1227
|
+
// Append a single bullet to today.md ## Agent Log. Best-effort —
|
|
1228
|
+
// when today.md is missing or the lock is held, we still consume
|
|
1229
|
+
// the observations so the queue doesn't grow indefinitely.
|
|
1230
|
+
const message = appliedDecision === "stage0_silent"
|
|
1231
|
+
? `[hourly_check] Quiet (${decision.reason}) — ${pendingCount} obs consumed silently`
|
|
1232
|
+
: `[hourly_check] Stage 2 log-only (${decision.reason}) — ${pendingCount} obs consumed silently`;
|
|
1233
|
+
if (this.todayWriteLock) {
|
|
1234
|
+
appendAgentLogLine({
|
|
1235
|
+
contextDir: getContextDir(this.config, this.db),
|
|
1236
|
+
message,
|
|
1237
|
+
todayWriteLock: this.todayWriteLock,
|
|
1238
|
+
timezone: this.config.timezone || undefined,
|
|
1239
|
+
});
|
|
1240
|
+
}
|
|
1241
|
+
// Consume the observations under the gate's correlation id so
|
|
1242
|
+
// dashboards can attribute "consumed by gate" rows separately
|
|
1243
|
+
// from agent-driven consumption.
|
|
1244
|
+
try {
|
|
1245
|
+
const pending = getPendingObservations(this.db, {
|
|
1246
|
+
actorFilter: "user",
|
|
1247
|
+
limit: 100,
|
|
1248
|
+
});
|
|
1249
|
+
if (pending.length > 0) {
|
|
1250
|
+
consumeObservations(this.db, pending.map((row) => row.id), `hourly_check_gate:${appliedDecision}`);
|
|
1251
|
+
}
|
|
1252
|
+
}
|
|
1253
|
+
catch (err) {
|
|
1254
|
+
logger.warn({ err }, "Failed to consume observations on silent gate path");
|
|
1255
|
+
}
|
|
1256
|
+
this.logGateAuditRow(decision, {
|
|
1257
|
+
mode: "live",
|
|
1258
|
+
appliedDecision,
|
|
1259
|
+
forced: ctx.forced,
|
|
1260
|
+
});
|
|
1261
|
+
logger.info({
|
|
1262
|
+
source: ctx.source,
|
|
1263
|
+
gateStage: decision.stage,
|
|
1264
|
+
gateReason: decision.reason,
|
|
1265
|
+
appliedDecision,
|
|
1266
|
+
pendingCount,
|
|
1267
|
+
}, "Hourly check silenced by Stage-1 gate");
|
|
1268
|
+
}
|
|
1269
|
+
finally {
|
|
1270
|
+
this.hourlyCheckInProgress = false;
|
|
1271
|
+
}
|
|
1272
|
+
return {
|
|
1273
|
+
status: "skipped",
|
|
1274
|
+
reason,
|
|
1275
|
+
pendingCount,
|
|
1276
|
+
forced: ctx.forced,
|
|
1277
|
+
};
|
|
1278
|
+
}
|
|
1279
|
+
async enqueueStage3HourlyCheck(source, decision, extra) {
|
|
1280
|
+
const gateBlock = renderGateDecisionBlock(decision, {
|
|
1281
|
+
mode: extra.mode,
|
|
1282
|
+
forced: extra.forced,
|
|
1283
|
+
});
|
|
1284
|
+
if (extra.mode === "live") {
|
|
1285
|
+
this.logGateAuditRow(decision, {
|
|
1286
|
+
mode: extra.mode,
|
|
1287
|
+
appliedDecision: "stage3",
|
|
1288
|
+
forced: extra.forced,
|
|
1289
|
+
...(extra.stage2Verdict ? { stage2Verdict: extra.stage2Verdict } : {}),
|
|
1290
|
+
});
|
|
1291
|
+
}
|
|
1292
|
+
await this.eventBus.put({
|
|
1293
|
+
...createEvent({
|
|
1294
|
+
type: "routine.hourly_check",
|
|
1295
|
+
source,
|
|
1296
|
+
priority: EventPriority.NORMAL,
|
|
1297
|
+
}),
|
|
1298
|
+
routine: "hourly_check",
|
|
1299
|
+
data: {
|
|
1300
|
+
pendingCount: extra.pendingCount,
|
|
1301
|
+
forced: extra.forced,
|
|
1302
|
+
gateDecision: {
|
|
1303
|
+
mode: extra.mode,
|
|
1304
|
+
stage: decision.stage,
|
|
1305
|
+
reason: decision.reason,
|
|
1306
|
+
forced: extra.forced,
|
|
1307
|
+
...(extra.stage2Verdict ? { stage2Verdict: extra.stage2Verdict } : {}),
|
|
1308
|
+
block: gateBlock,
|
|
1309
|
+
},
|
|
1310
|
+
},
|
|
1311
|
+
...(extra.requestedModel ? { requestedModel: extra.requestedModel } : {}),
|
|
1312
|
+
});
|
|
1313
|
+
}
|
|
1314
|
+
logGateAuditRow(decision, params) {
|
|
1315
|
+
try {
|
|
1316
|
+
// The gate-audit helper only knows about the canonical stages
|
|
1317
|
+
// (gate output) plus the shadow-mode marker. Map the silent-path
|
|
1318
|
+
// alias `stage2_log_only` onto its canonical sibling so the
|
|
1319
|
+
// helper's typing stays narrow; the verdict is preserved verbatim
|
|
1320
|
+
// alongside `stage_reached` in the merged detail.
|
|
1321
|
+
const auditAppliedDecision = params.appliedDecision === "stage2_log_only"
|
|
1322
|
+
? "stage0_silent"
|
|
1323
|
+
: params.appliedDecision;
|
|
1324
|
+
const detail = {
|
|
1325
|
+
...buildGateAuditDetail(decision, {
|
|
1326
|
+
mode: params.mode,
|
|
1327
|
+
appliedDecision: auditAppliedDecision,
|
|
1328
|
+
forced: params.forced,
|
|
1329
|
+
...(params.stage2Verdict ? { stage2Verdict: params.stage2Verdict } : {}),
|
|
1330
|
+
}),
|
|
1331
|
+
// Always reflect the *real* applied stage in the row regardless
|
|
1332
|
+
// of the alias mapping above.
|
|
1333
|
+
stage_reached: params.appliedDecision,
|
|
1334
|
+
...(params.extra ?? {}),
|
|
1335
|
+
};
|
|
1336
|
+
const isShadow = params.appliedDecision === "stage3_shadow";
|
|
1337
|
+
const isSilentPath = params.appliedDecision === "stage0_silent"
|
|
1338
|
+
|| params.appliedDecision === "stage2_log_only";
|
|
1339
|
+
const result = params.resultOverride
|
|
1340
|
+
?? (isShadow ? "success" : isSilentPath ? "skipped" : "success");
|
|
1341
|
+
this.db
|
|
1342
|
+
.prepare(`INSERT INTO agent_actions
|
|
1343
|
+
(action_type, trigger, result, detail, started_at, completed_at)
|
|
1344
|
+
VALUES ('hourly_check.gate', 'autonomous', ?, json(?), datetime('now'), datetime('now'))`)
|
|
1345
|
+
.run(result, JSON.stringify(detail));
|
|
1346
|
+
}
|
|
1347
|
+
catch (err) {
|
|
1348
|
+
logger.warn({ err }, "Failed to record hourly_check.gate audit row");
|
|
1349
|
+
}
|
|
1350
|
+
}
|
|
1351
|
+
/**
|
|
1352
|
+
* cost-reduction-structural §B Stage 2 — synchronous lite-tier triage.
|
|
1353
|
+
* Builds a `routine.hourly_check.triage` RoutineEvent and runs it
|
|
1354
|
+
* inline through the agent router (NOT the EventBus, so the result
|
|
1355
|
+
* is available before we decide whether to silence or escalate).
|
|
1356
|
+
*
|
|
1357
|
+
* The agent contract is JSON-only output (`{ "action": "log_only" |
|
|
1358
|
+
* "escalate", "reason": "..." }`); on parse failure we return
|
|
1359
|
+
* `'failed'` and the caller treats that as cautious escalate.
|
|
1360
|
+
*
|
|
1361
|
+
* Tool/turn clamp (defense-in-depth):
|
|
1362
|
+
* - `allowedToolsOverride: []` removes every tool from the SDK's
|
|
1363
|
+
* allowlist for the spawn. Stage 2 has nothing to do but emit a
|
|
1364
|
+
* JSON line; the design's "no write tools" rule is enforced here
|
|
1365
|
+
* instead of relying on the prompt alone.
|
|
1366
|
+
* - `maxTurns: 1` caps the spawn at a single assistant turn. Even
|
|
1367
|
+
* if a future prompt change accidentally invites tool use, the
|
|
1368
|
+
* spawn cannot loop. Codex/Gemini have no per-spawn `allowedTools`
|
|
1369
|
+
* surface today (acknowledged gap in `agent-core.ts`); the
|
|
1370
|
+
* `maxTurns` cap and process_backend_config envelope are the
|
|
1371
|
+
* remaining safety floor on those backends.
|
|
1372
|
+
*/
|
|
1373
|
+
async runStage2Triage(decision, source) {
|
|
1374
|
+
const triageEvent = {
|
|
1375
|
+
...createEvent({
|
|
1376
|
+
type: "routine.hourly_check.triage",
|
|
1377
|
+
source,
|
|
1378
|
+
priority: EventPriority.NORMAL,
|
|
1379
|
+
}),
|
|
1380
|
+
routine: "hourly_check.triage",
|
|
1381
|
+
data: {
|
|
1382
|
+
forced: false,
|
|
1383
|
+
gateDecision: {
|
|
1384
|
+
mode: "live",
|
|
1385
|
+
stage: decision.stage,
|
|
1386
|
+
reason: decision.reason,
|
|
1387
|
+
forced: false,
|
|
1388
|
+
block: renderGateDecisionBlock(decision, { mode: "live", forced: false }),
|
|
1389
|
+
},
|
|
1390
|
+
},
|
|
1391
|
+
};
|
|
1392
|
+
let context;
|
|
1393
|
+
try {
|
|
1394
|
+
context = await this.contextBuilder.build(triageEvent);
|
|
1395
|
+
}
|
|
1396
|
+
catch (err) {
|
|
1397
|
+
logger.error({ err }, "Stage 2 triage context build failed");
|
|
1398
|
+
return "failed";
|
|
1399
|
+
}
|
|
1400
|
+
const processKey = "routine.hourly_check.triage";
|
|
1401
|
+
const reassemblePrompt = (bid) => this.assemblePrompt(triageEvent.type, processKey, bid);
|
|
1402
|
+
let binding;
|
|
1403
|
+
try {
|
|
1404
|
+
binding = this.agentRouter.resolveBinding(triageEvent, { processKey });
|
|
1405
|
+
}
|
|
1406
|
+
catch (err) {
|
|
1407
|
+
logger.error({ err }, "Stage 2 triage binding resolve failed");
|
|
1408
|
+
return "failed";
|
|
1409
|
+
}
|
|
1410
|
+
const prompt = reassemblePrompt(binding.main.backendId);
|
|
1411
|
+
let result;
|
|
1412
|
+
try {
|
|
1413
|
+
result = await this.agentRouter.execute({
|
|
1414
|
+
prompt,
|
|
1415
|
+
context,
|
|
1416
|
+
event: triageEvent,
|
|
1417
|
+
processKey,
|
|
1418
|
+
preResolvedBinding: binding,
|
|
1419
|
+
reassemblePrompt,
|
|
1420
|
+
// Defense-in-depth: Stage 2 must not call any tool. Empty
|
|
1421
|
+
// `allowedToolsOverride` REPLACES the default allowlist on
|
|
1422
|
+
// Claude (Codex/Gemini have no per-spawn `allowedTools` surface
|
|
1423
|
+
// — acknowledged gap in `agent-core.ts`). The `max_turns=1` cap
|
|
1424
|
+
// for the spawn comes from the seeded `process_backend_config`
|
|
1425
|
+
// row for `routine.hourly_check.triage` (see `db/schema.ts`),
|
|
1426
|
+
// which the router reads via `binding.main.maxTurns`. Together
|
|
1427
|
+
// these mean: zero tools on Claude, one assistant turn on every
|
|
1428
|
+
// backend.
|
|
1429
|
+
allowedToolsOverride: [],
|
|
1430
|
+
});
|
|
1431
|
+
}
|
|
1432
|
+
catch (err) {
|
|
1433
|
+
logger.error({ err }, "Stage 2 triage agent execution failed");
|
|
1434
|
+
return "failed";
|
|
1435
|
+
}
|
|
1436
|
+
// Audit row for the lite-tier session itself, distinct from the gate
|
|
1437
|
+
// audit row written by `logGateAuditRow`.
|
|
1438
|
+
try {
|
|
1439
|
+
this.audit.logAction({
|
|
1440
|
+
event: triageEvent,
|
|
1441
|
+
model: result.model,
|
|
1442
|
+
costUsd: result.costUsd,
|
|
1443
|
+
usage: result.usage,
|
|
1444
|
+
modelUsage: result.modelUsage,
|
|
1445
|
+
durationMs: result.durationMs,
|
|
1446
|
+
numTurns: result.numTurns,
|
|
1447
|
+
trigger: "autonomous",
|
|
1448
|
+
backend: result.backendId,
|
|
1449
|
+
costSource: result.costSource,
|
|
1450
|
+
contextUpdated: result.contextUpdated,
|
|
1451
|
+
advisorCallCount: result.advisorCallCount,
|
|
1452
|
+
});
|
|
1453
|
+
}
|
|
1454
|
+
catch (err) {
|
|
1455
|
+
logger.warn({ err }, "Failed to log Stage 2 triage agent_actions row");
|
|
1456
|
+
}
|
|
1457
|
+
return parseStage2Verdict(result.output);
|
|
1458
|
+
}
|
|
1459
|
+
/**
|
|
1460
|
+
* Advisory check: is a morning routine execution or retry currently in
|
|
1461
|
+
* progress? Synchronous (no async) so callers can atomically gate other
|
|
1462
|
+
* work without introducing microtask race windows.
|
|
1463
|
+
*
|
|
1464
|
+
* C5 fix: detects retry rows via `task_context.routine='morning_routine'`
|
|
1465
|
+
* instead of a fragile `task_description LIKE 'Morning routine retry%'`
|
|
1466
|
+
* substring match. The schedule row's task_context is written by
|
|
1467
|
+
* scheduleMorningRetry() below, so this JSON path is authoritative even
|
|
1468
|
+
* if the human-readable description string later changes.
|
|
1469
|
+
*
|
|
1470
|
+
* Public (not private) because Phase 4's `AuthHealthMonitor.checkAll()`
|
|
1471
|
+
* shares the same skip-while-morning-routine-active invariant as the
|
|
1472
|
+
* hourly check, and injects this method as an option so a probe tick
|
|
1473
|
+
* running concurrently with morning routine can no-op cleanly. See
|
|
1474
|
+
* `docs/design/09-safety-cost.md` §9.5.4.
|
|
1475
|
+
*/
|
|
1476
|
+
isMorningRoutineActive() {
|
|
1477
|
+
if (this.morningRoutineInProgress) {
|
|
1478
|
+
return true;
|
|
1479
|
+
}
|
|
1480
|
+
const row = this.db.prepare(`SELECT 1 as active
|
|
1481
|
+
FROM agent_schedule
|
|
1482
|
+
WHERE status IN ('pending', 'running')
|
|
1483
|
+
AND task_type = 'wake'
|
|
1484
|
+
AND json_extract(task_context, '$.routine') = 'morning_routine'
|
|
1485
|
+
LIMIT 1`).get();
|
|
1486
|
+
return !!row;
|
|
1487
|
+
}
|
|
1488
|
+
async dispatchSafe(event) {
|
|
1489
|
+
const trigger = this.isReactive(event) ? "reactive" : "autonomous";
|
|
1490
|
+
const startMs = Date.now();
|
|
1491
|
+
logger.info({ eventType: event.type, source: event.source, trigger }, "Event processing started");
|
|
1492
|
+
try {
|
|
1493
|
+
// Setup gate — skip all autonomous work while initial setup is
|
|
1494
|
+
// incomplete or a setup conversation is active. Reactive work is
|
|
1495
|
+
// exempt: user DMs (including the dashboard setup chat itself),
|
|
1496
|
+
// mentions in channels, CRITICAL-priority events, and explicit
|
|
1497
|
+
// dashboard-initiated actions (e.g. dashboard_regenerate). The
|
|
1498
|
+
// `isReactive` check is the semantic match — `isMessageEvent` alone
|
|
1499
|
+
// is both too broad (channel messages without mention are dropped
|
|
1500
|
+
// as a personal-agent policy) and too narrow (dashboard_regenerate
|
|
1501
|
+
// is not a message event but is user-initiated).
|
|
1502
|
+
//
|
|
1503
|
+
// Scheduled wake tasks stay in the agent_schedule table in 'pending'
|
|
1504
|
+
// — ScheduleWatcher's top-level gate prevents claiming them, and
|
|
1505
|
+
// `discardStalePendingSchedules` will tidy anything left over
|
|
1506
|
+
// across day boundaries.
|
|
1507
|
+
if (!this.isReactive(event)) {
|
|
1508
|
+
const setupBlock = this.isAutonomousAllowed();
|
|
1509
|
+
if (setupBlock !== null) {
|
|
1510
|
+
this.audit.logSkip(event, setupBlock, trigger);
|
|
1511
|
+
logger.info({ eventType: event.type, source: event.source, reason: setupBlock }, "Event skipped — autonomous work paused for setup");
|
|
1512
|
+
return;
|
|
1513
|
+
}
|
|
1514
|
+
// Autonomous daily cost cap — safety net distinct from removed Phase 9
|
|
1515
|
+
// maxDailyCostUsd (which blanket-blocked all sessions including DMs).
|
|
1516
|
+
// Reactive sessions always pass. Degradation priority: hourly_check is
|
|
1517
|
+
// skipped first, morning_routine last.
|
|
1518
|
+
if (this.shouldSkipForCostCap(event)) {
|
|
1519
|
+
this.audit.logSkip(event, "autonomous_cost_cap_exceeded", trigger);
|
|
1520
|
+
logger.info({ eventType: event.type, source: event.source }, "Event skipped — autonomous daily cost cap exceeded");
|
|
1521
|
+
return;
|
|
1522
|
+
}
|
|
1523
|
+
}
|
|
1524
|
+
await this.dispatch(event);
|
|
1525
|
+
logger.info({ eventType: event.type, source: event.source, durationMs: Date.now() - startMs }, "Event processing completed");
|
|
1526
|
+
}
|
|
1527
|
+
catch (err) {
|
|
1528
|
+
const durationMs = Date.now() - startMs;
|
|
1529
|
+
logger.error({ err, eventType: event.type, source: event.source, durationMs }, "Event processing failed");
|
|
1530
|
+
this.audit.logError(event, err, trigger, buildLogErrorContext(err, durationMs));
|
|
1531
|
+
await this.handleError(event, err);
|
|
1532
|
+
}
|
|
1533
|
+
finally {
|
|
1534
|
+
if (isRoutineEvent(event) && event.routine === "hourly_check") {
|
|
1535
|
+
this.hourlyCheckInProgress = false;
|
|
1536
|
+
}
|
|
1537
|
+
}
|
|
1538
|
+
}
|
|
1539
|
+
async dispatch(event) {
|
|
1540
|
+
if (isMessageEvent(event)) {
|
|
1541
|
+
if (event.isDm || event.isMention) {
|
|
1542
|
+
await this.runWithSessionGate(this.getMessageExecutionKey(event), () => this.handleMessage(event));
|
|
1543
|
+
}
|
|
1544
|
+
else {
|
|
1545
|
+
// Personal agent — channel messages without mention are dropped.
|
|
1546
|
+
// Adapters already filter these, but guard here as defense-in-depth.
|
|
1547
|
+
this.audit.logSkip(event, "channel_message_ignored", "autonomous");
|
|
1548
|
+
logger.debug({ eventType: event.type, source: event.source, channel: event.channel }, "Channel message without mention dropped — personal agent does not process multi-user channel traffic");
|
|
1549
|
+
return;
|
|
1550
|
+
}
|
|
1551
|
+
}
|
|
1552
|
+
else if (isRoutineEvent(event)) {
|
|
1553
|
+
const routine = event.routine;
|
|
1554
|
+
if (routine === "morning_routine") {
|
|
1555
|
+
await this.executeMorningRoutine(event);
|
|
1556
|
+
}
|
|
1557
|
+
else if (routine === "roadmap_refresh") {
|
|
1558
|
+
await this.executeRoadmapRefresh(event);
|
|
1559
|
+
}
|
|
1560
|
+
else if (routine === "skill_curation") {
|
|
1561
|
+
// P22 §3.4 step 4. The optimizer runs in an isolated workdir under
|
|
1562
|
+
// ~/.personal-agent/optimizer-workdir/<runId>/ with a hard-restricted
|
|
1563
|
+
// allowedTools envelope (curl + Read only). The MaterializeOptimizer
|
|
1564
|
+
// hook is wired in `index.ts`; if absent, the routine no-ops with an
|
|
1565
|
+
// audit log. This is the safety floor — without the materializer the
|
|
1566
|
+
// session would otherwise inherit standard executor allowedTools, so
|
|
1567
|
+
// declining to execute is the correct behaviour for an unwired
|
|
1568
|
+
// installation.
|
|
1569
|
+
if (!this.materializeOptimizerWorkdir) {
|
|
1570
|
+
this.audit.logSkip(event, "skill_curation_unwired", "autonomous");
|
|
1571
|
+
return;
|
|
1572
|
+
}
|
|
1573
|
+
await this.executeSkillCurationRoutine(event);
|
|
1574
|
+
}
|
|
1575
|
+
else {
|
|
1576
|
+
// hourly_check, evening_review, weekly_review, monthly_review
|
|
1577
|
+
// Tier is resolved from process-key defaults by BackendRouter.
|
|
1578
|
+
await this.executeDefault(event);
|
|
1579
|
+
}
|
|
1580
|
+
}
|
|
1581
|
+
else if (isScheduledDmEvent(event)) {
|
|
1582
|
+
// SCHEDULED-DM-IMPLEMENTATION-PLAN §3.6 — serialize behind any
|
|
1583
|
+
// in-flight owner-facing DM (across BOTH OWNER_DM_SCOPE and
|
|
1584
|
+
// DASHBOARD_CHAT_SCOPE) so the briefing never composes
|
|
1585
|
+
// concurrently with a DM reply that's currently answering the
|
|
1586
|
+
// same topic. Sort order on key acquisition is the
|
|
1587
|
+
// deadlock-prevention contract.
|
|
1588
|
+
await this.runWithSessionGates([
|
|
1589
|
+
`${OWNER_DM_SCOPE}:${OWNER_SCOPE_KEY}`,
|
|
1590
|
+
`${DASHBOARD_CHAT_SCOPE}:${DASHBOARD_SCOPE_KEY}`,
|
|
1591
|
+
], async () => {
|
|
1592
|
+
// §3.6.1 max-wait — drop the briefing if gate-acquisition
|
|
1593
|
+
// pushed delivery past `maxBriefingDelayMinutes` of the
|
|
1594
|
+
// scheduled time. Loses the daily heartbeat on chatty
|
|
1595
|
+
// mornings; preserves "morning" semantics on quiet ones.
|
|
1596
|
+
if (event.scheduleId !== undefined) {
|
|
1597
|
+
const row = this.db
|
|
1598
|
+
.prepare("SELECT scheduled_for FROM agent_schedule WHERE id = ?")
|
|
1599
|
+
.get(event.scheduleId);
|
|
1600
|
+
if (row) {
|
|
1601
|
+
const lateMs = Date.now() - parseSqliteUtcMs(row.scheduled_for);
|
|
1602
|
+
const budgetMs = this.config.maxBriefingDelayMinutes * 60_000;
|
|
1603
|
+
if (lateMs > budgetMs) {
|
|
1604
|
+
this.db
|
|
1605
|
+
.prepare("UPDATE agent_schedule SET status = 'skipped' WHERE id = ? AND status = 'running'")
|
|
1606
|
+
.run(event.scheduleId);
|
|
1607
|
+
logger.info({
|
|
1608
|
+
eventType: event.type,
|
|
1609
|
+
scheduleId: event.scheduleId,
|
|
1610
|
+
lateMs,
|
|
1611
|
+
budgetMs,
|
|
1612
|
+
}, "scheduled.dm dropped — gate acquisition exceeded max delay");
|
|
1613
|
+
return;
|
|
1614
|
+
}
|
|
1615
|
+
}
|
|
1616
|
+
}
|
|
1617
|
+
await this.executeScheduledTask(event);
|
|
1618
|
+
});
|
|
1619
|
+
}
|
|
1620
|
+
else if (isAgentTaskEvent(event)) {
|
|
1621
|
+
// scheduled.task — no gate, retains existing parallel-execution
|
|
1622
|
+
// behavior. (scheduled.dm subtype is handled above.)
|
|
1623
|
+
await this.executeScheduledTask(event);
|
|
1624
|
+
}
|
|
1625
|
+
else {
|
|
1626
|
+
await this.executeDefault(event);
|
|
1627
|
+
}
|
|
1628
|
+
}
|
|
1629
|
+
getMessageExecutionKey(event) {
|
|
1630
|
+
const { scope, scopeKey } = getConversationScope({
|
|
1631
|
+
platform: event.platform,
|
|
1632
|
+
channel: event.channel,
|
|
1633
|
+
threadId: event.threadId,
|
|
1634
|
+
isDm: event.isDm,
|
|
1635
|
+
// Forks docs_qa traffic onto its own gate so a QA lookup does not
|
|
1636
|
+
// queue behind an in-flight chat turn (or vice versa).
|
|
1637
|
+
intent: event.intent,
|
|
1638
|
+
});
|
|
1639
|
+
return `${scope}:${scopeKey}`;
|
|
1640
|
+
}
|
|
1641
|
+
async runWithSessionGate(key, fn) {
|
|
1642
|
+
return this.sessionGates.runWithSessionGate(key, fn);
|
|
1643
|
+
}
|
|
1644
|
+
/**
|
|
1645
|
+
* Acquire multiple session gates sequentially in lexicographic order
|
|
1646
|
+
* before invoking `fn`. Used by the `scheduled.dm` dispatch path to
|
|
1647
|
+
* serialize a briefing behind ALL owner-facing DM scopes
|
|
1648
|
+
* (messaging-app DMs and dashboard chat).
|
|
1649
|
+
*
|
|
1650
|
+
* SCHEDULED-DM-IMPLEMENTATION-PLAN §3.6.
|
|
1651
|
+
*/
|
|
1652
|
+
async runWithSessionGates(keys, fn) {
|
|
1653
|
+
return this.sessionGates.runWithSessionGates(keys, fn);
|
|
1654
|
+
}
|
|
1655
|
+
/**
|
|
1656
|
+
* Morning routine execution with pre-processing (lock, rotateDayFiles,
|
|
1657
|
+
* prompt variant selection). Only called for routine === "morning_routine".
|
|
1658
|
+
* Tier is resolved by BackendRouter from process-key defaults or user config.
|
|
1659
|
+
*/
|
|
1660
|
+
async executeMorningRoutine(event) {
|
|
1661
|
+
let lockId = null;
|
|
1662
|
+
let effectiveEvent = event;
|
|
1663
|
+
if (this.todayWriteLock) {
|
|
1664
|
+
const lock = this.todayWriteLock.acquire();
|
|
1665
|
+
if (!lock.ok) {
|
|
1666
|
+
logger.warn({
|
|
1667
|
+
eventType: event.type,
|
|
1668
|
+
source: event.source,
|
|
1669
|
+
holder: lock.holder,
|
|
1670
|
+
}, "today.md write lock held during morning routine — scheduling retry");
|
|
1671
|
+
this.scheduleMorningRetry(event);
|
|
1672
|
+
return;
|
|
1673
|
+
}
|
|
1674
|
+
lockId = lock.lockId;
|
|
1675
|
+
effectiveEvent = {
|
|
1676
|
+
...event,
|
|
1677
|
+
data: {
|
|
1678
|
+
...event.data,
|
|
1679
|
+
todayWriteLockId: lockId,
|
|
1680
|
+
},
|
|
1681
|
+
};
|
|
1682
|
+
}
|
|
1683
|
+
this.rotateDayFiles();
|
|
1684
|
+
// Check roadmap staleness BEFORE agent runs (agent may PATCH roadmap, updating mtime)
|
|
1685
|
+
const roadmapStaleBeforeMorning = this.isRoadmapStale();
|
|
1686
|
+
// Select prompt variant based on whether yesterday.md exists
|
|
1687
|
+
const contextDir = getContextDir(this.config, this.db);
|
|
1688
|
+
const hasYesterday = existsSync(join(contextDir, "yesterday.md"));
|
|
1689
|
+
const promptKey = hasYesterday
|
|
1690
|
+
? "routine.morning_routine"
|
|
1691
|
+
: "routine.morning_routine_initial";
|
|
1692
|
+
// Retry runs on the light tier (Sonnet) instead of the configured heavy
|
|
1693
|
+
// tier (Opus). Cost trade-off: a wrong-date or malformed today.md is
|
|
1694
|
+
// cheap to regenerate — the heavy work (mail classification, journal
|
|
1695
|
+
// synthesis, roadmap walk) was already done by the first attempt and its
|
|
1696
|
+
// outputs persisted via /api/context/* writes that survive into the
|
|
1697
|
+
// retry's prompt context. Sonnet at ~1/5 of Opus's per-turn cost keeps
|
|
1698
|
+
// the worst-case retry chain (3 attempts) under $2 instead of $12,
|
|
1699
|
+
// which is the cap the user asked for after observing $25/hour
|
|
1700
|
+
// burn during a date-format loop. See morning-routine fix doc.
|
|
1701
|
+
const retryCount = Number(effectiveEvent.data?.retryCount ?? 0);
|
|
1702
|
+
const isRetry = retryCount > 0 || effectiveEvent.data?.isRetry === true;
|
|
1703
|
+
let requestedTier = isRetry
|
|
1704
|
+
? "medium"
|
|
1705
|
+
: undefined;
|
|
1706
|
+
// `routine.morning_routine_initial` is non-configurable, so plan presets
|
|
1707
|
+
// and operator pins on `routine.morning_routine` are silently ignored
|
|
1708
|
+
// here without explicit inheritance — Max5/Max20/Team/Enterprise users
|
|
1709
|
+
// pinned to Sonnet would still run Opus on the initial variant whenever
|
|
1710
|
+
// `yesterday.md` is missing (cf. plan-presets.ts and process-key.ts §
|
|
1711
|
+
// morning_routine_initial). Mirror morning_routine's configured tier
|
|
1712
|
+
// onto the initial run when available.
|
|
1713
|
+
if (!isRetry && promptKey === "routine.morning_routine_initial") {
|
|
1714
|
+
const inheritedTier = this.inferConfiguredMorningRoutineTier();
|
|
1715
|
+
if (inheritedTier) {
|
|
1716
|
+
requestedTier = inheritedTier;
|
|
1717
|
+
}
|
|
1718
|
+
}
|
|
1719
|
+
logger.info({ hasYesterday, promptKey, roadmapStale: roadmapStaleBeforeMorning, isRetry, retryCount, requestedTier: requestedTier ?? "default" }, "Morning routine prompt selected");
|
|
1720
|
+
const context = await this.contextBuilder.build(effectiveEvent);
|
|
1721
|
+
const binding = this.agentRouter.resolveBinding(effectiveEvent, {
|
|
1722
|
+
processKey: promptKey,
|
|
1723
|
+
...(requestedTier ? { requestedTier } : {}),
|
|
1724
|
+
});
|
|
1725
|
+
const reassemblePrompt = (bid) => this.assemblePrompt(promptKey, promptKey, bid);
|
|
1726
|
+
const prompt = reassemblePrompt(binding.main.backendId);
|
|
1727
|
+
// B2 fix: wrap the entire retry chain — NOT just the inner execute() —
|
|
1728
|
+
// so that `morningRoutineInProgress` stays true across retry attempts.
|
|
1729
|
+
// Previously .finally() was attached to the inner promise, causing the
|
|
1730
|
+
// flag to reset at attempt #1 completion and letting hourly_check race
|
|
1731
|
+
// with the morning routine during retry backoff / attempt #2.
|
|
1732
|
+
this.morningRoutineInProgress = true;
|
|
1733
|
+
let result;
|
|
1734
|
+
try {
|
|
1735
|
+
result = await this.executeWithRetry(() => this.agentRouter.execute({
|
|
1736
|
+
prompt,
|
|
1737
|
+
context,
|
|
1738
|
+
event: effectiveEvent,
|
|
1739
|
+
processKey: promptKey,
|
|
1740
|
+
preResolvedBinding: binding,
|
|
1741
|
+
reassemblePrompt,
|
|
1742
|
+
...(requestedTier ? { requestedTier } : {}),
|
|
1743
|
+
}), effectiveEvent);
|
|
1744
|
+
}
|
|
1745
|
+
finally {
|
|
1746
|
+
this.morningRoutineInProgress = false;
|
|
1747
|
+
if (lockId && this.todayWriteLock) {
|
|
1748
|
+
this.todayWriteLock.release(lockId);
|
|
1749
|
+
}
|
|
1750
|
+
}
|
|
1751
|
+
await this.processResult(result, effectiveEvent);
|
|
1752
|
+
// Post-morning-routine: verify today.md was generated, retry if not.
|
|
1753
|
+
// This catches agent failures that don't throw (e.g., early stop, context
|
|
1754
|
+
// building succeeded but the PUT /api/context/today call was skipped).
|
|
1755
|
+
//
|
|
1756
|
+
// Distinguish the two failure modes so the operator can tell from the log
|
|
1757
|
+
// whether the agent skipped the write entirely vs. wrote with the wrong
|
|
1758
|
+
// agent-day date. Pre-fix, both paths logged the same "does not exist"
|
|
1759
|
+
// string and the wrong-date case looked indistinguishable from a hard
|
|
1760
|
+
// crash, masking the date-confusion root cause.
|
|
1761
|
+
const todayMdState = this.diagnoseTodayMdState();
|
|
1762
|
+
if (todayMdState.kind !== "fresh") {
|
|
1763
|
+
logger.warn({
|
|
1764
|
+
eventType: effectiveEvent.type,
|
|
1765
|
+
isError: result.isError,
|
|
1766
|
+
numTurns: result.numTurns,
|
|
1767
|
+
todayMdState: todayMdState.kind,
|
|
1768
|
+
...(todayMdState.kind === "wrong_date"
|
|
1769
|
+
? {
|
|
1770
|
+
writtenDate: todayMdState.writtenDate,
|
|
1771
|
+
expectedAgentDay: todayMdState.expectedAgentDay,
|
|
1772
|
+
}
|
|
1773
|
+
: {}),
|
|
1774
|
+
}, todayMdState.kind === "missing"
|
|
1775
|
+
? "Morning routine completed but today.md does not exist — scheduling retry"
|
|
1776
|
+
: "Morning routine completed but today.md has wrong agent-day date — scheduling retry");
|
|
1777
|
+
this.scheduleMorningRetry(effectiveEvent);
|
|
1778
|
+
}
|
|
1779
|
+
else {
|
|
1780
|
+
if (effectiveEvent.data?.deferPostMorningCatchupsUntilStartupReady === true) {
|
|
1781
|
+
logger.info({ eventType: effectiveEvent.type, source: effectiveEvent.source }, "Deferring post-morning catchups until startup messaging is ready");
|
|
1782
|
+
}
|
|
1783
|
+
else {
|
|
1784
|
+
await this.emitPostMorningCatchups(effectiveEvent);
|
|
1785
|
+
}
|
|
1786
|
+
const todayRefreshFlush = flushPendingTodayRefresh(this.db);
|
|
1787
|
+
if (todayRefreshFlush.hadPending) {
|
|
1788
|
+
logger.info({ scheduled: todayRefreshFlush.scheduled }, "Flushed pending today_refresh after morning routine");
|
|
1789
|
+
}
|
|
1790
|
+
}
|
|
1791
|
+
// Post-morning-routine: trigger roadmap refresh if stale
|
|
1792
|
+
if (roadmapStaleBeforeMorning) {
|
|
1793
|
+
if (effectiveEvent.data?.deferPostMorningCatchupsUntilStartupReady === true) {
|
|
1794
|
+
logger.info({ eventType: effectiveEvent.type, source: effectiveEvent.source }, "Deferring roadmap_refresh until startup messaging is ready");
|
|
1795
|
+
}
|
|
1796
|
+
else {
|
|
1797
|
+
this.emitRoadmapRefresh("post_morning_routine");
|
|
1798
|
+
}
|
|
1799
|
+
}
|
|
1800
|
+
}
|
|
1801
|
+
/**
|
|
1802
|
+
* Read the operator-or-preset configured tier for `routine.morning_routine`
|
|
1803
|
+
* so the non-configurable `morning_routine_initial` variant can mirror it.
|
|
1804
|
+
* Returns null when no row exists (fresh install before the setup wizard
|
|
1805
|
+
* applies a preset) or when the pinned model isn't in the registry — the
|
|
1806
|
+
* caller falls back to the router's default tier in those cases.
|
|
1807
|
+
*/
|
|
1808
|
+
inferConfiguredMorningRoutineTier() {
|
|
1809
|
+
try {
|
|
1810
|
+
const row = this.db
|
|
1811
|
+
.prepare("SELECT main_backend, main_model FROM process_backend_config WHERE process_key = 'routine.morning_routine'")
|
|
1812
|
+
.get();
|
|
1813
|
+
if (!row || !isBackendId(row.main_backend))
|
|
1814
|
+
return null;
|
|
1815
|
+
return findRegisteredModel(row.main_backend, row.main_model)?.tier ?? null;
|
|
1816
|
+
}
|
|
1817
|
+
catch (err) {
|
|
1818
|
+
logger.debug({ err }, "Failed to infer morning_routine configured tier");
|
|
1819
|
+
return null;
|
|
1820
|
+
}
|
|
1821
|
+
}
|
|
1822
|
+
async emitPostMorningCatchups(event) {
|
|
1823
|
+
const queuedRoutines = Array.isArray(event.data?.postCatchupRoutines)
|
|
1824
|
+
? event.data.postCatchupRoutines.filter((value) => typeof value === "string")
|
|
1825
|
+
: [];
|
|
1826
|
+
for (const routine of queuedRoutines) {
|
|
1827
|
+
logger.info({ routine }, "Emitting deferred post-morning catchup routine");
|
|
1828
|
+
await this.eventBus.put({
|
|
1829
|
+
...createEvent({
|
|
1830
|
+
type: `routine.${routine}`,
|
|
1831
|
+
source: "post_morning_catchup",
|
|
1832
|
+
priority: EventPriority.HIGH,
|
|
1833
|
+
}),
|
|
1834
|
+
routine,
|
|
1835
|
+
});
|
|
1836
|
+
}
|
|
1837
|
+
if (event.data?.postCatchupHourlyCheck === true) {
|
|
1838
|
+
logger.info("Triggering deferred hourly_check after morning catchup");
|
|
1839
|
+
await this.triggerHourlyCheck("post_morning_catchup");
|
|
1840
|
+
}
|
|
1841
|
+
}
|
|
1842
|
+
/**
 * Schedule a retry of the morning routine when today.md wasn't generated.
 *
 * Uses the existing agent_schedule → ScheduleWatcher path rather than
 * re-enqueuing on the EventBus directly. Benefits:
 * 1. Retry persists across daemon restarts.
 * 2. Shares the same Opus cost-limit and concurrency gates.
 * 3. Back-off delay is enforced by scheduled_for timestamp.
 *
 * Retry policy: exponential back-off (5 min → 10 min → 15 min), max 3
 * attempts. After the 3rd failure, send a critical notification to
 * the user and stop retrying.
 *
 * Retry count is tracked via `event.data.retryCount` on the RoutineEvent.
 * On the first failure the count comes from the cron-fired RoutineEvent
 * (undefined → 0). On subsequent failures handleMorningRoutineRetry
 * synthesizes a new RoutineEvent carrying the previous count from the
 * wake task's taskContext, so the chain propagates through a single
 * code path: event.data.retryCount → +1 → task_context.retryCount
 * → next event.data.retryCount → ...
 *
 * Dedup protects against pathological cases:
 * - M1: another retry is already pending/running → skip
 *
 * @param event RoutineEvent whose data may carry retryCount /
 *   originalCorrelationId / queuedSource / postCatchup* state to propagate.
 */
scheduleMorningRetry(event) {
    // Count of attempts already made; this call schedules attempt previous+1.
    const previousCount = Number(event.data?.retryCount ?? 0);
    const retryCount = previousCount + 1;
    const MAX_RETRIES = 3;
    // Preserve the original cron morning_routine correlationId through
    // the chain if present. On the first call this is the cron event's
    // own id. On later calls it's propagated via event.correlationId
    // (which handleMorningRoutineRetry sets from taskCtx).
    const originalCorrelationId = event.data?.originalCorrelationId ??
        event.correlationId;
    if (retryCount > MAX_RETRIES) {
        logger.error({
            // NOTE: logs the attempts already made (previousCount), not the
            // never-scheduled attempt number.
            retryCount: previousCount,
            maxRetries: MAX_RETRIES,
            originalCorrelationId,
        }, "Morning routine retry exhausted — sending critical notification");
        // Fire-and-forget: notification failure is logged but must not throw
        // out of the scheduler path.
        void this.notificationMgr
            .send(`⚠️ Morning routine failed to generate today.md after ${MAX_RETRIES} attempts. Please regenerate manually from the dashboard.`, event, { category: "critical", priority: "critical" })
            .catch((err) => {
            logger.error({ err }, "Failed to send morning-routine-retry-exhausted notification");
        });
        return;
    }
    // Exponential back-off: 5 / 10 / 15 minutes
    const delayMinutes = retryCount * 5;
    const retryTime = new Date(Date.now() + delayMinutes * 60 * 1000);
    const scheduledFor = formatSqliteDatetime(retryTime);
    // Encode the retry state in task_context so the wake agent (via
    // executeScheduledTask → handleMorningRoutineRetry) can propagate
    // retryCount into the synthesized RoutineEvent's event.data.
    // `importance: "low"` keeps the retry out of roadmap.md — the
    // originating morning_routine is already tracked elsewhere.
    const taskContext = JSON.stringify({
        routine: "morning_routine",
        retryCount,
        originalCorrelationId,
        source: typeof event.data?.queuedSource === "string" ? event.data.queuedSource : event.source,
        postCatchupRoutines: Array.isArray(event.data?.postCatchupRoutines)
            ? event.data.postCatchupRoutines
            : [],
        postCatchupHourlyCheck: event.data?.postCatchupHourlyCheck === true,
        importance: "low",
    });
    // M1: dedup + INSERT in a single transaction so two concurrent
    // retry schedulers cannot both race past the dedup check and both
    // insert new rows. better-sqlite3 is synchronous so the transaction
    // callback runs atomically relative to any other DB access from
    // this process.
    //
    // Dedup checks for 'pending' only — not 'running' — because the
    // retry chain legitimately calls this method while the current
    // wake task is still in 'running' state (handleMorningRoutineRetry
    // → executeMorningRoutine → this). Including 'running' would break chain
    // continuation.
    const insertRetryTxn = this.db.transaction(() => {
        // C5 fix: dedup on task_context.routine, not task_description prefix.
        // Both `scheduleMorningRetry` (here) and `isMorningRoutineActive`
        // (above) now use the same JSON-path check, so the detection path
        // doesn't depend on the human-readable description string.
        const existing = this.db
            .prepare(`SELECT id FROM agent_schedule
        WHERE task_type = 'wake'
          AND status = 'pending'
          AND json_extract(task_context, '$.routine') = 'morning_routine'
        LIMIT 1`)
            .get();
        if (existing) {
            return { inserted: false, existingId: existing.id };
        }
        this.db
            .prepare(`INSERT INTO agent_schedule
        (scheduled_for, task_type, task_description, task_context, correlation_id, model, status)
        VALUES (?, 'wake', ?, ?, ?, NULL, 'pending')`)
            .run(scheduledFor, `Morning routine retry (attempt ${retryCount}/${MAX_RETRIES}). Generate today.md per the morning_routine flow.`, taskContext, originalCorrelationId);
        return { inserted: true };
    });
    try {
        const outcome = insertRetryTxn();
        if (!outcome.inserted) {
            logger.info({
                existingScheduleId: outcome.existingId,
                retryCount,
                originalCorrelationId,
            }, "Morning routine retry dedup — another pending retry already exists");
            return;
        }
        logger.info({
            retryCount,
            delayMinutes,
            scheduledFor,
            originalCorrelationId,
            // Retries always fall back to the medium tier (Sonnet) per the
            // cost-cap fix in executeMorningRoutine — surface that explicitly
            // in the schedule log so the operator can confirm the
            // downgrade happened without grepping the next agent-execute line.
            plannedTier: "medium",
            plannedTierReason: "morning_routine_retry_cost_cap",
        }, "Morning routine retry scheduled (will run on Sonnet)");
        // Route the INSERT through the shared roadmap-refresh gate.
        // `importance:"low"` short-circuits the trigger — the morning
        // routine is already represented elsewhere — but going through
        // the helper keeps all five INSERT call-sites on one path.
        maybeTriggerRoadmapRefresh({ scheduledFor, taskContext: { importance: "low" } }, (src) => this.emitRoadmapRefresh(src), "morning_retry");
    }
    catch (err) {
        // Swallow deliberately: failing to schedule a retry must not crash the
        // caller; the next cron-fired morning routine is the fallback.
        logger.error({ err, retryCount }, "Failed to schedule morning routine retry");
    }
}
|
|
1974
|
+
buildRepositoryRunPrompt(ctx) {
|
|
1975
|
+
const lines = [
|
|
1976
|
+
"{context}",
|
|
1977
|
+
"",
|
|
1978
|
+
"## Repository Run",
|
|
1979
|
+
`Repository id: ${ctx.repositoryId}`,
|
|
1980
|
+
`Repository slug: ${ctx.slug}`,
|
|
1981
|
+
`GitHub repo: ${ctx.githubRepo ?? "(none)"}`,
|
|
1982
|
+
`Local path: ${ctx.localPath ?? "(none)"}`,
|
|
1983
|
+
`Workdir mode: ${ctx.workdirMode}`,
|
|
1984
|
+
`Trigger source: ${ctx.triggerSource}`,
|
|
1985
|
+
];
|
|
1986
|
+
if (ctx.triggerId || ctx.triggerName || ctx.triggerEventType) {
|
|
1987
|
+
lines.push("", "## Trigger", `Trigger id: ${ctx.triggerId ?? "(manual)"}`, `Trigger name: ${ctx.triggerName ?? "(manual)"}`, `Event type: ${ctx.triggerEventType ?? "(manual)"}`);
|
|
1988
|
+
if (ctx.triggerEventPayload !== undefined) {
|
|
1989
|
+
lines.push("", "<trigger_event_payload>", JSON.stringify(ctx.triggerEventPayload, null, 2), "</trigger_event_payload>");
|
|
1990
|
+
}
|
|
1991
|
+
}
|
|
1992
|
+
lines.push("", "## User Prompt", ctx.prompt);
|
|
1993
|
+
return lines.join("\n");
|
|
1994
|
+
}
|
|
1995
|
+
prepareRepositoryRunSessionDir(ctx, backendId) {
|
|
1996
|
+
if (ctx.workdirMode === "local-clone") {
|
|
1997
|
+
if (!ctx.localPath) {
|
|
1998
|
+
throw new Error("Repository local-clone run missing localPath");
|
|
1999
|
+
}
|
|
2000
|
+
ensureBackendMaterialized(this.config.workspaceDir, ctx.localPath, backendId, "scheduled.task", "agent.task", this.getConfiguredServices(), this.getActiveMailAccounts(), readIntegrations(this.db), this.config.character);
|
|
2001
|
+
return { sessionDir: ctx.localPath, cleanup: false };
|
|
2002
|
+
}
|
|
2003
|
+
if (!ctx.instructionMd) {
|
|
2004
|
+
throw new Error("Repository temp run missing instructionMd");
|
|
2005
|
+
}
|
|
2006
|
+
const sessionDir = join(this.config.dataDir, "run", `${safeRepositoryRunDirName(ctx.slug)}-${Date.now()}-${randomUUID().slice(0, 8)}`);
|
|
2007
|
+
mkdirSync(sessionDir, { recursive: true, mode: 0o700 });
|
|
2008
|
+
try {
|
|
2009
|
+
ensureBackendMaterialized(this.config.workspaceDir, sessionDir, backendId, "scheduled.task", "agent.task", this.getConfiguredServices(), this.getActiveMailAccounts(), readIntegrations(this.db), this.config.character);
|
|
2010
|
+
writeFileSync(join(sessionDir, repositoryRunInstructionFilename(backendId)), ctx.instructionMd, "utf-8");
|
|
2011
|
+
return { sessionDir, cleanup: true };
|
|
2012
|
+
}
|
|
2013
|
+
catch (err) {
|
|
2014
|
+
cleanupSessionWorkdir(sessionDir);
|
|
2015
|
+
throw err;
|
|
2016
|
+
}
|
|
2017
|
+
}
|
|
2018
|
+
async executeRepositoryRunTask(event, ctx) {
|
|
2019
|
+
const context = await this.contextBuilder.build(event);
|
|
2020
|
+
const processKey = "agent.task";
|
|
2021
|
+
const requestedTier = event.requestedModel
|
|
2022
|
+
? (event.requestedModel === "sonnet" ? "medium" : "high")
|
|
2023
|
+
: undefined;
|
|
2024
|
+
const internalBackendOverride = event.requestedBackendId
|
|
2025
|
+
&& isBackendId(event.requestedBackendId)
|
|
2026
|
+
&& typeof event.requestedModelId === "string"
|
|
2027
|
+
? {
|
|
2028
|
+
requestedBackendId: event.requestedBackendId,
|
|
2029
|
+
requestedModelId: event.requestedModelId,
|
|
2030
|
+
}
|
|
2031
|
+
: {};
|
|
2032
|
+
const binding = this.agentRouter.resolveBinding(event, {
|
|
2033
|
+
processKey,
|
|
2034
|
+
requestedTier,
|
|
2035
|
+
...internalBackendOverride,
|
|
2036
|
+
});
|
|
2037
|
+
const prompt = this.buildRepositoryRunPrompt(ctx);
|
|
2038
|
+
const { sessionDir, cleanup } = this.prepareRepositoryRunSessionDir(ctx, binding.main.backendId);
|
|
2039
|
+
try {
|
|
2040
|
+
const result = await this.executeWithRetry(() => this.agentRouter.execute({
|
|
2041
|
+
prompt,
|
|
2042
|
+
context,
|
|
2043
|
+
event,
|
|
2044
|
+
processKey,
|
|
2045
|
+
requestedTier,
|
|
2046
|
+
preResolvedBinding: binding,
|
|
2047
|
+
reassemblePrompt: () => prompt,
|
|
2048
|
+
sessionDir,
|
|
2049
|
+
workdirEventType: "scheduled.task",
|
|
2050
|
+
workdirProcessKey: processKey,
|
|
2051
|
+
...internalBackendOverride,
|
|
2052
|
+
}), event);
|
|
2053
|
+
await this.processResult(result, event);
|
|
2054
|
+
}
|
|
2055
|
+
finally {
|
|
2056
|
+
if (cleanup) {
|
|
2057
|
+
cleanupSessionWorkdir(sessionDir);
|
|
2058
|
+
}
|
|
2059
|
+
}
|
|
2060
|
+
}
|
|
2061
|
+
/**
 * Execute a scheduled task with the model specified when the task was
 * registered via POST /api/schedule.
 *
 * Morning-routine retry tasks take a dedicated fast path: they skip
 * the generic scheduled.task prompt and run the *real* morning routine
 * flow via executeMorningRoutine, so the retry carries the same rotateDayFiles
 * / prompt selection / roadmap-refresh chain as the cron-fired path.
 */
async executeScheduledTask(event) {
    // Dispatch order matters: the specialized handlers below (morning-routine
    // retry, today_refresh, repository run, git project docs) each claim the
    // task and return; only unclaimed tasks reach the generic agent flow.
    // Morning-routine retry detection: if taskContext says this wake
    // task is a morning-routine retry, dispatch through executeMorningRoutine
    // with a synthesized RoutineEvent instead of the generic flow.
    const taskCtx = event.taskContext;
    if (taskCtx &&
        typeof taskCtx === "object" &&
        taskCtx.routine === "morning_routine") {
        await this.handleMorningRoutineRetry(event, taskCtx);
        return;
    }
    if (taskCtx &&
        typeof taskCtx === "object" &&
        taskCtx.routine === "today_refresh") {
        await this.executeScheduledRoutine(event, "today_refresh");
        return;
    }
    // Repository runs carry their own parsed context shape.
    const repositoryRunCtx = parseRepositoryRunTaskContext(taskCtx);
    if (repositoryRunCtx) {
        await this.executeRepositoryRunTask(event, repositoryRunCtx);
        return;
    }
    // Git project documentation tasks are handled by the direct markdown
    // writer (no agent session); returns true when it claimed the task.
    if (await this.executeGitProjectDocTaskIfApplicable(event, taskCtx)) {
        return;
    }
    // Generic flow: build context, resolve process/prompt keys (taskContext
    // may override the event-derived process key), resolve binding, execute.
    const context = await this.contextBuilder.build(event);
    const processKeyOverride = taskCtx
        && typeof taskCtx === "object"
        && typeof taskCtx.processKey === "string"
        ? taskCtx.processKey
        : null;
    const processKey = (processKeyOverride ?? resolveProcessKey(event));
    const promptKey = processKeyOverride ?? event.type;
    // "sonnet" maps to the medium tier; any other requested model to high.
    const requestedTier = event.requestedModel
        ? (event.requestedModel === "sonnet" ? "medium" : "high")
        : undefined;
    // Explicit backend/model pin from the schedule row, only honored when
    // both fields are present and the backend id is known.
    const internalBackendOverride = event.requestedBackendId
        && isBackendId(event.requestedBackendId)
        && typeof event.requestedModelId === "string"
        ? {
            requestedBackendId: event.requestedBackendId,
            requestedModelId: event.requestedModelId,
        }
        : {};
    const binding = this.agentRouter.resolveBinding(event, {
        processKey,
        requestedTier,
        ...internalBackendOverride,
    });
    // Prompt may differ per backend; keep the assembler callable so execute()
    // can rebuild it if the binding's backend changes mid-retry.
    const reassemblePrompt = (bid) => this.assemblePrompt(promptKey, processKey, bid);
    const prompt = reassemblePrompt(binding.main.backendId);
    const result = await this.executeWithRetry(() => this.agentRouter.execute({
        prompt,
        context,
        event,
        processKey,
        requestedTier,
        preResolvedBinding: binding,
        reassemblePrompt,
    }), event);
    await this.processResult(result, event);
}
|
|
2132
|
+
/**
 * Legacy git project documentation tasks used to run as autonomous Claude
 * task-flows. That made file creation probabilistic: the backend could
 * finish "successfully" without calling the daemon context API, or fail
 * before receiving the `<task_context>` block. The daemon now owns these
 * writes directly, matching the manual Daily git management buttons and
 * the repository-management cron.
 *
 * Returns false when the task is not a git project documentation task
 * (caller falls through to the generic flow); returns true after handling
 * it; rethrows on failure after recording the failure state.
 */
async executeGitProjectDocTaskIfApplicable(event, taskCtx) {
    // Only "git.project.init" / "git.project.update" tasks are handled here.
    const processKey = this.resolveGitProjectDocProcessKey(event, taskCtx);
    if (!processKey)
        return false;
    const ctx = taskCtx && typeof taskCtx === "object"
        ? taskCtx
        : {};
    const repo = this.resolveRepositoryForGitProjectDocTask(ctx);
    const triggerSource = typeof ctx.triggerSource === "string"
        ? ctx.triggerSource
        : null;
    // Management-sourced runs additionally record scan/init bookkeeping rows.
    const isManagementSource = triggerSource === "repository_management_cron" ||
        triggerSource === "repository_management_manual";
    try {
        if (processKey === "git.project.init") {
            const result = runRepositoryManagementInit({
                db: this.db,
                repo,
                contextDir: getContextDir(this.config, this.db),
                timezone: this.config.timezone || undefined,
                writeTracker: this.writeTracker,
            });
            if (isManagementSource) {
                recordManagementInitDone(this.db, repo.id);
            }
            this.markScheduledTaskCompleted(event);
            logger.info({
                scheduleId: event.scheduleId ?? null,
                repositoryId: repo.id,
                slug: repo.slug,
                result: result.status,
                architectureScheduleId: result.architectureScheduleId,
            }, "Handled git.project.init with direct markdown writer");
        }
        else {
            // git.project.update path. Accept lookbackHours only when it is a
            // finite positive number; otherwise let the scan use its default.
            const lookbackHours = typeof ctx.lookbackHours === "number"
                && Number.isFinite(ctx.lookbackHours)
                && ctx.lookbackHours > 0
                ? ctx.lookbackHours
                : undefined;
            const result = await runRepositoryManagementScan({
                db: this.db,
                repo,
                contextDir: getContextDir(this.config, this.db),
                timezone: this.config.timezone || undefined,
                lookbackHours,
                writeTracker: this.writeTracker,
            });
            if (isManagementSource) {
                recordManagementScan(this.db, repo.id, result.status === "skipped_no_activity" ? "skipped_no_activity" : "ok");
            }
            this.markScheduledTaskCompleted(event);
            logger.info({
                scheduleId: event.scheduleId ?? null,
                repositoryId: repo.id,
                slug: repo.slug,
                result: result.status,
                journalPath: result.journalPath,
            }, "Handled git.project.update with direct markdown writer");
        }
        return true;
    }
    catch (err) {
        // Best-effort failure bookkeeping: a failing recordManagementScan must
        // not mask the original error.
        if (isManagementSource) {
            try {
                recordManagementScan(this.db, repo.id, "failed");
            }
            catch (recordErr) {
                logger.error({ err: recordErr, repositoryId: repo.id }, "Failed to record repository management direct-writer failure");
            }
        }
        // Flip the schedule row to 'failed' only if it is still 'running'.
        if (event.scheduleId) {
            this.db
                .prepare("UPDATE agent_schedule SET status = 'failed' WHERE id = ? AND status = 'running'")
                .run(event.scheduleId);
        }
        logger.error({ err, scheduleId: event.scheduleId ?? null, repositoryId: repo.id }, "Git project documentation direct writer failed");
        throw err;
    }
}
|
|
2220
|
+
resolveGitProjectDocProcessKey(event, taskCtx) {
|
|
2221
|
+
const ctxProcessKey = taskCtx &&
|
|
2222
|
+
typeof taskCtx === "object" &&
|
|
2223
|
+
typeof taskCtx.processKey === "string"
|
|
2224
|
+
? taskCtx.processKey
|
|
2225
|
+
: null;
|
|
2226
|
+
const value = ctxProcessKey ?? event.source;
|
|
2227
|
+
return value === "git.project.init" || value === "git.project.update"
|
|
2228
|
+
? value
|
|
2229
|
+
: null;
|
|
2230
|
+
}
|
|
2231
|
+
resolveRepositoryForGitProjectDocTask(ctx) {
|
|
2232
|
+
const repositoryId = typeof ctx.repositoryId === "string"
|
|
2233
|
+
? ctx.repositoryId
|
|
2234
|
+
: null;
|
|
2235
|
+
if (repositoryId) {
|
|
2236
|
+
const byId = getRepository(this.db, repositoryId);
|
|
2237
|
+
if (byId)
|
|
2238
|
+
return byId;
|
|
2239
|
+
}
|
|
2240
|
+
const localPath = typeof ctx.localPath === "string"
|
|
2241
|
+
? ctx.localPath
|
|
2242
|
+
: typeof ctx.repository?.localPath === "string"
|
|
2243
|
+
? ctx.repository.localPath
|
|
2244
|
+
: null;
|
|
2245
|
+
if (localPath) {
|
|
2246
|
+
const byPath = getRepositoryByLocalPath(this.db, localPath);
|
|
2247
|
+
if (byPath)
|
|
2248
|
+
return byPath;
|
|
2249
|
+
}
|
|
2250
|
+
const slug = typeof ctx.slug === "string"
|
|
2251
|
+
? ctx.slug
|
|
2252
|
+
: typeof ctx.repository?.slug === "string"
|
|
2253
|
+
? ctx.repository.slug
|
|
2254
|
+
: null;
|
|
2255
|
+
if (!slug || !localPath) {
|
|
2256
|
+
throw new Error("git project documentation task requires repositoryId or slug/localPath task context");
|
|
2257
|
+
}
|
|
2258
|
+
const githubRepo = typeof ctx.githubRepo === "string"
|
|
2259
|
+
? ctx.githubRepo
|
|
2260
|
+
: typeof ctx.repository?.githubRepo === "string"
|
|
2261
|
+
? ctx.repository.githubRepo
|
|
2262
|
+
: null;
|
|
2263
|
+
const [githubOwner, githubRepoName] = parseGithubRepoSlug(githubRepo);
|
|
2264
|
+
const now = Date.now();
|
|
2265
|
+
return {
|
|
2266
|
+
id: repositoryId ?? (githubRepo ? `github:${githubRepo}` : `local:${slug}`),
|
|
2267
|
+
githubOwner,
|
|
2268
|
+
githubRepo: githubRepoName,
|
|
2269
|
+
githubAccount: null,
|
|
2270
|
+
localPath,
|
|
2271
|
+
localOnly: githubRepo === null,
|
|
2272
|
+
displayName: typeof ctx.displayName === "string" ? ctx.displayName : slug,
|
|
2273
|
+
classification: normalizeRepositoryClassification(ctx.classification),
|
|
2274
|
+
category: normalizeRepositoryCategory(ctx.category),
|
|
2275
|
+
pollPriority: "normal",
|
|
2276
|
+
pollIntervalSec: null,
|
|
2277
|
+
slug,
|
|
2278
|
+
createdAt: now,
|
|
2279
|
+
updatedAt: now,
|
|
2280
|
+
};
|
|
2281
|
+
}
|
|
2282
|
+
markScheduledTaskCompleted(event) {
|
|
2283
|
+
if (!event.scheduleId)
|
|
2284
|
+
return;
|
|
2285
|
+
this.db
|
|
2286
|
+
.prepare("UPDATE agent_schedule SET status = 'completed' WHERE id = ? AND status = 'running'")
|
|
2287
|
+
.run(event.scheduleId);
|
|
2288
|
+
}
|
|
2289
|
+
async executeScheduledRoutine(event, routine) {
|
|
2290
|
+
const routineEvent = {
|
|
2291
|
+
...createEvent({
|
|
2292
|
+
type: `routine.${routine}`,
|
|
2293
|
+
source: typeof event.taskContext.source === "string"
|
|
2294
|
+
? event.taskContext.source
|
|
2295
|
+
: event.source,
|
|
2296
|
+
priority: EventPriority.NORMAL,
|
|
2297
|
+
correlationId: event.correlationId,
|
|
2298
|
+
data: {
|
|
2299
|
+
...event.taskContext,
|
|
2300
|
+
scheduleId: event.scheduleId ?? null,
|
|
2301
|
+
},
|
|
2302
|
+
}),
|
|
2303
|
+
routine,
|
|
2304
|
+
...(event.requestedModel ? { requestedModel: event.requestedModel } : {}),
|
|
2305
|
+
};
|
|
2306
|
+
try {
|
|
2307
|
+
await this.executeDefault(routineEvent);
|
|
2308
|
+
if (event.scheduleId) {
|
|
2309
|
+
this.db
|
|
2310
|
+
.prepare("UPDATE agent_schedule SET status = 'completed' WHERE id = ? AND status = 'running'")
|
|
2311
|
+
.run(event.scheduleId);
|
|
2312
|
+
}
|
|
2313
|
+
}
|
|
2314
|
+
catch (err) {
|
|
2315
|
+
if (event.scheduleId) {
|
|
2316
|
+
this.db
|
|
2317
|
+
.prepare("UPDATE agent_schedule SET status = 'failed' WHERE id = ? AND status = 'running'")
|
|
2318
|
+
.run(event.scheduleId);
|
|
2319
|
+
}
|
|
2320
|
+
throw err;
|
|
2321
|
+
}
|
|
2322
|
+
}
|
|
2323
|
+
/**
 * Handle a morning-routine retry wake task.
 *
 * Steps:
 * 1. Early skip: if today.md already exists (e.g., the cron-fired
 *    morning routine raced us to it), mark this wake task completed
 *    without running the agent — saves one Opus session.
 * 2. Synthesize a RoutineEvent with `event.data.retryCount` carrying
 *    the current attempt number, so that the recursive
 *    scheduleMorningRetry call from executeMorningRoutine can increment the
 *    retry chain naturally via the event.data code path.
 * 3. Invoke executeMorningRoutine — this reuses the full morning-routine flow
 *    (rotateDayFiles, prompt selection, agent execute, post-result
 *    today.md check, roadmap_refresh emission).
 * 4. Mark the wake task row completed. processResult inside the
 *    executeMorningRoutine call operates on the synthetic RoutineEvent, which
 *    is not an AgentTaskEvent, so it does not touch scheduleId — we
 *    must do it ourselves.
 *
 * @param event   the wake AgentTaskEvent (carries scheduleId, correlationId)
 * @param taskCtx parsed task_context from the schedule row (retryCount,
 *                originalCorrelationId, source, postCatchup* state)
 */
async handleMorningRoutineRetry(event, taskCtx) {
    const retryCount = Number(taskCtx.retryCount ?? 0);
    // O1: early skip only when the current agent day's today.md already exists
    if (this.hasCurrentAgentDayTodayMd()) {
        logger.info({
            retryCount,
            originalCorrelationId: taskCtx.originalCorrelationId,
        }, "Morning routine retry skipped — today.md already exists (cron likely raced us)");
        // Close out the wake task row without running the agent.
        if (event.scheduleId) {
            this.db
                .prepare("UPDATE agent_schedule SET status = 'completed' WHERE id = ? AND status = 'running'")
                .run(event.scheduleId);
        }
        return;
    }
    // Synthesize a RoutineEvent for executeMorningRoutine. event.data.retryCount
    // carries the previous attempt so executeMorningRoutine → scheduleMorningRetry
    // can increment properly. correlationId tracks back to the original
    // cron morning_routine for log correlation.
    const synthEvent = {
        ...createEvent({
            type: "routine.morning_routine",
            source: typeof taskCtx.source === "string"
                ? taskCtx.source
                : retryCount > 0
                    ? `morning_routine_retry_${retryCount}`
                    : "scheduled_morning_routine",
            // Retries run at NORMAL priority; only a first (non-retry)
            // scheduled run keeps HIGH priority.
            priority: retryCount > 0 ? EventPriority.NORMAL : EventPriority.HIGH,
            correlationId: taskCtx.originalCorrelationId ?? event.correlationId,
            data: {
                ...(retryCount > 0 ? { retryCount, isRetry: true } : {}),
                ...(Array.isArray(taskCtx.postCatchupRoutines)
                    ? { postCatchupRoutines: taskCtx.postCatchupRoutines }
                    : {}),
                ...(taskCtx.postCatchupHourlyCheck === true
                    ? { postCatchupHourlyCheck: true }
                    : {}),
                ...(typeof taskCtx.source === "string"
                    ? { queuedSource: taskCtx.source }
                    : {}),
            },
        }),
        routine: "morning_routine",
    };
    logger.info({ retryCount, correlationId: synthEvent.correlationId }, "Morning routine retry — routing to executeMorningRoutine with synthesized RoutineEvent");
    await this.executeMorningRoutine(synthEvent);
    // Mark the wake task row completed — executeMorningRoutine doesn't know about
    // scheduleId since it received a RoutineEvent, not an AgentTaskEvent.
    if (event.scheduleId) {
        this.db
            .prepare("UPDATE agent_schedule SET status = 'completed' WHERE id = ? AND status = 'running'")
            .run(event.scheduleId);
    }
}
|
|
2396
|
+
hasCurrentAgentDayTodayMd() {
|
|
2397
|
+
return this.diagnoseTodayMdState().kind === "fresh";
|
|
2398
|
+
}
|
|
2399
|
+
/**
|
|
2400
|
+
* Inspect today.md and report its state relative to the current agent-day.
|
|
2401
|
+
* Used by the post-routine retry gate so the log can distinguish between
|
|
2402
|
+
* "file is missing" and "file has stale H1 date", which are different
|
|
2403
|
+
* failure modes (process crash vs. format-confusion bug).
|
|
2404
|
+
*/
|
|
2405
|
+
diagnoseTodayMdState() {
|
|
2406
|
+
const todayPath = join(getContextDir(this.config, this.db), "today.md");
|
|
2407
|
+
if (!existsSync(todayPath)) {
|
|
2408
|
+
return { kind: "missing" };
|
|
2409
|
+
}
|
|
2410
|
+
const content = readFileSync(todayPath, "utf-8");
|
|
2411
|
+
const writtenDate = content.match(/^#.*(\d{4}-\d{2}-\d{2})/)?.[1];
|
|
2412
|
+
if (!writtenDate) {
|
|
2413
|
+
return { kind: "no_h1_date" };
|
|
2414
|
+
}
|
|
2415
|
+
const expectedAgentDay = getAgentDayDateStr(this.config.timezone || undefined, this.config.dayBoundaryHour);
|
|
2416
|
+
if (writtenDate !== expectedAgentDay) {
|
|
2417
|
+
return { kind: "wrong_date", writtenDate, expectedAgentDay };
|
|
2418
|
+
}
|
|
2419
|
+
return { kind: "fresh" };
|
|
2420
|
+
}
|
|
2421
|
+
/**
|
|
2422
|
+
* Rotate day files before Morning Routine:
|
|
2423
|
+
* 1. today.md → schedule/YYYY-MM-DD.md (archive)
|
|
2424
|
+
* 2. today.md → yesterday.md (rename for context injection)
|
|
2425
|
+
*
|
|
2426
|
+
* After this, ContextBuilder will read yesterday.md as <yesterday>
|
|
2427
|
+
* and today.md will not exist (agent generates it fresh).
|
|
2428
|
+
*/
|
|
2429
|
+
rotateDayFiles() {
|
|
2430
|
+
const contextDir = getContextDir(this.config, this.db);
|
|
2431
|
+
const todayPath = join(contextDir, "today.md");
|
|
2432
|
+
if (!existsSync(todayPath))
|
|
2433
|
+
return;
|
|
2434
|
+
const content = readFileSync(todayPath, "utf-8");
|
|
2435
|
+
const dateStr = content.match(/^#.*(\d{4}-\d{2}-\d{2})/)?.[1];
|
|
2436
|
+
// Skip if today.md is already today's date (no rotation needed)
|
|
2437
|
+
const todayDateStr = getAgentDayDateStr(this.config.timezone || undefined, this.config.dayBoundaryHour);
|
|
2438
|
+
if (dateStr === todayDateStr)
|
|
2439
|
+
return;
|
|
2440
|
+
if (!dateStr)
|
|
2441
|
+
return;
|
|
2442
|
+
// B-007 §5.9 — mechanical copy to schedule/ is retired. The only
|
|
2443
|
+
// rotation artifact we preserve is a DB snapshot of the closing
|
|
2444
|
+
// today.md; the synthesized `daily/YYYY-MM-DD.md` is written later by
|
|
2445
|
+
// the morning routine from yesterday.md + SQLite event records.
|
|
2446
|
+
// 1. Snapshot to DB for rebuild safety
|
|
2447
|
+
try {
|
|
2448
|
+
this.db
|
|
2449
|
+
.prepare("INSERT INTO md_file_snapshots (file_path, content, trigger) VALUES (?, ?, ?)")
|
|
2450
|
+
.run("today", content, "day_rotation");
|
|
2451
|
+
}
|
|
2452
|
+
catch (err) {
|
|
2453
|
+
logger.warn({ err }, "Failed to save rotation snapshot");
|
|
2454
|
+
}
|
|
2455
|
+
// 2. Rename today.md → yesterday.md
|
|
2456
|
+
const yesterdayPath = join(contextDir, CONTEXT_RELATIVE_PATHS.yesterday);
|
|
2457
|
+
renameSync(todayPath, yesterdayPath);
|
|
2458
|
+
logger.info({ archived: `schedule/${dateStr}.md` }, "Day files rotated");
|
|
2459
|
+
}
|
|
2460
|
+
/**
|
|
2461
|
+
* Lightweight execution path — delegates tier selection entirely to the
|
|
2462
|
+
* BackendRouter via process-key defaults (or user-configured
|
|
2463
|
+
* `process_backend_config` overrides). No hardcoded requestedTier.
|
|
2464
|
+
*/
|
|
2465
|
+
/**
|
|
2466
|
+
* Roadmap-refresh execution with an exclusive cross-request write
|
|
2467
|
+
* lock. The lockId is surfaced to the session context as
|
|
2468
|
+
* `<roadmap_write_lock_id>` so the task-flow PUT / PATCH calls can
|
|
2469
|
+
* pass `X-Lock-Id` and other concurrent flows (DM handler, evening
|
|
2470
|
+
* sweeper) that attempt to write `/api/context/roadmap` during the
|
|
2471
|
+
* refresh receive a 409.
|
|
2472
|
+
*
|
|
2473
|
+
* If the lock cannot be acquired (another session is mid-write), the
|
|
2474
|
+
* refresh is skipped — `emitRoadmapRefresh` will retry on the next
|
|
2475
|
+
* qualifying signal (dedup window permitting). This is the correct
|
|
2476
|
+
* behaviour: the holder is already producing a fresher roadmap than
|
|
2477
|
+
* anything we would emit right now.
|
|
2478
|
+
*/
|
|
2479
|
+
async executeRoadmapRefresh(event) {
|
|
2480
|
+
let lockId = null;
|
|
2481
|
+
let effectiveEvent = event;
|
|
2482
|
+
if (this.roadmapWriteLock) {
|
|
2483
|
+
const lock = this.roadmapWriteLock.acquire();
|
|
2484
|
+
if (!lock.ok) {
|
|
2485
|
+
logger.info({
|
|
2486
|
+
eventType: event.type,
|
|
2487
|
+
source: event.source,
|
|
2488
|
+
holder: lock.holder,
|
|
2489
|
+
}, "roadmap.md write lock held — skipping this refresh");
|
|
2490
|
+
return;
|
|
2491
|
+
}
|
|
2492
|
+
lockId = lock.lockId;
|
|
2493
|
+
effectiveEvent = {
|
|
2494
|
+
...event,
|
|
2495
|
+
data: {
|
|
2496
|
+
...event.data,
|
|
2497
|
+
roadmapWriteLockId: lockId,
|
|
2498
|
+
},
|
|
2499
|
+
};
|
|
2500
|
+
}
|
|
2501
|
+
try {
|
|
2502
|
+
await this.executeDefault(effectiveEvent);
|
|
2503
|
+
}
|
|
2504
|
+
finally {
|
|
2505
|
+
if (lockId && this.roadmapWriteLock) {
|
|
2506
|
+
this.roadmapWriteLock.release(lockId);
|
|
2507
|
+
}
|
|
2508
|
+
}
|
|
2509
|
+
}
|
|
2510
|
+
/**
|
|
2511
|
+
* P22 §3.4 — skill curation routine. Provisions an isolated optimizer
|
|
2512
|
+
* workdir, hands the runId + runToken into the agent's task context via
|
|
2513
|
+
* `event.data`, and tears the workdir down regardless of success/failure.
|
|
2514
|
+
*
|
|
2515
|
+
* The standard `executeDefault` path produces the agent session itself —
|
|
2516
|
+
* the only differences from a normal routine are: (a) the workdir is the
|
|
2517
|
+
* pre-built optimizer dir (built by `materializeOptimizerWorkdir`), and
|
|
2518
|
+
* (b) `executeDefault` recognises `routine.skill_curation` events and
|
|
2519
|
+
* pins `allowedToolsOverride` to `SKILL_CURATION_OPTIMIZER_ALLOWED_TOOLS`,
|
|
2520
|
+
* which the Claude SDK consumes verbatim and which suspends Allow-mode
|
|
2521
|
+
* `bypassPermissions`. The curation API's run-token + Zod chokepoint
|
|
2522
|
+
* remains the safety floor for the rare case the override is bypassed
|
|
2523
|
+
* (e.g. a future backend that doesn't read `allowedTools`).
|
|
2524
|
+
*/
|
|
2525
|
+
async executeSkillCurationRoutine(event) {
|
|
2526
|
+
if (!this.materializeOptimizerWorkdir)
|
|
2527
|
+
return;
|
|
2528
|
+
// P22 §6.4 — manual run flag rides on the routine event's `data.manual`
|
|
2529
|
+
// (set by `POST /api/skill-curation/runs/manual` from the dashboard).
|
|
2530
|
+
// Cadence-driven cron events have no `manual` key, so the default is
|
|
2531
|
+
// false — exactly the desired contract.
|
|
2532
|
+
const eventData = event.data ?? {};
|
|
2533
|
+
const manual = eventData.manual === true;
|
|
2534
|
+
const targetSkillsOverride = Array.isArray(eventData.target_skills)
|
|
2535
|
+
? eventData.target_skills
|
|
2536
|
+
: undefined;
|
|
2537
|
+
let workdir = null;
|
|
2538
|
+
try {
|
|
2539
|
+
workdir = await this.materializeOptimizerWorkdir({ manual, ...(targetSkillsOverride ? { targetSkillsOverride } : {}) });
|
|
2540
|
+
logger.info({ runId: workdir.runId, targetSkills: workdir.targetSkills, workdirPath: workdir.workdirPath, manual }, "Skill-curation optimizer run starting");
|
|
2541
|
+
// Inject the runId + token into the event so the agent core can pick
|
|
2542
|
+
// them up. The standard executor path runs from here.
|
|
2543
|
+
const enriched = {
|
|
2544
|
+
...event,
|
|
2545
|
+
data: {
|
|
2546
|
+
...event.data,
|
|
2547
|
+
skill_curation_run_id: workdir.runId,
|
|
2548
|
+
skill_curation_run_token: workdir.runToken,
|
|
2549
|
+
skill_curation_workdir: workdir.workdirPath,
|
|
2550
|
+
skill_curation_target_skills: workdir.targetSkills,
|
|
2551
|
+
},
|
|
2552
|
+
};
|
|
2553
|
+
await this.executeDefault(enriched);
|
|
2554
|
+
}
|
|
2555
|
+
catch (err) {
|
|
2556
|
+
logger.error({ err, runId: workdir?.runId }, "Skill-curation routine failed");
|
|
2557
|
+
throw err;
|
|
2558
|
+
}
|
|
2559
|
+
finally {
|
|
2560
|
+
if (workdir && this.teardownOptimizerWorkdir) {
|
|
2561
|
+
try {
|
|
2562
|
+
this.teardownOptimizerWorkdir(workdir.workdirPath);
|
|
2563
|
+
}
|
|
2564
|
+
catch (err) {
|
|
2565
|
+
logger.warn({ err, workdirPath: workdir.workdirPath }, "Skill-curation workdir teardown failed");
|
|
2566
|
+
}
|
|
2567
|
+
}
|
|
2568
|
+
}
|
|
2569
|
+
}
|
|
2570
|
+
async executeDefault(event) {
|
|
2571
|
+
const context = await this.contextBuilder.build(event);
|
|
2572
|
+
const processKey = resolveProcessKey(event);
|
|
2573
|
+
// Honour run-now's `requestedModel` hint for routine events. Other event
|
|
2574
|
+
// types (messages, scheduled.task) have their own dedicated paths that
|
|
2575
|
+
// already handle tier selection, so this branch is routine-only.
|
|
2576
|
+
const routineHint = isRoutineEvent(event) && event.requestedModel
|
|
2577
|
+
? event.requestedModel === "opus"
|
|
2578
|
+
? "high"
|
|
2579
|
+
: "medium"
|
|
2580
|
+
: undefined;
|
|
2581
|
+
// Knowledge-import events carry the dashboard form's backend/model
|
|
2582
|
+
// pick. Honor the (backendId, modelId) pair only when the event was
|
|
2583
|
+
// emitted by the dashboard route — same defense-in-depth gate as the
|
|
2584
|
+
// chat picker — so a malformed event from another path cannot pin a
|
|
2585
|
+
// specific model.
|
|
2586
|
+
const importOverride = isKnowledgeImportEvent(event)
|
|
2587
|
+
&& event.platform === "dashboard"
|
|
2588
|
+
&& event.requestedBackendId
|
|
2589
|
+
&& event.requestedModelId
|
|
2590
|
+
? {
|
|
2591
|
+
requestedBackendId: event.requestedBackendId,
|
|
2592
|
+
requestedModelId: event.requestedModelId,
|
|
2593
|
+
}
|
|
2594
|
+
: undefined;
|
|
2595
|
+
const binding = this.agentRouter.resolveBinding(event, {
|
|
2596
|
+
processKey,
|
|
2597
|
+
...(routineHint ? { requestedTier: routineHint } : {}),
|
|
2598
|
+
...(importOverride ?? {}),
|
|
2599
|
+
});
|
|
2600
|
+
const reassemblePrompt = (bid) => this.assemblePrompt(event.type, processKey, bid);
|
|
2601
|
+
const prompt = reassemblePrompt(binding.main.backendId);
|
|
2602
|
+
// P22 §3.4 step 4 — optimizer agent runs with a hard-clamped tool
|
|
2603
|
+
// envelope. The check is on event type rather than processKey so the
|
|
2604
|
+
// override is impossible to widen by accident from a downstream
|
|
2605
|
+
// dispatch refactor; the only path to skill_curation execution is
|
|
2606
|
+
// through `routine.skill_curation` events, which have no other code
|
|
2607
|
+
// path that strips the override.
|
|
2608
|
+
const skillCurationOverride = isRoutineEvent(event) && event.routine === "skill_curation"
|
|
2609
|
+
? SKILL_CURATION_OPTIMIZER_ALLOWED_TOOLS
|
|
2610
|
+
: undefined;
|
|
2611
|
+
const result = await this.executeWithRetry(() => this.agentRouter.execute({
|
|
2612
|
+
prompt,
|
|
2613
|
+
context,
|
|
2614
|
+
event,
|
|
2615
|
+
processKey,
|
|
2616
|
+
preResolvedBinding: binding,
|
|
2617
|
+
reassemblePrompt,
|
|
2618
|
+
...(skillCurationOverride
|
|
2619
|
+
? { allowedToolsOverride: skillCurationOverride }
|
|
2620
|
+
: {}),
|
|
2621
|
+
}), event);
|
|
2622
|
+
await this.processResult(result, event);
|
|
2623
|
+
}
|
|
2624
|
+
/**
 * Handle `/auth` prefix commands from owner DMs.
 * Phase 5 §4.2 (fix codex, cancel), Phase 6 §5.3 (fix gemini),
 * Phase 7 §6.1 (fix all).
 *
 * Matching is exact-string against the trimmed, lowercased message text,
 * so the source order of the `if` branches below is irrelevant (note
 * `/auth fix all` appears before `/auth fix gemini`).
 *
 * @param event - incoming DM message event; `event.content` is the text,
 *   and the event is also the reply target for `notificationMgr.send`.
 * @returns `true` if the message was consumed (caller should return),
 *   `false` when it was not an auth command or the needed auth subsystem
 *   (`this.authRecovery` / `this.authHealthMonitor`) is unavailable.
 */
async handleAuthCommand(event) {
    const text = event.content.trim().toLowerCase();
    // `/auth status` — show current auth state
    if (text === "/auth status") {
        // Monitor may be absent (not configured) — fall back to pointing
        // the user at the dashboard/API instead of failing.
        const summary = this.authHealthMonitor
            ? this.authHealthMonitor.renderStatusSummary()
            : "Check auth status on the dashboard or via `GET /api/backends`.";
        await this.notificationMgr.send(summary, event);
        return true;
    }
    // `/auth fix claude` — start Claude browser auth recovery (Phase 9)
    if (text === "/auth fix claude") {
        if (!this.authRecovery)
            return false;
        // Idempotence: re-issuing the command while a recovery is active
        // re-sends the existing auth URL rather than starting a new flow.
        if (this.authRecovery.isRecoveryActive("claude")) {
            const active = this.authRecovery.getActiveRecovery("claude");
            await this.notificationMgr.send(`Claude auth recovery already in progress.\n` +
                `URL: ${active?.authUrl}`, event);
            return true;
        }
        try {
            const recovery = await this.authRecovery.initiateClaudeAuth();
            await this.notificationMgr.send(`Claude auth recovery started.\n` +
                `Open the following URL in your browser to sign in:\n${recovery.authUrl}` +
                `\n(timeout in ${recovery.expiresMinutes} min)`, event);
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : "Unknown error";
            await this.notificationMgr.send(`Failed to start Claude auth recovery: ${msg}`, event);
        }
        return true;
    }
    // `/auth fix codex` — start Codex device auth recovery
    if (text === "/auth fix codex") {
        if (!this.authRecovery)
            return false;
        if (this.authRecovery.isRecoveryActive("codex")) {
            const active = this.authRecovery.getActiveRecovery("codex");
            await this.notificationMgr.send(`Codex auth recovery already in progress.\n` +
                `URL: ${active?.authUrl}\nCode: ${active?.userCode}`, event);
            return true;
        }
        try {
            const recovery = await this.authRecovery.initiateCodexDeviceAuth();
            // The recovery itself sends a notification with URL/code,
            // but also reply directly to the DM for immediate feedback.
            await this.notificationMgr.send(`Codex auth recovery started.\n` +
                `Open ${recovery.authUrl} in your browser and enter code ${recovery.userCode}.` +
                `\n(expires in ${recovery.expiresMinutes} min)`, event);
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : "Unknown error";
            await this.notificationMgr.send(`Failed to start Codex auth recovery: ${msg}`, event);
        }
        return true;
    }
    // `/auth fix all` — recover all expired backends sequentially
    if (text === "/auth fix all") {
        // Needs both subsystems: the monitor to enumerate expired backends
        // and the recovery manager to start each flow.
        if (!this.authRecovery || !this.authHealthMonitor)
            return false;
        const expired = this.authHealthMonitor.listExpiredBackends();
        if (expired.length === 0) {
            await this.notificationMgr.send("All backends are healthy. No recovery needed.", event);
            return true;
        }
        // One result line per backend; per-backend errors are collected
        // rather than aborting the whole sweep.
        const results = [];
        for (const bid of expired) {
            // Skip backends that already have an active recovery session
            if (this.authRecovery.isRecoveryActive(bid)) {
                results.push(`🔄 ${bid} — Recovery already in progress.`);
                continue;
            }
            try {
                if (bid === "claude") {
                    const recovery = await this.authRecovery.initiateClaudeAuth();
                    results.push(`✅ claude — Recovery started. Open the following URL in your browser to sign in:\n${recovery.authUrl}\n(timeout in ${recovery.expiresMinutes} min)`);
                }
                else if (bid === "codex") {
                    const recovery = await this.authRecovery.initiateCodexDeviceAuth();
                    results.push(`✅ codex — Recovery started. Open ${recovery.authUrl} in your browser and enter code ${recovery.userCode} (expires in ${recovery.expiresMinutes} min).`);
                }
                else if (bid === "gemini") {
                    const recovery = await this.authRecovery.initiateGeminiAuth();
                    results.push(`✅ gemini — Recovery started. Open the following URL in your browser and authenticate, then send the code here:\n${recovery.authUrl}\n(expires in ${recovery.expiresMinutes} min)`);
                }
                else {
                    results.push(`⚠️ ${bid} — No automated recovery available for this backend.`);
                }
            }
            catch (err) {
                const msg = err instanceof Error ? err.message : "Unknown error";
                results.push(`❌ ${bid} — Failed to start recovery: ${msg}`);
            }
        }
        const summary = this.authHealthMonitor.renderStatusSummary();
        await this.notificationMgr.send(`Auth recovery results:\n\n${results.join("\n\n")}\n\n---\n${summary}`, event);
        return true;
    }
    // `/auth fix gemini` — start Gemini OAuth recovery
    if (text === "/auth fix gemini") {
        if (!this.authRecovery)
            return false;
        if (this.authRecovery.isRecoveryActive("gemini")) {
            const active = this.authRecovery.getActiveRecovery("gemini");
            await this.notificationMgr.send(`Gemini auth recovery already in progress.\n` +
                `Open the following URL in your browser to authenticate:\n${active?.authUrl}\n` +
                `Then send the authorization code here.`, event);
            return true;
        }
        try {
            const recovery = await this.authRecovery.initiateGeminiAuth();
            await this.notificationMgr.send(`Gemini auth recovery started.\n` +
                `Open the following URL in your browser and sign in with your Google account:\n${recovery.authUrl}\n` +
                `Then send the authorization code here.` +
                `\n(expires in ${recovery.expiresMinutes} min)`, event);
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : "Unknown error";
            await this.notificationMgr.send(`Failed to start Gemini auth recovery: ${msg}`, event);
        }
        return true;
    }
    // `/auth cancel` — cancel active recovery
    // Bare `/auth cancel` cancels every active recovery; `/auth cancel <bid>`
    // (bid ∈ codex | gemini | claude) cancels just that backend's.
    if (text === "/auth cancel" || text.startsWith("/auth cancel ")) {
        if (!this.authRecovery)
            return false;
        const parts = text.split(/\s+/);
        const backendHint = parts[2];
        // Cancel all active recoveries, or a specific one
        let cancelled = false;
        for (const bid of ["codex", "gemini", "claude"]) {
            if (backendHint && bid !== backendHint)
                continue;
            if (this.authRecovery.cancelRecovery(bid))
                cancelled = true;
        }
        await this.notificationMgr.send(cancelled ? "Auth recovery cancelled." : "No active auth recovery to cancel.", event);
        return true;
    }
    // Not an auth command
    return false;
}
|
|
2772
|
+
async handleMessage(event) {
|
|
2773
|
+
// Bang-command interceptor — runs first so `!stop` / `!cost` / `!report`
|
|
2774
|
+
// succeed even mid-setup, mid-auth-recovery, etc., and so non-bang DMs
|
|
2775
|
+
// received while the agent is paused short-circuit before reaching the
|
|
2776
|
+
// backend (I-3). See docs/design/backlog/messaging-bang-commands.md §6.2.
|
|
2777
|
+
if (this.bangCommandRegistry) {
|
|
2778
|
+
const handled = await tryHandleBangCommand(this.bangCommandRegistry, {
|
|
2779
|
+
event,
|
|
2780
|
+
db: this.db,
|
|
2781
|
+
config: this.config,
|
|
2782
|
+
audit: this.audit,
|
|
2783
|
+
rawSend: (text) => this.notificationMgr.send(text, event),
|
|
2784
|
+
enqueueUserBangCommand: async (command, sourceEvent) => {
|
|
2785
|
+
await this.eventBus.put(createUserBangCommandEvent(sourceEvent, command));
|
|
2786
|
+
},
|
|
2787
|
+
});
|
|
2788
|
+
if (handled)
|
|
2789
|
+
return;
|
|
2790
|
+
}
|
|
2791
|
+
// Cross-platform DM lockout during setup.
|
|
2792
|
+
// The owner-DM scope is singular across platforms (Slack/Discord/Telegram/
|
|
2793
|
+
// WhatsApp/dashboard all share one conversation_sessions row). While a
|
|
2794
|
+
// dashboard setup conversation is in progress, a DM from any other
|
|
2795
|
+
// platform would otherwise be routed through the active `setup.initial`
|
|
2796
|
+
// / `setup.update` prompt — taking a Slack "ping" and feeding it to the
|
|
2797
|
+
// rules-generator agent. Reject non-dashboard DMs with a fixed message
|
|
2798
|
+
// so the user knows why we are stalling and where to finish setup.
|
|
2799
|
+
// Dashboard messages are exempt so the user can still progress setup.
|
|
2800
|
+
// Channel mentions (not DMs) are also exempt — they have their own
|
|
2801
|
+
// session scope and do not interact with the owner-DM row.
|
|
2802
|
+
if (event.isDm &&
|
|
2803
|
+
event.platform !== "dashboard" &&
|
|
2804
|
+
this.currentSetupMode !== null) {
|
|
2805
|
+
logger.info({ platform: event.platform, mode: this.currentSetupMode }, "Non-dashboard DM rejected — setup in progress");
|
|
2806
|
+
this.audit.logSkip(event, "setup_in_progress", "reactive");
|
|
2807
|
+
await this.notificationMgr.send("Setup is in progress. Please complete setup on the dashboard first, then try again.", event);
|
|
2808
|
+
return;
|
|
2809
|
+
}
|
|
2810
|
+
// Phase 6 §5.2: intercept Google OAuth auth codes during pending Gemini
|
|
2811
|
+
// recovery. Must come before `/auth` command check so the code isn't
|
|
2812
|
+
// treated as an unknown command or routed to the agent backend.
|
|
2813
|
+
if (event.isDm && this.authRecovery?.isRecoveryActive("gemini")) {
|
|
2814
|
+
const code = parseGeminiAuthCode(event.content);
|
|
2815
|
+
if (code) {
|
|
2816
|
+
try {
|
|
2817
|
+
const result = await this.authRecovery.handleGeminiAuthCode(code);
|
|
2818
|
+
const icon = result.ok ? "✅" : "❌";
|
|
2819
|
+
await this.notificationMgr.send(`${icon} Gemini auth: ${result.detail}`, event);
|
|
2820
|
+
}
|
|
2821
|
+
catch (err) {
|
|
2822
|
+
const msg = err instanceof Error ? err.message : "Unknown error";
|
|
2823
|
+
await this.notificationMgr.send(`Failed to process Gemini auth code: ${msg}`, event);
|
|
2824
|
+
}
|
|
2825
|
+
return;
|
|
2826
|
+
}
|
|
2827
|
+
}
|
|
2828
|
+
// Phase 5: intercept `/auth` commands before they reach the agent backend.
|
|
2829
|
+
// Gated on DM + at least one auth subsystem being available (/auth status
|
|
2830
|
+
// only needs the monitor; /auth fix needs the recovery manager).
|
|
2831
|
+
if (event.isDm && (this.authRecovery || this.authHealthMonitor)) {
|
|
2832
|
+
const authResult = await this.handleAuthCommand(event);
|
|
2833
|
+
if (authResult)
|
|
2834
|
+
return;
|
|
2835
|
+
}
|
|
2836
|
+
// Check for explicit close command before processing.
|
|
2837
|
+
// Use findActive (not getOrCreate) to avoid creating an orphan session.
|
|
2838
|
+
if (this.sessionMgr.isCloseCommand(event.content)) {
|
|
2839
|
+
const existing = await this.sessionMgr.findActive({
|
|
2840
|
+
platform: event.platform,
|
|
2841
|
+
channel: event.channel,
|
|
2842
|
+
threadId: event.threadId,
|
|
2843
|
+
isDm: event.isDm,
|
|
2844
|
+
intent: event.intent,
|
|
2845
|
+
});
|
|
2846
|
+
if (existing) {
|
|
2847
|
+
// recordMessage persists the row and touches
|
|
2848
|
+
// last_message_at/message_count in a single transaction, so
|
|
2849
|
+
// retention + dashboard sidebar stay consistent with the actual
|
|
2850
|
+
// `messages` row count. closeSession then flips status.
|
|
2851
|
+
this.messageRecorder.recordMessage({
|
|
2852
|
+
sessionId: existing.id,
|
|
2853
|
+
role: "user",
|
|
2854
|
+
content: event.content,
|
|
2855
|
+
platform: event.platform,
|
|
2856
|
+
senderId: event.sender,
|
|
2857
|
+
});
|
|
2858
|
+
this.sessionMgr.closeSession(existing.id);
|
|
2859
|
+
}
|
|
2860
|
+
await this.notificationMgr.send("Session closed.", event);
|
|
2861
|
+
return;
|
|
2862
|
+
}
|
|
2863
|
+
const replyActivity = await this.notificationMgr.beginReplyActivity(event);
|
|
2864
|
+
let turnToken = null;
|
|
2865
|
+
// STAGE-C-DM-FRESHNESS-PLAN §Task 4 — capture the turn-start reference
|
|
2866
|
+
// BEFORE any context_write/context_read row could be written during
|
|
2867
|
+
// this turn. Used as the upper bound when counting writes the agent
|
|
2868
|
+
// missed pre-resume, and as the lower bound when detecting whether
|
|
2869
|
+
// the agent issued a refetch during the current turn.
|
|
2870
|
+
const turnStartedAtSqlite = formatSqliteDatetime(new Date());
|
|
2871
|
+
try {
|
|
2872
|
+
// Docs-QA traffic is a side-channel that must never participate in
|
|
2873
|
+
// setup state. Two invariants enforced here:
|
|
2874
|
+
// 1. A docs_qa event with a smuggled `data.setupMode` must NOT
|
|
2875
|
+
// flip the dispatcher's global `currentSetupMode` — that would
|
|
2876
|
+
// hijack subsequent owner DMs into the rules-generator agent.
|
|
2877
|
+
// 2. A docs_qa event arriving while `currentSetupMode` is already
|
|
2878
|
+
// set (operator opens Docs QA in another tab during setup)
|
|
2879
|
+
// must still resolve via `dashboard.docs_qa` so TIER_LOCKED
|
|
2880
|
+
// fires and the QA workdir/skill set is materialized — not the
|
|
2881
|
+
// setup processKey/light tier/setup skill set. Without this
|
|
2882
|
+
// gate, the §11.2 promptKey fix would load the QA prompt while
|
|
2883
|
+
// the binding/workdir came from setup, producing an incoherent
|
|
2884
|
+
// "QA prompt + setup tools" execution.
|
|
2885
|
+
const eventSetupMode = event.data?.setupMode;
|
|
2886
|
+
const isDocsQA = isDocsQAMessage(event);
|
|
2887
|
+
if (eventSetupMode && this.currentSetupMode === null && !isDocsQA) {
|
|
2888
|
+
// Defensive sync — normally `/setup/start` has already called
|
|
2889
|
+
// beginSetupMode, but this keeps prompt selection consistent even if
|
|
2890
|
+
// a future caller bypasses the helper and only sets event.data.
|
|
2891
|
+
this.beginSetupMode(eventSetupMode);
|
|
2892
|
+
}
|
|
2893
|
+
const setupMode = isDocsQA
|
|
2894
|
+
? null
|
|
2895
|
+
: (eventSetupMode ?? this.currentSetupMode);
|
|
2896
|
+
const processKey = setupMode === "initial" || setupMode === "update"
|
|
2897
|
+
? "setup"
|
|
2898
|
+
: resolveProcessKey(event);
|
|
2899
|
+
// Honor the dashboard chat model picker. MessageEvent.requestedModel
|
|
2900
|
+
// and the (requestedBackendId, requestedModelId) pair are only
|
|
2901
|
+
// populated by the dashboard adapter (see POST /chat/messages in
|
|
2902
|
+
// api/routes/sse.ts); other platforms never set them. Defense-in-depth:
|
|
2903
|
+
// even if a future adapter were to set them, we gate on platform here
|
|
2904
|
+
// so Slack/Telegram/Discord/WhatsApp can never force a specific model
|
|
2905
|
+
// through these fields. Setup mode also ignores them — setup runs on
|
|
2906
|
+
// the configured setup process key regardless of the user's pick.
|
|
2907
|
+
//
|
|
2908
|
+
// When both the explicit (backendId, modelId) pair and the legacy
|
|
2909
|
+
// requestedModel are set, the pair wins: it is the superset that
|
|
2910
|
+
// supports all backends and models, not just Claude sonnet/opus.
|
|
2911
|
+
const honorOverride = (event.platform === "dashboard" || event.source === CUSTOM_BANG_COMMAND_SOURCE)
|
|
2912
|
+
&& !setupMode;
|
|
2913
|
+
const requestedTier = honorOverride && event.requestedModel
|
|
2914
|
+
? event.requestedModel === "sonnet"
|
|
2915
|
+
? "medium"
|
|
2916
|
+
: "high"
|
|
2917
|
+
: undefined;
|
|
2918
|
+
const overrideBackendId = honorOverride && event.requestedBackendId && event.requestedModelId
|
|
2919
|
+
? event.requestedBackendId
|
|
2920
|
+
: undefined;
|
|
2921
|
+
const overrideModelId = honorOverride && event.requestedBackendId && event.requestedModelId
|
|
2922
|
+
? event.requestedModelId
|
|
2923
|
+
: undefined;
|
|
2924
|
+
const route = this.agentRouter.resolveBinding(event, {
|
|
2925
|
+
processKey,
|
|
2926
|
+
...(requestedTier ? { requestedTier } : {}),
|
|
2927
|
+
...(overrideBackendId && overrideModelId
|
|
2928
|
+
? { requestedBackendId: overrideBackendId, requestedModelId: overrideModelId }
|
|
2929
|
+
: {}),
|
|
2930
|
+
});
|
|
2931
|
+
const session = await this.sessionMgr.getOrCreate({
|
|
2932
|
+
platform: event.platform,
|
|
2933
|
+
channel: event.channel,
|
|
2934
|
+
threadId: event.threadId,
|
|
2935
|
+
isDm: event.isDm,
|
|
2936
|
+
intent: event.intent,
|
|
2937
|
+
requiredBackend: route.main.backendId,
|
|
2938
|
+
requiredModel: route.main.modelId,
|
|
2939
|
+
});
|
|
2940
|
+
const forwardContextAvailable = this.hasRecentProactiveForwardContext(event, session.id);
|
|
2941
|
+
// Custom messaging bang command (`!commandname`): the owner's
|
|
2942
|
+
// saved row carries an opt-in skill set + an optional custom
|
|
2943
|
+
// profile body. We forward those to `ensureSessionWorkdir` as a
|
|
2944
|
+
// re-materialize override so the agent runs with the row's
|
|
2945
|
+
// configuration for THIS turn. The override forces re-write of
|
|
2946
|
+
// CLAUDE.md / AGENTS.md / GEMINI.md and the skill dirs even when
|
|
2947
|
+
// the workdir already exists (regular DMs share the same dir).
|
|
2948
|
+
// The next regular DM turn detects the bang stamp file written
|
|
2949
|
+
// by `ensureSessionWorkdir` and re-materializes back to manifest
|
|
2950
|
+
// defaults — keeping `!cmd` configurations from leaking into a
|
|
2951
|
+
// natural conversation that follows.
|
|
2952
|
+
const customBangCommand = this.lookupCustomBangCommandForEvent(event);
|
|
2953
|
+
const workdirOverride = customBangCommand
|
|
2954
|
+
? {
|
|
2955
|
+
skillSlugs: [...resolveCommandSkillSlugs(customBangCommand)],
|
|
2956
|
+
profileBody: customBangCommand.instructionMd,
|
|
2957
|
+
}
|
|
2958
|
+
: undefined;
|
|
2959
|
+
// Skip the owner-channel pairing record for docs_qa: the QA panel
|
|
2960
|
+
// is not a messaging-app surface and would otherwise clutter
|
|
2961
|
+
// /connections/messaging with synthetic "dashboard" pairings.
|
|
2962
|
+
//
|
|
2963
|
+
// `pendingConnectorWarnings` is captured here so both the resume and
|
|
2964
|
+
// fresh-execute branches below can call the §4.5 DM dispatch via
|
|
2965
|
+
// `dispatchPendingConnectorHealth()` AFTER each branch's user-message
|
|
2966
|
+
// recordMessage — the dispatch's persist must follow the user message
|
|
2967
|
+
// in DB-timestamp order or the dashboard's chat_meta history reload
|
|
2968
|
+
// reorders the bubbles.
|
|
2969
|
+
let pendingConnectorWarnings = [];
|
|
2970
|
+
const dispatchPendingConnectorHealth = () => {
|
|
2971
|
+
if (pendingConnectorWarnings.length === 0)
|
|
2972
|
+
return;
|
|
2973
|
+
this.runDelegatedConnectorWarningDispatch(pendingConnectorWarnings, event, route.main.backendId, session.id);
|
|
2974
|
+
};
|
|
2975
|
+
if (event.isDm && !isDocsQAMessage(event)) {
|
|
2976
|
+
upsertOwnerChannel(this.db, {
|
|
2977
|
+
platform: event.platform,
|
|
2978
|
+
senderId: event.sender,
|
|
2979
|
+
channelId: event.channel,
|
|
2980
|
+
metadata: { threadId: event.threadId },
|
|
2981
|
+
touchInbound: true,
|
|
2982
|
+
});
|
|
2983
|
+
// DELEGATED-MODE-V2-DESIGN.md §4.5 — at every DM dispatch, consult
|
|
2984
|
+
// the cached probe for delegated integrations whose effective
|
|
2985
|
+
// backend matches the session backend. If the cached probe shows
|
|
2986
|
+
// missing required capabilities (the wizard / a future periodic
|
|
2987
|
+
// re-probe wrote `present=false`), fire a one-shot DM warning the
|
|
2988
|
+
// owner that same-backend mode is non-functional. The helper
|
|
2989
|
+
// dedupes via `runtime_state` so resume-vs-fresh-execute do not
|
|
2990
|
+
// spam the user. Cheap, synchronous DB-only inspection — runs on
|
|
2991
|
+
// the hot path so the warning lands before the agent's reply.
|
|
2992
|
+
//
|
|
2993
|
+
// Skipped while the dispatcher is in setup mode: the wizard's
|
|
2994
|
+
// background `probeLive` call may have just landed a `present=false`
|
|
2995
|
+
// row for a connector the user is in the middle of authorising, and
|
|
2996
|
+
// a DM telling them to "Re-authorize from your … connector
|
|
2997
|
+
// settings, then re-run the integration probe from the dashboard"
|
|
2998
|
+
// is wrong-tense for the in-flight setup conversation. The §10
|
|
2999
|
+
// post-setup sign-out scenario the check exists for fires correctly
|
|
3000
|
+
// on the first DM after `clearSetupMode` runs.
|
|
3001
|
+
//
|
|
3002
|
+
// Two-phase: consult the cached probe NOW (synchronous DB read),
|
|
3003
|
+
// but defer the actual DM dispatch + dashboard messages-table
|
|
3004
|
+
// persist until both branches below have recorded the inbound user
|
|
3005
|
+
// message. Otherwise the warning's persist row carries a
|
|
3006
|
+
// CURRENT_TIMESTAMP that lands BEFORE the user-message row's, and
|
|
3007
|
+
// the dashboard's chat_meta history reload re-orders the bubbles
|
|
3008
|
+
// (warning above user) — a one-time UX flicker.
|
|
3009
|
+
pendingConnectorWarnings =
|
|
3010
|
+
this.currentSetupMode === null
|
|
3011
|
+
? this.consultDelegatedConnectorWarnings(route.main.backendId)
|
|
3012
|
+
: [];
|
|
3013
|
+
}
|
|
3014
|
+
// `event.channel` is captured at the moment the user POSTed their
|
|
3015
|
+
// message. If the tab navigates away and reconnects, the SSE route
|
|
3016
|
+
// calls `rebindSessionChannel` to update `conversation_sessions.
|
|
3017
|
+
// channel_id` to the new UUID — but our closure here still holds
|
|
3018
|
+
// the old value. `resolveDashboardChannel` reads the live DB value
|
|
3019
|
+
// on every send so stream/meta/info/error events reach whichever
|
|
3020
|
+
// tab is currently connected for this session.
|
|
3021
|
+
const resolveDashboardChannel = () => this.sessionMgr.getActiveChannelIdForSession(session.id) ?? event.channel;
|
|
3022
|
+
// Send resolved model info + DB session ID to dashboard so the
|
|
3023
|
+
// sidebar badge is accurate and the frontend can persist the session.
|
|
3024
|
+
if (event.platform === "dashboard" && this.dashboardStream?.sendSessionInfo) {
|
|
3025
|
+
this.dashboardStream.sendSessionInfo(resolveDashboardChannel(), {
|
|
3026
|
+
sessionId: session.id,
|
|
3027
|
+
model: route.main.modelId,
|
|
3028
|
+
backend: route.main.backendId,
|
|
3029
|
+
modelLabel: getModelLabel(route.main.backendId, route.main.modelId),
|
|
3030
|
+
});
|
|
3031
|
+
}
|
|
3032
|
+
// Feed user message to SignalDetector for implicit feedback
|
|
3033
|
+
// detection. Docs-QA messages are docs lookups, not feedback
|
|
3034
|
+
// signals, so they bypass the detector entirely.
|
|
3035
|
+
if (!isDocsQAMessage(event)) {
|
|
3036
|
+
this.signalDetector?.onUserMessage({
|
|
3037
|
+
platform: event.platform,
|
|
3038
|
+
content: event.content,
|
|
3039
|
+
});
|
|
3040
|
+
}
|
|
3041
|
+
// Create stream callbacks for dashboard events (real-time SSE text).
|
|
3042
|
+
// Each callback re-resolves the channel on invocation so a user
|
|
3043
|
+
// who navigates away and returns mid-execute still receives the
|
|
3044
|
+
// tail of the stream on their new tab.
|
|
3045
|
+
let didStream = false;
|
|
3046
|
+
const streamCb = event.platform === "dashboard" && this.dashboardStream
|
|
3047
|
+
? {
|
|
3048
|
+
onText: (text) => {
|
|
3049
|
+
didStream = true;
|
|
3050
|
+
this.dashboardStream.sendStreamChunk(resolveDashboardChannel(), text);
|
|
3051
|
+
},
|
|
3052
|
+
onEnd: () => {
|
|
3053
|
+
this.dashboardStream.sendStreamEnd(resolveDashboardChannel());
|
|
3054
|
+
},
|
|
3055
|
+
}
|
|
3056
|
+
: undefined;
|
|
3057
|
+
// Chat-attachments Phase 1 — issue a per-turn capability token the
|
|
3058
|
+
// agent's `attach` skill will present via `X-Turn-Token`. Valid only
|
|
3059
|
+
// while this turn is running; always cleared in the outer `finally`
|
|
3060
|
+
// below so leakage is bounded to the lifetime of the turn.
|
|
3061
|
+
turnToken = this.attachmentStore
|
|
3062
|
+
? this.issueAttachmentTurnToken(session.id)
|
|
3063
|
+
: null;
|
|
3064
|
+
// Can we resume an existing SDK session?
|
|
3065
|
+
// Resume whenever this conversation already has a stored SDK session.
|
|
3066
|
+
// Never resume on the FIRST message of a new setup — event.data.setupMode means
|
|
3067
|
+
// "start a new setup", not "continue an existing one".
|
|
3068
|
+
//
|
|
3069
|
+
// Also require the session's persistent workdir to exist on disk. If
|
|
3070
|
+
// it was removed out of band (manual cleanup, stale-workdir scanner
|
|
3071
|
+
// bug, disk failure), attempting to resume would land the SDK in a
|
|
3072
|
+
// freshly-created empty directory with no CLAUDE.md / AGENTS.md /
|
|
3073
|
+
// skills tree, producing confusing output. Fall back to the fresh-
|
|
3074
|
+
// execute branch, which re-materializes the workdir via
|
|
3075
|
+
// `ensureSessionWorkdir`.
|
|
3076
|
+
const isNewSetupStart = !!event.data?.setupMode;
|
|
3077
|
+
const existingSessionDirPresent = session.isActive
|
|
3078
|
+
&& existsSync(getSessionWorkdirPath(this.config.dataDir, session.id));
|
|
3079
|
+
const canResume = session.isActive
|
|
3080
|
+
&& session.sessionId
|
|
3081
|
+
&& existingSessionDirPresent
|
|
3082
|
+
&& !isNewSetupStart;
|
|
3083
|
+
if (session.isActive && session.sessionId && !existingSessionDirPresent) {
|
|
3084
|
+
logger.warn({ sessionId: session.id }, "Session marked resumable but workdir missing — falling back to fresh execute");
|
|
3085
|
+
}
|
|
3086
|
+
let result;
|
|
3087
|
+
let userMessageId = null;
|
|
3088
|
+
// STAGE-C-DM-FRESHNESS-PLAN §Task 2 — `<turn_context>` is injected on
|
|
3089
|
+
// resume only. The resume payload is the bare user-message text; the
|
|
3090
|
+
// SDK's cached system prompt holds the original `<current_time>` and
|
|
3091
|
+
// the snapshot anchored by `<today snapshot_at="...">` (Task 1), both
|
|
3092
|
+
// frozen at session start. Without a per-turn fresh-clock anchor, the
|
|
3093
|
+
// model cannot compute "how stale is my snapshot" and answers from
|
|
3094
|
+
// an out-of-date view of `## Agent Log`. On the fresh-execute branch,
|
|
3095
|
+
// the system prompt's `<current_time>` is built at the moment of
|
|
3096
|
+
// dispatch — adding `<turn_context>` there would be redundant AND
|
|
3097
|
+
// would diverge the prompt prefix per turn, defeating prompt caching.
|
|
3098
|
+
// If a future change rebuilds `<today>` mid-session, this code must
|
|
3099
|
+
// be revisited because `started_at` would no longer be the snapshot
|
|
3100
|
+
// reference.
|
|
3101
|
+
let resumeTurnContext = null;
|
|
3102
|
+
let resumeSnapshotAgeMinutes = 0;
|
|
3103
|
+
if (canResume) {
|
|
3104
|
+
// ── Resume existing SDK session ──
|
|
3105
|
+
const proactiveForwardContext = forwardContextAvailable
|
|
3106
|
+
? await this.contextBuilder.build(event)
|
|
3107
|
+
: null;
|
|
3108
|
+
const userMsgRecorded = this.messageRecorder.recordMessage({
|
|
3109
|
+
sessionId: session.id,
|
|
3110
|
+
role: "user",
|
|
3111
|
+
content: event.content,
|
|
3112
|
+
platform: event.platform,
|
|
3113
|
+
senderId: event.sender,
|
|
3114
|
+
});
|
|
3115
|
+
if (userMsgRecorded) {
|
|
3116
|
+
userMessageId = this.readLastInsertedMessageId(session.id);
|
|
3117
|
+
}
|
|
3118
|
+
// Compute the freshness anchors for this resumed turn. `started_at`
|
|
3119
|
+
// is the moment `<today>` was captured (the fresh-execute branch
|
|
3120
|
+
// builds the system prompt then). Reading from the session row
|
|
3121
|
+
// (rather than the in-memory `session` value) keeps this side-
|
|
3122
|
+
// effect-free: the row was just fetched by `getOrCreate` and is
|
|
3123
|
+
// authoritative.
|
|
3124
|
+
const turnNow = new Date();
|
|
3125
|
+
const sessionTimingRow = this.db
|
|
3126
|
+
.prepare(`SELECT started_at FROM conversation_sessions WHERE id = ?`)
|
|
3127
|
+
.get(session.id);
|
|
3128
|
+
const sessionStartedAtSqlite = sessionTimingRow?.started_at ?? null;
|
|
3129
|
+
const sessionStartedAtMs = sessionStartedAtSqlite
|
|
3130
|
+
? parseSqliteUtcMs(sessionStartedAtSqlite)
|
|
3131
|
+
: turnNow.getTime();
|
|
3132
|
+
resumeSnapshotAgeMinutes = Math.max(0, Math.round((turnNow.getTime() - sessionStartedAtMs) / 60_000));
|
|
3133
|
+
resumeTurnContext =
|
|
3134
|
+
`<turn_context current_time="${turnNow.toISOString()}" `
|
|
3135
|
+
+ `snapshot_age_minutes="${resumeSnapshotAgeMinutes}" />`;
|
|
3136
|
+
// §4.5 connector-health DM is dispatched AFTER recordMessage so the
|
|
3137
|
+
// warning's messages-table row carries a strictly-later timestamp
|
|
3138
|
+
// than the user message. See `consultDelegatedConnectorWarnings`.
|
|
3139
|
+
dispatchPendingConnectorHealth();
|
|
3140
|
+
const sessionDir = ensureSessionWorkdir(this.config.workspaceDir, this.config.dataDir, session.id, event.type, {
|
|
3141
|
+
backendId: session.backend ?? "claude",
|
|
3142
|
+
processKey: route.processKey,
|
|
3143
|
+
configuredServices: this.getConfiguredServices(),
|
|
3144
|
+
mailAccounts: this.getActiveMailAccounts(),
|
|
3145
|
+
integrations: readIntegrations(this.db),
|
|
3146
|
+
character: this.config.character,
|
|
3147
|
+
...(workdirOverride ? { override: workdirOverride } : {}),
|
|
3148
|
+
});
|
|
3149
|
+
// Sync user-authored skills into the workdir before resuming, so any
|
|
3150
|
+
// skill added/edited/deleted via /api/skills since the last turn is
|
|
3151
|
+
// visible to the SDK's `.claude/skills/` discovery. Cheap and idempotent.
|
|
3152
|
+
syncAllUserSkills(sessionDir, join(this.config.dataDir, "skills"));
|
|
3153
|
+
// Phase 1 — stage inbound attachments + bind rows + append
|
|
3154
|
+
// bracketed prompt block. For resume we can't prepend to the
|
|
3155
|
+
// task-flow template (there isn't one on this path), so the
|
|
3156
|
+
// attachment block is appended to the user's message text. A
|
|
3157
|
+
// Claude SDK `query()` call sees `prompt` as a single string, so
|
|
3158
|
+
// this is the only surface available.
|
|
3159
|
+
const resumeStaged = isMessageEvent(event)
|
|
3160
|
+
? this.stageInboundAttachments(event, sessionDir)
|
|
3161
|
+
: [];
|
|
3162
|
+
if (resumeStaged.length > 0 && userMessageId !== null && this.attachmentStore) {
|
|
3163
|
+
this.attachmentStore.bindInbound({
|
|
3164
|
+
attachmentIds: resumeStaged.map((r) => r.id),
|
|
3165
|
+
sessionId: session.id,
|
|
3166
|
+
messageId: userMessageId,
|
|
3167
|
+
});
|
|
3168
|
+
}
|
|
3169
|
+
const resumeTranscripts = await this.transcribeAttachments(resumeStaged);
|
|
3170
|
+
const resumeMessage = resumeStaged.length > 0
|
|
3171
|
+
? `${event.content}\n${this.buildAttachmentPromptBlock(resumeStaged, resumeTranscripts)}`
|
|
3172
|
+
: event.content;
|
|
3173
|
+
const resumeMessageWithForwardContext = proactiveForwardContext
|
|
3174
|
+
? `${resumeTurnContext}\n\n${proactiveForwardContext}\n\n<current_user_message>\n${resumeMessage}\n</current_user_message>`
|
|
3175
|
+
: `${resumeTurnContext}\n\n${resumeMessage}`;
|
|
3176
|
+
const resumeStagedForBackend = resumeStaged.length > 0
|
|
3177
|
+
? resumeStaged.map((row) => ({
|
|
3178
|
+
id: row.id,
|
|
3179
|
+
safeFilename: row.safeFilename,
|
|
3180
|
+
mimeType: row.mimeType,
|
|
3181
|
+
absolutePath: `${sessionDir}/_attachments/${row.safeFilename}`,
|
|
3182
|
+
relativePath: `_attachments/${row.safeFilename}`,
|
|
3183
|
+
}))
|
|
3184
|
+
: [];
|
|
3185
|
+
result = await this.executeWithRetry(() => this.agentRouter.executeResume({
|
|
3186
|
+
backendId: session.backend ?? "claude",
|
|
3187
|
+
sessionId: session.sessionId,
|
|
3188
|
+
message: resumeMessageWithForwardContext,
|
|
3189
|
+
modelId: route.main.modelId,
|
|
3190
|
+
maxTurns: route.main.maxTurns,
|
|
3191
|
+
maxBudgetUsd: route.main.maxBudgetUsd,
|
|
3192
|
+
sessionDir,
|
|
3193
|
+
sessionDbId: session.id,
|
|
3194
|
+
eventCorrelationId: event.correlationId,
|
|
3195
|
+
...(turnToken ? { turnToken } : {}),
|
|
3196
|
+
...(resumeStagedForBackend.length > 0
|
|
3197
|
+
? { stagedAttachments: resumeStagedForBackend }
|
|
3198
|
+
: {}),
|
|
3199
|
+
}, streamCb), event);
|
|
3200
|
+
}
|
|
3201
|
+
else {
|
|
3202
|
+
// ── Fresh execute ──
|
|
3203
|
+
// Docs-QA branches FIRST. Without this gate, `event.isDm` would
|
|
3204
|
+
// route the QA event into the generic DM task flow and the
|
|
3205
|
+
// agent would run without the QA system prompt (citation
|
|
3206
|
+
// enforcement, search budget, "no write tools"). The
|
|
3207
|
+
// `dashboard.docs_qa` task flow lives at
|
|
3208
|
+
// agent-assets/task-flows/dashboard.docs_qa.md.
|
|
3209
|
+
const promptKey = isDocsQAMessage(event)
|
|
3210
|
+
? "dashboard.docs_qa"
|
|
3211
|
+
: setupMode === "initial"
|
|
3212
|
+
? "setup.initial"
|
|
3213
|
+
: setupMode === "update"
|
|
3214
|
+
? "setup.update"
|
|
3215
|
+
: event.isDm && !session.isActive
|
|
3216
|
+
? "message.received.dm_first"
|
|
3217
|
+
: event.isDm
|
|
3218
|
+
? "message.received.dm"
|
|
3219
|
+
: event.type;
|
|
3220
|
+
const context = await this.contextBuilder.build(event);
|
|
3221
|
+
// Setup flows route through processKey="setup" for backend binding,
|
|
3222
|
+
// but the workdir must materialize with the mode-specific processKey
|
|
3223
|
+
// so `setup.update` doesn't inherit `setup.initial`'s skill set via
|
|
3224
|
+
// PROCESS_TO_EVENT_TYPE["setup"]="setup.initial".
|
|
3225
|
+
const workdirEventType = setupMode ? `setup.${setupMode}` : promptKey;
|
|
3226
|
+
const workdirProcessKey = setupMode
|
|
3227
|
+
? `setup.${setupMode}`
|
|
3228
|
+
: route.processKey;
|
|
3229
|
+
const reassemblePrompt = (bid) => this.assemblePrompt(promptKey, route.processKey, bid);
|
|
3230
|
+
const prompt = reassemblePrompt(route.main.backendId);
|
|
3231
|
+
// DMs need persistent workdirs/session ids for real resume semantics.
|
|
3232
|
+
// Channel/thread conversations only persist high-tier sessions.
|
|
3233
|
+
const shouldPersistSessionState = event.isDm || route.resolvedTier === "high";
|
|
3234
|
+
const sessionDir = shouldPersistSessionState
|
|
3235
|
+
? ensureSessionWorkdir(this.config.workspaceDir, this.config.dataDir, session.id, workdirEventType, {
|
|
3236
|
+
backendId: route.main.backendId,
|
|
3237
|
+
processKey: workdirProcessKey,
|
|
3238
|
+
configuredServices: this.getConfiguredServices(),
|
|
3239
|
+
mailAccounts: this.getActiveMailAccounts(),
|
|
3240
|
+
integrations: readIntegrations(this.db),
|
|
3241
|
+
character: this.config.character,
|
|
3242
|
+
...(workdirOverride ? { override: workdirOverride } : {}),
|
|
3243
|
+
})
|
|
3244
|
+
: undefined;
|
|
3245
|
+
// Re-sync user skills on every Opus message. ensureSessionWorkdir is
|
|
3246
|
+
// idempotent and skips the copy step on subsequent calls, so without
|
|
3247
|
+
// this explicit sync a skill created mid-session (via POST /api/skills)
|
|
3248
|
+
// would never reach the session's `.claude/skills/` tree and the SDK
|
|
3249
|
+
// wouldn't discover it. The sync is a cheap diff operation backed by
|
|
3250
|
+
// a manifest file inside the workdir.
|
|
3251
|
+
if (sessionDir) {
|
|
3252
|
+
syncAllUserSkills(sessionDir, join(this.config.dataDir, "skills"));
|
|
3253
|
+
}
|
|
3254
|
+
// Docs-QA sessions are stateless lookups (DOCS_QA_B7_DESIGN.md
|
|
3255
|
+
// §11.6 — "QA panel state lives in React state, not the DB").
|
|
3256
|
+
// After a docs_qa session reset (day boundary, model switch),
|
|
3257
|
+
// session-manager's `requiresHistoryInjection` would still fire
|
|
3258
|
+
// because prior messages exist in the docs_qa scope; without
|
|
3259
|
+
// this gate they'd bleed back into the prompt as cross-session
|
|
3260
|
+
// history, contradicting the stateless contract and silently
|
|
3261
|
+
// ballooning the QA token budget across days.
|
|
3262
|
+
const conversationHistory = session.requiresHistoryInjection && !isDocsQAMessage(event)
|
|
3263
|
+
? this.buildCrossSessionConversationHistory(event)
|
|
3264
|
+
: null;
|
|
3265
|
+
// Record user message AFTER context/history build (avoids injecting
|
|
3266
|
+
// the current turn into cross-session history) but BEFORE execute
|
|
3267
|
+
// (ensures DB has the message even if execute crashes).
|
|
3268
|
+
const freshUserMsgRecorded = this.messageRecorder.recordMessage({
|
|
3269
|
+
sessionId: session.id,
|
|
3270
|
+
role: "user",
|
|
3271
|
+
content: event.content,
|
|
3272
|
+
platform: event.platform,
|
|
3273
|
+
senderId: event.sender,
|
|
3274
|
+
});
|
|
3275
|
+
if (freshUserMsgRecorded) {
|
|
3276
|
+
userMessageId = this.readLastInsertedMessageId(session.id);
|
|
3277
|
+
}
|
|
3278
|
+
// §4.5 connector-health DM is dispatched AFTER recordMessage so the
|
|
3279
|
+
// warning's messages-table row carries a strictly-later timestamp
|
|
3280
|
+
// than the user message. See `consultDelegatedConnectorWarnings`.
|
|
3281
|
+
dispatchPendingConnectorHealth();
|
|
3282
|
+
// Phase 1 — stage inbound attachments + bind rows + append
|
|
3283
|
+
// bracketed prompt block to the prompt body.
|
|
3284
|
+
const freshStaged = isMessageEvent(event)
|
|
3285
|
+
? this.stageInboundAttachments(event, sessionDir)
|
|
3286
|
+
: [];
|
|
3287
|
+
if (freshStaged.length > 0 && userMessageId !== null && this.attachmentStore) {
|
|
3288
|
+
this.attachmentStore.bindInbound({
|
|
3289
|
+
attachmentIds: freshStaged.map((r) => r.id),
|
|
3290
|
+
sessionId: session.id,
|
|
3291
|
+
messageId: userMessageId,
|
|
3292
|
+
});
|
|
3293
|
+
}
|
|
3294
|
+
const freshTranscripts = await this.transcribeAttachments(freshStaged);
|
|
3295
|
+
const executePrompt = freshStaged.length > 0
|
|
3296
|
+
? `${prompt}\n${this.buildAttachmentPromptBlock(freshStaged, freshTranscripts)}`
|
|
3297
|
+
: prompt;
|
|
3298
|
+
// DMs should always persist backend sessions so same-session resume and
|
|
3299
|
+
// dashboard history continue do not fall back to history reinjection.
|
|
3300
|
+
const persistSession = shouldPersistSessionState;
|
|
3301
|
+
const freshStagedForBackend = freshStaged.length > 0 && sessionDir
|
|
3302
|
+
? freshStaged.map((row) => ({
|
|
3303
|
+
id: row.id,
|
|
3304
|
+
safeFilename: row.safeFilename,
|
|
3305
|
+
mimeType: row.mimeType,
|
|
3306
|
+
absolutePath: `${sessionDir}/_attachments/${row.safeFilename}`,
|
|
3307
|
+
relativePath: `_attachments/${row.safeFilename}`,
|
|
3308
|
+
}))
|
|
3309
|
+
: [];
|
|
3310
|
+
result = await this.executeWithRetry(() => this.agentRouter.execute({
|
|
3311
|
+
prompt: executePrompt,
|
|
3312
|
+
context,
|
|
3313
|
+
event,
|
|
3314
|
+
processKey: setupMode === "initial" || setupMode === "update"
|
|
3315
|
+
? "setup"
|
|
3316
|
+
: resolveProcessKey(event),
|
|
3317
|
+
sessionDir,
|
|
3318
|
+
sessionDbId: session.id,
|
|
3319
|
+
persistSession,
|
|
3320
|
+
conversationHistory: conversationHistory ?? undefined,
|
|
3321
|
+
preResolvedBinding: route,
|
|
3322
|
+
workdirEventType,
|
|
3323
|
+
workdirProcessKey,
|
|
3324
|
+
reassemblePrompt,
|
|
3325
|
+
...(turnToken ? { turnToken } : {}),
|
|
3326
|
+
...(freshStagedForBackend.length > 0
|
|
3327
|
+
? { stagedAttachments: freshStagedForBackend }
|
|
3328
|
+
: {}),
|
|
3329
|
+
}, streamCb), event);
|
|
3330
|
+
// Store SDK sessionId for future resume, including normal owner DMs.
|
|
3331
|
+
if (persistSession && result.sessionId) {
|
|
3332
|
+
await this.sessionMgr.updateSession(session.id, result.sessionId, result.modelId ?? result.model, result.backendId);
|
|
3333
|
+
}
|
|
3334
|
+
else if (persistSession && !result.sessionId) {
|
|
3335
|
+
// Successful DM/heavy execute, but the backend didn't emit a
|
|
3336
|
+
// resumable session id (observed with certain Gemini CLI
|
|
3337
|
+
// streams where the `init` event fired without `session_id`).
|
|
3338
|
+
// The row keeps its previous `backend_session_id` (possibly
|
|
3339
|
+
// NULL) and the next turn will fall through to fresh-execute
|
|
3340
|
+
// + history injection — still resumable from the sidebar via
|
|
3341
|
+
// the relaxed gate. Log so this stops being invisible.
|
|
3342
|
+
logger.warn({
|
|
3343
|
+
sessionId: session.id,
|
|
3344
|
+
backend: result.backendId,
|
|
3345
|
+
model: result.modelId ?? result.model,
|
|
3346
|
+
}, "Execute completed without a backend session id — next resume will rebuild via history injection");
|
|
3347
|
+
}
|
|
3348
|
+
}
|
|
3349
|
+
// Record assistant response. `recordMessage` also bumps the
|
|
3350
|
+
// session's `last_message_at` and `message_count` in the same
|
|
3351
|
+
// transaction, so nothing else needs to touch the session row here.
|
|
3352
|
+
let assistantMessageId = null;
|
|
3353
|
+
let assistantOutput = result.output.trim();
|
|
3354
|
+
// Docs-QA persistence-side citation validator (DOCS_QA_B7_DESIGN.md
|
|
3355
|
+
// §11.1). The streaming side runs in DocsQAAdapter.sendStreamChunk;
|
|
3356
|
+
// this one-shot pass guarantees the persisted `messages.content`
|
|
3357
|
+
// matches what the dashboard rendered on reload — without it, an
|
|
3358
|
+
// invalid `[doc:slug]` token would be stripped from the SSE wire
|
|
3359
|
+
// but reappear in history. Slug-missing tokens are also logged to
|
|
3360
|
+
// `agent_actions(action_type='qa_invalid_citation')`.
|
|
3361
|
+
if (isDocsQAMessage(event)
|
|
3362
|
+
&& this.docsCitationLookup
|
|
3363
|
+
&& assistantOutput.length > 0) {
|
|
3364
|
+
const validation = validateAndRewrite(assistantOutput, this.docsCitationLookup);
|
|
3365
|
+
assistantOutput = validation.text;
|
|
3366
|
+
logInvalidCitations(this.db, validation, { sessionId: session.id });
|
|
3367
|
+
}
|
|
3368
|
+
if (assistantOutput.length > 0) {
|
|
3369
|
+
const persisted = this.messageRecorder.recordMessage({
|
|
3370
|
+
sessionId: session.id,
|
|
3371
|
+
role: "assistant",
|
|
3372
|
+
content: assistantOutput,
|
|
3373
|
+
platform: event.platform,
|
|
3374
|
+
backend: result.backendId,
|
|
3375
|
+
modelId: result.modelId ?? result.model,
|
|
3376
|
+
});
|
|
3377
|
+
if (persisted) {
|
|
3378
|
+
assistantMessageId = this.readLastInsertedMessageId(session.id);
|
|
3379
|
+
if (forwardContextAvailable) {
|
|
3380
|
+
this.logProactiveForwardDisavowalIfMatched(session.id, assistantOutput);
|
|
3381
|
+
}
|
|
3382
|
+
}
|
|
3383
|
+
if (!persisted && event.platform === "dashboard" && this.dashboardStream?.sendError) {
|
|
3384
|
+
// The agent produced a response but we couldn't persist it. The
|
|
3385
|
+
// dashboard tab has no other signal that the turn finished —
|
|
3386
|
+
// without this inline surfacing the user would watch the reply
|
|
3387
|
+
// stream in, then hit the 120s waiting timeout on refresh with
|
|
3388
|
+
// no history row to reconcile against. Tell them directly.
|
|
3389
|
+
this.dashboardStream.sendError(resolveDashboardChannel(), "The agent's reply could not be saved. Please try again.");
|
|
3390
|
+
}
|
|
3391
|
+
}
|
|
3392
|
+
else {
|
|
3393
|
+
// Agent returned no output — send error feedback so the user isn't left waiting
|
|
3394
|
+
const errorMsg = "Could not generate a response. Please try again.";
|
|
3395
|
+
logger.warn({ sessionId: session.id, isError: result.isError, stopReason: result.stopReason }, "Agent returned empty output for message event");
|
|
3396
|
+
this.messageRecorder.recordMessage({
|
|
3397
|
+
sessionId: session.id,
|
|
3398
|
+
role: "assistant",
|
|
3399
|
+
content: errorMsg,
|
|
3400
|
+
platform: event.platform,
|
|
3401
|
+
backend: result.backendId,
|
|
3402
|
+
modelId: result.modelId ?? result.model,
|
|
3403
|
+
});
|
|
3404
|
+
// Send error to dashboard chat so the user sees it inline
|
|
3405
|
+
if (event.platform === "dashboard" && this.dashboardStream?.sendError) {
|
|
3406
|
+
this.dashboardStream.sendError(resolveDashboardChannel(), errorMsg);
|
|
3407
|
+
}
|
|
3408
|
+
await this.notificationMgr.send(errorMsg, event);
|
|
3409
|
+
}
|
|
3410
|
+
// Send message metadata to dashboard for per-message footer display.
|
|
3411
|
+
// This is also the client's cue to refetch history after a mid-execute
|
|
3412
|
+
// reconnect — the chunks that arrived before the user reopened the tab
|
|
3413
|
+
// were dropped into the old channel, so the live messages state may be
|
|
3414
|
+
// missing content that is already in the DB.
|
|
3415
|
+
if (event.platform === "dashboard" && this.dashboardStream?.sendMessageMeta) {
|
|
3416
|
+
this.dashboardStream.sendMessageMeta(resolveDashboardChannel(), {
|
|
3417
|
+
backend: result.backendId,
|
|
3418
|
+
model: result.modelId ?? result.model,
|
|
3419
|
+
durationMs: result.durationMs,
|
|
3420
|
+
costUsd: result.costUsd,
|
|
3421
|
+
});
|
|
3422
|
+
}
|
|
3423
|
+
// Update session-level model info with actual execution result.
|
|
3424
|
+
// This corrects the pre-execution estimate when fallback kicked in,
|
|
3425
|
+
// and pushes the cumulative costUsd to the sidebar badge.
|
|
3426
|
+
if (event.platform === "dashboard" && this.dashboardStream?.sendSessionInfo) {
|
|
3427
|
+
const actualModel = result.modelId ?? result.model;
|
|
3428
|
+
const actualBackend = result.backendId ?? route.main.backendId;
|
|
3429
|
+
this.dashboardStream.sendSessionInfo(resolveDashboardChannel(), {
|
|
3430
|
+
model: actualModel,
|
|
3431
|
+
backend: actualBackend,
|
|
3432
|
+
modelLabel: getModelLabel(actualBackend, actualModel),
|
|
3433
|
+
costUsd: result.costUsd,
|
|
3434
|
+
});
|
|
3435
|
+
}
|
|
3436
|
+
// Chat-attachments Phase 1 — collect outbound files the agent
|
|
3437
|
+
// produced during this turn and deliver them via the originating
|
|
3438
|
+
// adapter. Currently only the Dashboard adapter delivers outbound
|
|
3439
|
+
// attachments on-wire; other platforms ignore the `attachments`
|
|
3440
|
+
// field until Phase 2.
|
|
3441
|
+
if (turnToken
|
|
3442
|
+
&& this.attachmentStore
|
|
3443
|
+
&& assistantMessageId !== null
|
|
3444
|
+
&& assistantOutput.length > 0) {
|
|
3445
|
+
const outboundRows = this.attachmentStore.collectOutboundForTurn({
|
|
3446
|
+
turnToken,
|
|
3447
|
+
sessionId: session.id,
|
|
3448
|
+
});
|
|
3449
|
+
if (outboundRows.length > 0) {
|
|
3450
|
+
for (const row of outboundRows) {
|
|
3451
|
+
this.attachmentStore.bindOutboundToMessage(row.id, assistantMessageId);
|
|
3452
|
+
}
|
|
3453
|
+
if (event.platform === "dashboard" && this.dashboardStream?.sendAttachments) {
|
|
3454
|
+
this.dashboardStream.sendAttachments(resolveDashboardChannel(), outboundRows.map((row) => ({
|
|
3455
|
+
id: row.id,
|
|
3456
|
+
originalFilename: row.originalFilename,
|
|
3457
|
+
mimeType: row.mimeType,
|
|
3458
|
+
sizeBytes: row.sizeBytes,
|
|
3459
|
+
...(row.caption ? { caption: row.caption } : {}),
|
|
3460
|
+
})));
|
|
3461
|
+
}
|
|
3462
|
+
}
|
|
3463
|
+
}
|
|
3464
|
+
// STAGE-C-DM-FRESHNESS-PLAN §Task 4 — collect the per-turn DM
|
|
3465
|
+
// freshness telemetry before notification + audit. Limited to DM
|
|
3466
|
+
// events: the metric only makes sense for the resume-or-fresh-
|
|
3467
|
+
// execute decision the message dispatch makes. We compute counts
|
|
3468
|
+
// bounded by the captured `turnStartedAtSqlite` so writes the
|
|
3469
|
+
// agent itself made during THIS turn are not folded back in.
|
|
3470
|
+
const dmFreshness = event.isDm
|
|
3471
|
+
? this.collectDmFreshnessTelemetry({
|
|
3472
|
+
sessionId: session.id,
|
|
3473
|
+
canResume: Boolean(canResume),
|
|
3474
|
+
resumeSnapshotAgeMinutes,
|
|
3475
|
+
turnStartedAtSqlite,
|
|
3476
|
+
userContent: event.content,
|
|
3477
|
+
})
|
|
3478
|
+
: undefined;
|
|
3479
|
+
// Skip notification if we already streamed (avoids duplicate message)
|
|
3480
|
+
await this.processResult(result, event, didStream, {
|
|
3481
|
+
originSessionId: session.id,
|
|
3482
|
+
...(dmFreshness ? { dmFreshness } : {}),
|
|
3483
|
+
});
|
|
3484
|
+
}
|
|
3485
|
+
finally {
|
|
3486
|
+
// Always release the turn token, even on error paths. Any outbound
|
|
3487
|
+
// rows the agent posted that weren't collected above fall into the
|
|
3488
|
+
// orphan reaper's domain on the next daemon restart.
|
|
3489
|
+
if (turnToken) {
|
|
3490
|
+
this.releaseAttachmentTurnToken(turnToken);
|
|
3491
|
+
this.attachmentStore?.releaseTurnToken(turnToken);
|
|
3492
|
+
}
|
|
3493
|
+
await replyActivity.stop();
|
|
3494
|
+
}
|
|
3495
|
+
}
|
|
3496
|
+
/**
|
|
3497
|
+
* Mark an in-flight event as having sent a user-facing notification via
|
|
3498
|
+
* `POST /api/notify`. Called by the API layer when the route handler sees
|
|
3499
|
+
* an `X-Pa-Event-Correlation-Id` header (auto-injected by the shim
|
|
3500
|
+
* env). `processResult` consumes the entry to suppress the implicit
|
|
3501
|
+
* final-text DM forward.
|
|
3502
|
+
*/
|
|
3503
|
+
markEventNotified(correlationId) {
|
|
3504
|
+
if (correlationId) {
|
|
3505
|
+
this.notifiedEvents.add(correlationId);
|
|
3506
|
+
}
|
|
3507
|
+
}
|
|
3508
|
+
/**
 * STAGE-C-DM-FRESHNESS-PLAN §Task 4 — assemble the DM-only freshness
 * telemetry payload that gets persisted into `agent_actions.detail`.
 * Pulled into its own helper so the message-dispatch path stays
 * readable and so unit tests can exercise the SQL aggregation in
 * isolation.
 *
 * @param {object} input - per-turn inputs: `sessionId`,
 *   `turnStartedAtSqlite`, `canResume`, `resumeSnapshotAgeMinutes`,
 *   `userContent`.
 * @returns {object} telemetry record (resume flag, lag minutes,
 *   loud/quiet write counts, refetch flag, trigger match).
 */
collectDmFreshnessTelemetry(input) {
    // The write-count window spans [session start, turn start).
    const sessionRow = this.db
        .prepare(`SELECT started_at FROM conversation_sessions WHERE id = ?`)
        .get(input.sessionId);
    // Fall back to turnStart so a missing started_at yields zero counts
    // instead of poisoning the aggregation with a wide-open lower bound.
    const sessionStartedAtSqlite = sessionRow?.started_at ?? input.turnStartedAtSqlite;
    const writeCounts = countContextWritesInWindow(this.db, sessionStartedAtSqlite, input.turnStartedAtSqlite);
    // Bound the refetch window at "now" so a context_read that lands
    // AFTER this turn's executeWithRetry returns (e.g. from a future
    // parallel dispatcher, an unrelated routine, or a dashboard reload)
    // is not wrongly attributed to this turn.
    const turnEndSqlite = formatSqliteDatetime(new Date());
    const refetchedToday = didRefetchTodayDuringTurn(this.db, input.turnStartedAtSqlite, turnEndSqlite);
    return {
        resumed: input.canResume,
        // Fresh-execute branch sets resumeSnapshotAgeMinutes=0 by default;
        // that's the correct lag because the system prompt's <today> was
        // built at this very turn.
        agentLogLagMinutes: input.canResume ? input.resumeSnapshotAgeMinutes : 0,
        loudWritesSinceSessionStart: writeCounts.loud,
        quietWritesSinceSessionStart: writeCounts.quiet,
        refetchedToday,
        triggerMatched: matchesRecentActivityTrigger(input.userContent),
    };
}
|
|
3541
|
+
/**
 * Success-path finalizer for an event run: optionally forwards the
 * agent's final text as a notification, writes the audit row, logs
 * observer-event observability, settles the `agent_schedule` row, and
 * runs the retemplate / repository-management finalize hooks.
 *
 * @param {object} result - backend execution result (output, model,
 *   usage, cost, turn count, isError, etc.).
 * @param {object} event - the dispatched event.
 * @param {boolean} [skipNotify=false] - caller-forced notification suppression.
 * @param {object} [options={}] - optional `originSessionId` and
 *   `dmFreshness` telemetry passthrough.
 */
async processResult(result, event, skipNotify = false, options = {}) {
    // Notify-dedup: consume the marker (if present) so this method also
    // serves as the cleanup point — every event run reaches processResult
    // exactly once on the success path, and on the error path the entry
    // is harmless (next event gets a fresh UUID).
    const alreadyNotified = this.notifiedEvents.delete(event.correlationId);
    const output = result.output.trim();
    // Forward the final text only when: not suppressed, not already
    // notified via /api/notify, non-empty, and the event class is
    // notification-eligible (see shouldNotify).
    if (!skipNotify
        && !alreadyNotified
        && output.length > 0
        && this.shouldNotify(event)) {
        // Pass originSessionId only when the caller supplied one — an
        // explicit `{ originSessionId: undefined }` must not reach the
        // notification manager's options object.
        if (options.originSessionId !== undefined) {
            await this.notificationMgr.send(output, event, {
                originSessionId: options.originSessionId,
            });
        }
        else {
            await this.notificationMgr.send(output, event);
        }
    }
    // Audit row — written unconditionally, even when notification was
    // suppressed, so cost/usage accounting never depends on notify state.
    this.audit.logAction({
        event,
        model: result.model,
        costUsd: result.costUsd,
        usage: result.usage,
        modelUsage: result.modelUsage,
        durationMs: result.durationMs,
        numTurns: result.numTurns,
        trigger: this.isReactive(event) ? "reactive" : "autonomous",
        backend: result.backendId,
        costSource: result.costSource,
        contextUpdated: result.contextUpdated,
        advisorCallCount: result.advisorCallCount,
        ...(options.dmFreshness ? { dmFreshness: options.dmFreshness } : {}),
    });
    // Observer-event observability: log whether an external-change
    // event actually produced a context-file update. Makes it obvious
    // from the logs when the pipeline ran but the agent decided the
    // change wasn't actionable. Covers every autonomous observer:
    // - calendar.* / schedule.approaching (calendar observer)
    // - github.* (GitHub poller high-priority events)
    // - git.* (git watcher batched events)
    // - notion.* (notion poller)
    // - routine.hourly_check (Phase-9 polling sink for obsidian/git/notion)
    if (this.isObserverEvent(event)) {
        logger.info({
            eventType: event.type,
            source: event.source,
            contextUpdated: result.contextUpdated,
            numTurns: result.numTurns,
            costUsd: result.costUsd,
        }, result.contextUpdated
            ? "Observer event processed — context files updated"
            : "Observer event processed — no context updates");
    }
    // Mark scheduled task as completed or failed (covers both
    // scheduled.task and scheduled.dm — both share the agent_schedule
    // row lifecycle). The `status = 'running'` guard makes the update
    // idempotent against concurrent settlement.
    if (isScheduledEvent(event) && event.scheduleId) {
        const newStatus = result.isError ? "failed" : "completed";
        this.db
            .prepare("UPDATE agent_schedule SET status = ? WHERE id = ? AND status = 'running'")
            .run(newStatus, event.scheduleId);
        this.finalizeRetemplateRunIfApplicable(event, { errored: result.isError });
    }
    // Repository-management events from the daily cron and the manual
    // /api/repositories/:id/management/{init,scan} routes are
    // `scheduled.task` events emitted directly to the EventBus (no
    // `agent_schedule` row), so the finalize hook lives outside the
    // `scheduleId` guard above. The finalizer is a no-op for any event
    // whose taskContext doesn't match the management ProcessKey set.
    this.finalizeManagementScanIfApplicable(event, { errored: result.isError });
}
|
|
3614
|
+
/**
|
|
3615
|
+
* P6 (git-lifecycle-and-triggers.md Decision 8) — restore in-flight
|
|
3616
|
+
* `git.project.retemplate` targets from backup whenever a retemplate
|
|
3617
|
+
* scheduled task settles. The agent itself cannot reliably roll back
|
|
3618
|
+
* its own writes (process exit, exceeded turns, backend faults), so
|
|
3619
|
+
* the daemon owns rollback at the dispatcher's two terminal sites
|
|
3620
|
+
* (`processResult` + `handleError`). The status grid is the source of
|
|
3621
|
+
* truth for which files to restore; `finalizeRetemplate` is idempotent
|
|
3622
|
+
* via the `finalizedAt` marker so calling both paths is safe.
|
|
3623
|
+
*/
|
|
3624
|
+
finalizeRetemplateRunIfApplicable(event, options) {
|
|
3625
|
+
if (!isAgentTaskEvent(event) || !event.scheduleId)
|
|
3626
|
+
return;
|
|
3627
|
+
const taskCtx = event.taskContext;
|
|
3628
|
+
const processKey = taskCtx
|
|
3629
|
+
&& typeof taskCtx === "object"
|
|
3630
|
+
&& typeof taskCtx.processKey === "string"
|
|
3631
|
+
? taskCtx.processKey
|
|
3632
|
+
: null;
|
|
3633
|
+
if (processKey !== "git.project.retemplate")
|
|
3634
|
+
return;
|
|
3635
|
+
try {
|
|
3636
|
+
const result = finalizeRetemplate({
|
|
3637
|
+
db: this.db,
|
|
3638
|
+
contextDir: getContextDir(this.config, this.db),
|
|
3639
|
+
scheduleId: event.scheduleId,
|
|
3640
|
+
errored: options.errored,
|
|
3641
|
+
});
|
|
3642
|
+
if (result.applied && result.rolledBackSlugs.length > 0) {
|
|
3643
|
+
logger.info({
|
|
3644
|
+
scheduleId: event.scheduleId,
|
|
3645
|
+
rolledBack: result.rolledBackSlugs,
|
|
3646
|
+
finalStatus: result.finalStatus,
|
|
3647
|
+
}, "Re-template run finalized — rolled back in-flight files from backup");
|
|
3648
|
+
}
|
|
3649
|
+
}
|
|
3650
|
+
catch (err) {
|
|
3651
|
+
logger.error({ err, scheduleId: event.scheduleId }, "Failed to finalize re-template run");
|
|
3652
|
+
}
|
|
3653
|
+
}
|
|
3654
|
+
/**
|
|
3655
|
+
* Unified-repositories §4.5 — settle a `repository_management` row when
|
|
3656
|
+
* a `git.project.init` / `git.project.update` event the daemon emitted
|
|
3657
|
+
* for management terminates. Runs unconditionally on every event
|
|
3658
|
+
* because management events live on the EventBus only (no
|
|
3659
|
+
* `agent_schedule` row), so the scheduleId-guarded path can't see
|
|
3660
|
+
* them; the method early-returns for any taskContext that doesn't
|
|
3661
|
+
* carry management metadata.
|
|
3662
|
+
*
|
|
3663
|
+
* Status mapping (v1):
|
|
3664
|
+
* - `git.project.init` success → `recordManagementInitDone`
|
|
3665
|
+
* - `git.project.update` success → `recordManagementScan('ok')`
|
|
3666
|
+
* (resets `scan_failure_count`)
|
|
3667
|
+
* - either, error → `recordManagementScan('failed')`
|
|
3668
|
+
* (bumps `scan_failure_count`)
|
|
3669
|
+
*
|
|
3670
|
+
* `'skipped_no_activity'` is reserved for future task-flow callback —
|
|
3671
|
+
* the dispatcher cannot reliably distinguish "agent decided no
|
|
3672
|
+
* journal entry needed" from "agent succeeded but didn't write" here.
|
|
3673
|
+
*/
|
|
3674
|
+
finalizeManagementScanIfApplicable(event, options) {
|
|
3675
|
+
if (!isAgentTaskEvent(event))
|
|
3676
|
+
return;
|
|
3677
|
+
const taskCtx = event.taskContext;
|
|
3678
|
+
if (!taskCtx || typeof taskCtx !== "object")
|
|
3679
|
+
return;
|
|
3680
|
+
const ctx = taskCtx;
|
|
3681
|
+
const processKey = typeof ctx.processKey === "string" ? ctx.processKey : null;
|
|
3682
|
+
const repositoryId = typeof ctx.repositoryId === "string" ? ctx.repositoryId : null;
|
|
3683
|
+
const triggerSource = typeof ctx.triggerSource === "string" ? ctx.triggerSource : null;
|
|
3684
|
+
if (!processKey || !repositoryId || !triggerSource)
|
|
3685
|
+
return;
|
|
3686
|
+
// Only management-emitted events should mutate `repository_management`.
|
|
3687
|
+
// Trigger-fired sessions (`triggerSource === 'repository_trigger'`)
|
|
3688
|
+
// share `processKey` in some cases but must not flip the management
|
|
3689
|
+
// row — they have their own observability (`fire_count`).
|
|
3690
|
+
if (triggerSource !== "repository_management_cron"
|
|
3691
|
+
&& triggerSource !== "repository_management_manual") {
|
|
3692
|
+
return;
|
|
3693
|
+
}
|
|
3694
|
+
try {
|
|
3695
|
+
if (processKey === "git.project.init") {
|
|
3696
|
+
if (!options.errored) {
|
|
3697
|
+
recordManagementInitDone(this.db, repositoryId);
|
|
3698
|
+
}
|
|
3699
|
+
else {
|
|
3700
|
+
recordManagementScan(this.db, repositoryId, "failed");
|
|
3701
|
+
}
|
|
3702
|
+
}
|
|
3703
|
+
else if (processKey === "git.project.update") {
|
|
3704
|
+
recordManagementScan(this.db, repositoryId, options.errored ? "failed" : "ok");
|
|
3705
|
+
}
|
|
3706
|
+
}
|
|
3707
|
+
catch (err) {
|
|
3708
|
+
logger.error({ err, repositoryId, processKey, errored: options.errored }, "Failed to finalize repository management state");
|
|
3709
|
+
}
|
|
3710
|
+
}
|
|
3711
|
+
/**
 * Returns true when the current DM turn has a recent proactive-forward
 * message in scope: first checks the active session's recent messages,
 * then (within a configurable window) the paired "other surface"
 * scope — owner DM ↔ dashboard chat — so a forward shown on one surface
 * still counts as context on the other.
 *
 * @param {object} event - incoming message event (must be a DM; docs-QA
 *   messages are excluded).
 * @param {string} sessionId - active conversation session id.
 * @returns {boolean}
 */
hasRecentProactiveForwardContext(event, sessionId) {
    if (!event.isDm || isDocsQAMessage(event))
        return false;
    // Fast path: scan the most recent messages of the active session.
    const activeRows = this.db
        .prepare(`SELECT metadata
              FROM messages
              WHERE session_id = ?
              ORDER BY timestamp DESC, id DESC
              LIMIT ?`)
        .all(sessionId, this.config.historyInjectionMaxMessages ?? 20);
    if (activeRows.some((row) => isProactiveForwardMetadata(parseMessageMetadata(row.metadata)))) {
        return true;
    }
    // A non-finite or non-positive window disables the cross-surface probe.
    const windowMinutes = this.config.historyOtherSurfaceWindowMinutes ?? 1440;
    if (!Number.isFinite(windowMinutes) || windowMinutes <= 0)
        return false;
    const { scope } = getConversationScope({
        platform: event.platform,
        channel: event.channel,
        threadId: event.threadId,
        isDm: true,
        intent: event.intent,
    });
    // Map each DM surface to its counterpart; any other scope has no
    // paired surface and the probe ends here.
    const other = scope === OWNER_DM_SCOPE
        ? { scope: DASHBOARD_CHAT_SCOPE, scopeKey: DASHBOARD_SCOPE_KEY }
        : scope === DASHBOARD_CHAT_SCOPE
            ? { scope: OWNER_DM_SCOPE, scopeKey: OWNER_SCOPE_KEY }
            : null;
    if (!other)
        return false;
    const sinceUtc = formatSqliteDatetime(new Date(Date.now() - windowMinutes * 60_000));
    // Only active sessions on the other surface within the window; LIMIT
    // 60 bounds the metadata-parse work.
    const otherRows = this.db
        .prepare(`SELECT m.metadata
              FROM messages m
              JOIN conversation_sessions s ON m.session_id = s.id
              WHERE s.scope = ?
                AND s.scope_key = ?
                AND s.status = 'active'
                AND m.timestamp >= ?
              ORDER BY m.timestamp DESC, m.id DESC
              LIMIT 60`)
        .all(other.scope, other.scopeKey, sinceUtc);
    return otherRows.some((row) => isProactiveForwardMetadata(parseMessageMetadata(row.metadata)));
}
|
|
3755
|
+
/**
 * If the agent's reply matches one of the disavowal patterns
 * (PROACTIVE_FORWARD_DISAVOWAL_PATTERNS), record a
 * `proactive_forward_disavowed` row in `agent_actions` for
 * observability. Best-effort: insert failures are logged and swallowed.
 *
 * @param {string} sessionId - session the reply belongs to.
 * @param {string} reply - the assistant reply text to scan.
 */
logProactiveForwardDisavowalIfMatched(sessionId, reply) {
    const matchedPattern = PROACTIVE_FORWARD_DISAVOWAL_PATTERNS.find((pattern) => pattern.test(reply));
    if (!matchedPattern)
        return;
    try {
        // Detail payload keeps a bounded excerpt (240 chars) plus the
        // pattern source so the match is auditable without storing the
        // whole reply.
        this.db
            .prepare(`INSERT INTO agent_actions (
            action_type, trigger, result, detail, started_at
          )
          VALUES (
            'proactive_forward_disavowed',
            'reactive',
            'success',
            ?,
            CURRENT_TIMESTAMP
          )`)
            .run(JSON.stringify({
            sessionId,
            replyExcerpt: reply.slice(0, 240),
            matchedPattern: matchedPattern.source,
        }));
    }
    catch (err) {
        logger.warn({ err, sessionId }, "Failed to log proactive forward disavowal");
    }
}
|
|
3781
|
+
formatSummaryRole(message) {
|
|
3782
|
+
const metadata = parseMessageMetadata(message.metadata);
|
|
3783
|
+
if (message.role === "assistant" && isProactiveForwardMetadata(metadata)) {
|
|
3784
|
+
return "assistant (forwarded from autonomous run)";
|
|
3785
|
+
}
|
|
3786
|
+
return message.role;
|
|
3787
|
+
}
|
|
3788
|
+
/**
 * Build the markdown "Previous conversation in this thread" block that
 * gets injected into the prompt: an optional earlier summary (owner DM
 * scope only) plus up to 20 recent messages from any session in the
 * same conversation scope, rendered oldest-first with backend/model
 * tags and a forwarded-from-autonomous-run suffix where applicable.
 *
 * @param {object} event - incoming event used to derive the scope.
 * @returns {string|null} the joined markdown block, or null when there
 *   is neither a summary nor any messages.
 */
buildCrossSessionConversationHistory(event) {
    const { scope, scopeKey } = getConversationScope({
        platform: event.platform,
        channel: event.channel,
        threadId: event.threadId,
        isDm: event.isDm,
        intent: event.intent,
    });
    // Earlier-summary injection is owner-DM-only.
    const summary = event.isDm && scope === OWNER_DM_SCOPE
        ? this.sessionMgr.getPreviousDmSummary(OWNER_SCOPE_KEY)
        : null;
    // Older schemas lack the backend/model_id columns; the fallback
    // statement synthesizes NULLs so the row shape stays uniform.
    const statement = this.hasMessageBackendMetadataColumns
        ? this.db.prepare(`SELECT m.role, m.content, m.timestamp, m.metadata, m.backend, m.model_id
           FROM messages m
           JOIN conversation_sessions s ON m.session_id = s.id
           WHERE s.scope = ? AND s.scope_key = ?
           ORDER BY m.timestamp DESC, m.id DESC
           LIMIT 20`)
        : this.db.prepare(`SELECT m.role, m.content, m.timestamp, m.metadata,
                  NULL AS backend,
                  NULL AS model_id
           FROM messages m
           JOIN conversation_sessions s ON m.session_id = s.id
           WHERE s.scope = ? AND s.scope_key = ?
           ORDER BY m.timestamp DESC, m.id DESC
           LIMIT 20`);
    const rows = statement
        .all(scope, scopeKey);
    if (!summary && rows.length === 0) {
        return null;
    }
    const parts = ["## Previous conversation in this thread"];
    if (summary) {
        parts.push("", "### Earlier summary", summary);
    }
    if (rows.length > 0) {
        parts.push("", "### Recent messages (oldest first)");
        // Rows were fetched newest-first; reverse (in place, local array)
        // so the transcript reads chronologically.
        for (const row of rows.reverse()) {
            // Rows without a backend column value are user-originated.
            const tag = row.backend
                ? `[${row.role}/${row.backend}:${row.model_id ?? "?"}]`
                : `[${row.role}/user]`;
            const forwardSuffix = row.role === "assistant"
                && isProactiveForwardMetadata(parseMessageMetadata(row.metadata))
                ? " (forwarded from autonomous run)"
                : "";
            parts.push(`${tag}${forwardSuffix}: ${row.content}`);
        }
    }
    return parts.join("\n");
}
|
|
3838
|
+
shouldNotify(event) {
|
|
3839
|
+
if (isMessageEvent(event))
|
|
3840
|
+
return true;
|
|
3841
|
+
if (isScheduledEvent(event)) {
|
|
3842
|
+
// Dashboard-triggered tasks (e.g. regenerate) already show status
|
|
3843
|
+
// in the UI — suppress DM notification to avoid noisy messages.
|
|
3844
|
+
// Both scheduled.task and scheduled.dm share this gate; the
|
|
3845
|
+
// briefing path's final assistant turn IS the DM.
|
|
3846
|
+
const ctx = event.taskContext;
|
|
3847
|
+
if (ctx?.triggeredBy === "dashboard")
|
|
3848
|
+
return false;
|
|
3849
|
+
return true;
|
|
3850
|
+
}
|
|
3851
|
+
// Routine events are silent-by-default: result.output is an internal
|
|
3852
|
+
// agent log, never forwarded as a user notification. Routines reach
|
|
3853
|
+
// the user only via explicit POST /api/notify from their prompt. Do
|
|
3854
|
+
// not re-add routines here — the routine_protocol header injected by
|
|
3855
|
+
// the context-builder carries this rule to the agent.
|
|
3856
|
+
return false;
|
|
3857
|
+
}
|
|
3858
|
+
/**
|
|
3859
|
+
* Autonomous "observer" events: external-change detections that the
|
|
3860
|
+
* daemon pushes into the pipeline, as opposed to user-initiated
|
|
3861
|
+
* messages, cron routines, or scheduled tasks. Used for the
|
|
3862
|
+
* contextUpdated observability log in processResult.
|
|
3863
|
+
*/
|
|
3864
|
+
isObserverEvent(event) {
|
|
3865
|
+
return ((isRoutineEvent(event) && event.routine === "hourly_check") ||
|
|
3866
|
+
event.type.startsWith("calendar.") ||
|
|
3867
|
+
event.type === "schedule.approaching" ||
|
|
3868
|
+
event.type.startsWith("notion.") ||
|
|
3869
|
+
event.type.startsWith("github.") ||
|
|
3870
|
+
event.type.startsWith("git."));
|
|
3871
|
+
}
|
|
3872
|
+
/**
 * Create rolling summaries for DM conversations.
 * Called at 4 AM (day boundary) before morning routine.
 *
 * Session-independent: queries messages directly from the DB regardless
 * of which session they belong to. Does NOT expire active sessions —
 * session lifecycle is handled by getOrCreateDm's day boundary check.
 *
 * Rolling summary: previous summary + new messages → new summary.
 * This prevents unbounded growth (summarizing days of history each time).
 *
 * Threshold gate: only runs AI summarization when accumulated messages
 * since the LAST summary exceed the threshold (> 30 messages or > 5000
 * chars of raw text). Below threshold, nothing is saved — the message
 * count accumulates across days until the threshold is reached.
 *
 * Per-platform failures are logged and do not abort the other platforms.
 */
async summarizeDmSessions() {
    const platforms = this.sessionMgr.getDmPlatformsWithNewMessages();
    if (platforms.length === 0)
        return;
    const MSG_THRESHOLD = 30;
    const SIZE_THRESHOLD = 5000;
    // Force summarization before session retention (7 days) deletes messages
    const DAYS_THRESHOLD = 6;
    for (const platform of platforms) {
        try {
            const newMessages = this.sessionMgr.getUnsummarizedDmMessages(platform);
            if (newMessages.length === 0)
                continue;
            // Raw transcript used only for the size-threshold check below.
            const rawNew = newMessages
                .map((m) => `[${this.formatSummaryRole(m)}] ${m.content}`)
                .join("\n");
            // Check if oldest message is approaching retention cutoff
            // (newMessages[0] is assumed oldest-first — TODO confirm against
            // getUnsummarizedDmMessages ordering).
            const oldestMs = parseSqliteUtcMs(newMessages[0].timestamp);
            const daysOld = (Date.now() - oldestMs) / (1000 * 60 * 60 * 24);
            const approachingRetention = daysOld >= DAYS_THRESHOLD;
            // Below threshold: skip — count accumulates until next check
            if (newMessages.length <= MSG_THRESHOLD &&
                rawNew.length <= SIZE_THRESHOLD &&
                !approachingRetention) {
                logger.debug({ platform, messageCount: newMessages.length, rawSize: rawNew.length }, "DM messages below threshold, skipping summarization");
                continue;
            }
            // AI compression (rolling: previous summary + new messages → condensed)
            const previousSummary = this.sessionMgr.getPreviousDmSummary(platform);
            const parts = [];
            if (previousSummary) {
                parts.push(`Previous context:\n${previousSummary}`);
            }
            parts.push(`New messages:\n${newMessages.map((m) => `${this.formatSummaryRole(m)}: ${m.content}`).join("\n")}`);
            const summary = await this.agentRouter.summarize(parts.join("\n\n"));
            // Persist summary plus the consumed message count so the
            // unsummarized window advances.
            this.sessionMgr.saveDmSummary(platform, summary, newMessages.length);
            logger.info({ platform, messageCount: newMessages.length, hadPreviousSummary: !!previousSummary }, "DM conversation summarized");
        }
        catch (err) {
            logger.error({ err, platform }, "Failed to summarize DM conversation");
        }
    }
}
|
|
3931
|
+
/** Delegate to shared isRoadmapStale utility. */
|
|
3932
|
+
isRoadmapStale(maxAgeDays = 15) {
|
|
3933
|
+
return isRoadmapStale(getContextDir(this.config, this.db), maxAgeDays);
|
|
3934
|
+
}
|
|
3935
|
+
/**
|
|
3936
|
+
* Emit a roadmap_refresh routine event.
|
|
3937
|
+
* Dedup guard: skips if emitted within the last 5 minutes unless
|
|
3938
|
+
* `options.bypassDedup` is true. Dashboard-initiated regeneration is
|
|
3939
|
+
* the one legal caller that may bypass dedup; all internal call-sites
|
|
3940
|
+
* honor it so a burst of signals (flight + hotel confirmations in the
|
|
3941
|
+
* same minute) collapses into a single refresh.
|
|
3942
|
+
*/
|
|
3943
|
+
emitRoadmapRefresh(source, options) {
|
|
3944
|
+
const DEDUP_MS = 5 * 60 * 1000;
|
|
3945
|
+
const bypassDedup = options?.bypassDedup === true;
|
|
3946
|
+
if (!bypassDedup && Date.now() - this.lastRoadmapRefreshEmitMs < DEDUP_MS) {
|
|
3947
|
+
logger.info({ source }, "Skipping roadmap_refresh (dedup, emitted recently)");
|
|
3948
|
+
return;
|
|
3949
|
+
}
|
|
3950
|
+
this.lastRoadmapRefreshEmitMs = Date.now();
|
|
3951
|
+
logger.info({ source, bypassDedup }, "Emitting roadmap_refresh");
|
|
3952
|
+
void this.eventBus.put({
|
|
3953
|
+
...createEvent({
|
|
3954
|
+
type: "routine.roadmap_refresh",
|
|
3955
|
+
source,
|
|
3956
|
+
priority: EventPriority.NORMAL,
|
|
3957
|
+
}),
|
|
3958
|
+
routine: "roadmap_refresh",
|
|
3959
|
+
});
|
|
3960
|
+
}
|
|
3961
|
+
/**
 * Convert wall-clock date parts in an IANA timezone to a UTC epoch-ms
 * value. Without a timezone, falls back to the host's local zone via the
 * Date constructor. With a timezone, iterates a fixed-point search
 * (guess → offset-at-guess → refined guess) that converges across DST
 * transitions; returns null when the zone is unresolvable or when the
 * requested local time does not exist (e.g. inside a spring-forward gap,
 * where the round-trip check below fails).
 *
 * @param {{year:number,month:number,day:number,hour:number,minute:number}} parts
 * @param {string|undefined} timeZone - IANA zone name, or falsy for host-local.
 * @returns {number|null} UTC milliseconds, or null on failure.
 */
localDateTimeToUtcMs(parts, timeZone) {
    if (!timeZone) {
        // Host-local interpretation; month is 0-indexed in the Date ctor.
        return new Date(parts.year, parts.month - 1, parts.day, parts.hour, parts.minute, 0, 0).getTime();
    }
    // Start from the naive "parts are UTC" reading, then subtract the
    // zone offset observed at each successive guess. Up to 3 iterations
    // is enough for the offset to stabilize around a DST boundary.
    const baseUtc = Date.UTC(parts.year, parts.month - 1, parts.day, parts.hour, parts.minute, 0, 0);
    let guess = baseUtc;
    for (let attempt = 0; attempt < 3; attempt++) {
        const offsetMinutes = this.getTimeZoneOffsetMinutes(new Date(guess), timeZone);
        if (offsetMinutes === null) {
            return null;
        }
        const nextGuess = baseUtc - offsetMinutes * 60 * 1000;
        if (nextGuess === guess) {
            break;
        }
        guess = nextGuess;
    }
    // Round-trip validation: the converged instant must render back to
    // exactly the requested wall-clock parts, otherwise the local time
    // was ambiguous/nonexistent and we refuse to guess.
    const resolved = this.getLocalDateParts(new Date(guess), timeZone);
    if (this.compareLocalDateParts(resolved, parts) !== 0) {
        return null;
    }
    return guess;
}
|
|
3984
|
+
getTimeZoneOffsetMinutes(date, timeZone) {
|
|
3985
|
+
let formatter;
|
|
3986
|
+
try {
|
|
3987
|
+
formatter = new Intl.DateTimeFormat("en-US", {
|
|
3988
|
+
timeZone,
|
|
3989
|
+
timeZoneName: "shortOffset",
|
|
3990
|
+
});
|
|
3991
|
+
}
|
|
3992
|
+
catch {
|
|
3993
|
+
return null;
|
|
3994
|
+
}
|
|
3995
|
+
const zonePart = formatter.formatToParts(date).find((part) => part.type === "timeZoneName")?.value;
|
|
3996
|
+
if (!zonePart) {
|
|
3997
|
+
return null;
|
|
3998
|
+
}
|
|
3999
|
+
if (zonePart === "GMT") {
|
|
4000
|
+
return 0;
|
|
4001
|
+
}
|
|
4002
|
+
const match = /^GMT([+-])(\d{1,2})(?::?(\d{2}))?$/.exec(zonePart);
|
|
4003
|
+
if (!match) {
|
|
4004
|
+
return null;
|
|
4005
|
+
}
|
|
4006
|
+
const sign = match[1] === "-" ? -1 : 1;
|
|
4007
|
+
const hours = Number(match[2]);
|
|
4008
|
+
const minutes = Number(match[3] ?? "0");
|
|
4009
|
+
return sign * (hours * 60 + minutes);
|
|
4010
|
+
}
|
|
4011
|
+
getLocalDateParts(date, timeZone) {
|
|
4012
|
+
let formatter;
|
|
4013
|
+
try {
|
|
4014
|
+
formatter = new Intl.DateTimeFormat("en-US", {
|
|
4015
|
+
timeZone,
|
|
4016
|
+
year: "numeric",
|
|
4017
|
+
month: "2-digit",
|
|
4018
|
+
day: "2-digit",
|
|
4019
|
+
hour: "2-digit",
|
|
4020
|
+
minute: "2-digit",
|
|
4021
|
+
hourCycle: "h23",
|
|
4022
|
+
});
|
|
4023
|
+
}
|
|
4024
|
+
catch {
|
|
4025
|
+
formatter = new Intl.DateTimeFormat("en-US", {
|
|
4026
|
+
year: "numeric",
|
|
4027
|
+
month: "2-digit",
|
|
4028
|
+
day: "2-digit",
|
|
4029
|
+
hour: "2-digit",
|
|
4030
|
+
minute: "2-digit",
|
|
4031
|
+
hourCycle: "h23",
|
|
4032
|
+
});
|
|
4033
|
+
}
|
|
4034
|
+
const parts = formatter.formatToParts(date);
|
|
4035
|
+
const valueOf = (type) => Number(parts.find((part) => part.type === type)?.value ?? "0");
|
|
4036
|
+
return {
|
|
4037
|
+
year: valueOf("year"),
|
|
4038
|
+
month: valueOf("month"),
|
|
4039
|
+
day: valueOf("day"),
|
|
4040
|
+
hour: valueOf("hour"),
|
|
4041
|
+
minute: valueOf("minute"),
|
|
4042
|
+
};
|
|
4043
|
+
}
|
|
4044
|
+
compareLocalDateParts(a, b) {
|
|
4045
|
+
if (a.year !== b.year)
|
|
4046
|
+
return a.year - b.year;
|
|
4047
|
+
if (a.month !== b.month)
|
|
4048
|
+
return a.month - b.month;
|
|
4049
|
+
if (a.day !== b.day)
|
|
4050
|
+
return a.day - b.day;
|
|
4051
|
+
if (a.hour !== b.hour)
|
|
4052
|
+
return a.hour - b.hour;
|
|
4053
|
+
return a.minute - b.minute;
|
|
4054
|
+
}
|
|
4055
|
+
/**
 * Classify an execution error for the outer retry loop.
 *
 * BackendCore implementations wrap all errors into BackendQuotaError or
 * BackendDecisiveFailure before they reach the dispatcher; both are
 * decisive (no retry), as is BackendRouterHandledError. Only a raw
 * HTTP 5xx status that escaped classification is retried once.
 *
 * @param {unknown} error - error thrown by the backend execution.
 * @returns {boolean} true when the outer loop may retry.
 */
isRetryable(error) {
    const isDecisive = error instanceof BackendQuotaError
        || error instanceof BackendDecisiveFailure
        || error instanceof BackendRouterHandledError;
    if (isDecisive) {
        return false;
    }
    // Raw 5xx from an unclassified path — retry once.
    const carriesStatus = typeof error === "object" && error !== null && "status" in error;
    if (!carriesStatus) {
        return false;
    }
    return typeof error.status === "number" && error.status >= 500;
}
|
|
4070
|
+
/**
 * Defense-in-depth retry wrapper around BackendRouter.execute().
 *
 * **Primary retry responsibility lives inside each BackendCore** (§12/§13).
 * Quota errors, timeouts, and auth failures are all normalized into
 * BackendDecisiveFailure / BackendQuotaError before they reach this layer.
 * The BackendRouter handles fallback on decisive failures.
 *
 * This outer loop exists solely as a safety net for raw 5xx errors that
 * somehow escape the BackendCore → Router chain (e.g., an unexpected HTTP
 * error from the SDK transport layer). In practice it almost never fires.
 *
 * @param {() => Promise<any>} fn - the execution thunk to run (and
 *   possibly retry once).
 * @param {object} event - event being processed; used for log context only.
 * @returns {Promise<any>} the resolved value of `fn`.
 * @throws the last error when all attempts fail, or immediately when a
 *   shutdown interrupts the backoff sleep.
 */
async executeWithRetry(fn, event) {
    const maxRetries = 1;
    let lastError;
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
        try {
            return await fn();
        }
        catch (error) {
            lastError = error;
            if (attempt < maxRetries && this.isRetryable(error)) {
                logger.warn({
                    eventType: event.type,
                    attempt: attempt + 1,
                    error: error instanceof Error ? error.message : String(error),
                }, "Retrying agent execution after backoff");
                // Shutdown-aware sleep: race a single 5-minute timer against the
                // shutdown signal so SIGTERM unwinds the retry loop promptly
                // instead of blocking for up to 5 minutes.
                await new Promise((resolve) => {
                    // Either side resolves exactly once; both sides remove the
                    // awaiter so the set cannot leak entries.
                    const onShutdown = () => {
                        clearTimeout(timer);
                        this.shutdownAwaiters.delete(onShutdown);
                        resolve();
                    };
                    const timer = setTimeout(() => {
                        this.shutdownAwaiters.delete(onShutdown);
                        resolve();
                    }, 5 * 60 * 1000);
                    this.shutdownAwaiters.add(onShutdown);
                });
                // Shutdown during the sleep: surface the original failure
                // rather than starting another attempt.
                if (this.shutdown) {
                    throw lastError;
                }
                continue;
            }
            // Non-retryable, or retries exhausted — fall through to rethrow.
            break;
        }
    }
    throw lastError;
}
|
|
4122
|
+
/**
 * Error-path terminal site for an event run: cleans up the notify-dedup
 * marker, settles the `agent_schedule` row, runs the retemplate /
 * management finalize hooks with `errored: true`, and surfaces a quota
 * or generic error message back to the user for message events.
 *
 * @param {object} event - the event whose processing failed.
 * @param {Error} error - the failure raised by execution.
 */
async handleError(event, error) {
    logger.error({ event: event.type, error: error.message }, "Event processing error");
    // Defense-in-depth cleanup of the notify-dedup marker — processResult
    // is the primary collection point, but if execution threw before
    // reaching it, drop any orphan entry here so the set cannot grow
    // unbounded across error storms.
    this.notifiedEvents.delete(event.correlationId);
    const routerHandledError = error instanceof BackendRouterHandledError
        ? error
        : null;
    // Mark scheduled task as failed whenever execution terminates
    // without a result. Covers both scheduled.task and scheduled.dm —
    // a scheduled.dm row that throws would otherwise stick in
    // `running` forever.
    if (isScheduledEvent(event) && event.scheduleId) {
        this.db
            .prepare("UPDATE agent_schedule SET status = 'failed' WHERE id = ? AND status = 'running'")
            .run(event.scheduleId);
        this.finalizeRetemplateRunIfApplicable(event, { errored: true });
    }
    // Same rationale as the success-path call: management events have no
    // `agent_schedule` row, so this hook is intentionally outside the
    // scheduleId guard.
    this.finalizeManagementScanIfApplicable(event, { errored: true });
    // Router-handled errors already notified the user through the router
    // path; only the dashboard inline error is still surfaced here.
    if (routerHandledError) {
        const quotaError = this.extractQuotaError(routerHandledError.cause);
        if (quotaError && isMessageEvent(event)) {
            this.notifyDashboardError(event, this.formatQuotaMessage(quotaError));
        }
        return;
    }
    // Unhandled quota errors: send the formatted quota message both to
    // the dashboard tab and through the notification manager.
    const quotaError = this.extractQuotaError(error);
    if (quotaError && isMessageEvent(event)) {
        const quotaMsg = this.formatQuotaMessage(quotaError);
        this.notifyDashboardError(event, quotaMsg);
        await this.notificationMgr.send(quotaMsg, event);
        return;
    }
    // Generic fallback for message events — non-message events fail silently
    // (their observability is the error log above).
    if (isMessageEvent(event)) {
        const errorMsg = "An error occurred during processing. Please try again.";
        this.notifyDashboardError(event, errorMsg);
        await this.notificationMgr.send(errorMsg, event);
    }
}
|
|
4166
|
+
/**
|
|
4167
|
+
* Best-effort inline error to the dashboard tab whose POST triggered
|
|
4168
|
+
* this event. `DashboardAdapter` is `notificationEligible=false`, so
|
|
4169
|
+
* the normal `notificationMgr.send` path skips it — without this hook
|
|
4170
|
+
* the browser sees the request accepted (200 OK), watches nothing
|
|
4171
|
+
* happen, and hits the 120s waiting timeout with no explanation. We
|
|
4172
|
+
* target the originating channel id; if the tab already reconnected
|
|
4173
|
+
* with a new UUID the adapter silently drops, which matches the
|
|
4174
|
+
* chat_error semantics.
|
|
4175
|
+
*/
|
|
4176
|
+
notifyDashboardError(event, message) {
|
|
4177
|
+
if (!isMessageEvent(event))
|
|
4178
|
+
return;
|
|
4179
|
+
if (event.platform !== "dashboard")
|
|
4180
|
+
return;
|
|
4181
|
+
this.dashboardStream?.sendError?.(event.channel, message);
|
|
4182
|
+
}
|
|
4183
|
+
/**
 * Unwrap a BackendQuotaError from an execution error: either the error
 * itself, or the cause of a BackendDecisiveFailure of kind "quota".
 * All BackendCore implementations normalize quota errors before they
 * reach the dispatcher, so no Claude-specific fallback is needed here.
 *
 * @param {unknown} error - error to inspect.
 * @returns {BackendQuotaError|null}
 */
extractQuotaError(error) {
    if (error instanceof BackendQuotaError) {
        return error;
    }
    const wrapsQuota = error instanceof BackendDecisiveFailure
        && error.kind === "quota"
        && error.cause instanceof BackendQuotaError;
    return wrapsQuota ? error.cause : null;
}
|
|
4196
|
+
formatQuotaMessage(quotaError) {
|
|
4197
|
+
const backendLabel = this.formatBackendLabel(quotaError.backendId);
|
|
4198
|
+
const resetHint = quotaError.resetHint;
|
|
4199
|
+
if (quotaError.originalCode === "max_budget_usd") {
|
|
4200
|
+
return `${backendLabel} reached the per-turn budget limit. Please try a shorter request or raise max_budget_usd in backend settings.`;
|
|
4201
|
+
}
|
|
4202
|
+
if (resetHint) {
|
|
4203
|
+
const timeZone = resetHint.timeZone || this.config.timezone || undefined;
|
|
4204
|
+
const resetAtMs = this.resolveQuotaResetAtMs(resetHint);
|
|
4205
|
+
if (resetAtMs !== null) {
|
|
4206
|
+
const formatted = new Intl.DateTimeFormat("en-US", {
|
|
4207
|
+
timeZone,
|
|
4208
|
+
year: "numeric",
|
|
4209
|
+
month: "2-digit",
|
|
4210
|
+
day: "2-digit",
|
|
4211
|
+
hour: "2-digit",
|
|
4212
|
+
minute: "2-digit",
|
|
4213
|
+
hourCycle: "h12",
|
|
4214
|
+
}).format(new Date(resetAtMs));
|
|
4215
|
+
const zoneLabel = timeZone ? ` (${timeZone})` : "";
|
|
4216
|
+
return `${backendLabel} has reached its usage limit. Resets at ${formatted}${zoneLabel}. Please try again after the reset.`;
|
|
4217
|
+
}
|
|
4218
|
+
try {
|
|
4219
|
+
new Intl.DateTimeFormat("en-US", { timeZone }).format(new Date());
|
|
4220
|
+
}
|
|
4221
|
+
catch {
|
|
4222
|
+
// Fall through to rawLabel/generic message if the timezone label is invalid.
|
|
4223
|
+
}
|
|
4224
|
+
const rawLabel = resetHint.rawLabel.trim();
|
|
4225
|
+
if (rawLabel) {
|
|
4226
|
+
return `${backendLabel} has reached its usage limit. Resets at ${rawLabel}. Please try again after the reset.`;
|
|
4227
|
+
}
|
|
4228
|
+
}
|
|
4229
|
+
return `${backendLabel} has reached its usage limit. Please wait and try again later.`;
|
|
4230
|
+
}
|
|
4231
|
+
formatBackendLabel(backendId) {
|
|
4232
|
+
switch (backendId) {
|
|
4233
|
+
case "claude":
|
|
4234
|
+
return "Claude Code";
|
|
4235
|
+
case "codex":
|
|
4236
|
+
return "Codex";
|
|
4237
|
+
case "gemini":
|
|
4238
|
+
return "Gemini CLI";
|
|
4239
|
+
default:
|
|
4240
|
+
return backendId;
|
|
4241
|
+
}
|
|
4242
|
+
}
|
|
4243
|
+
resolveQuotaResetAtMs(resetHint) {
|
|
4244
|
+
const timeZone = resetHint.timeZone || this.config.timezone || undefined;
|
|
4245
|
+
const now = new Date();
|
|
4246
|
+
const current = this.getLocalDateParts(now, timeZone);
|
|
4247
|
+
let target = {
|
|
4248
|
+
year: current.year,
|
|
4249
|
+
month: current.month,
|
|
4250
|
+
day: current.day,
|
|
4251
|
+
hour: resetHint.hour,
|
|
4252
|
+
minute: resetHint.minute,
|
|
4253
|
+
};
|
|
4254
|
+
if (this.compareLocalDateParts(current, target) >= 0) {
|
|
4255
|
+
const nextDate = new Date(Date.UTC(current.year, current.month - 1, current.day + 1));
|
|
4256
|
+
target = {
|
|
4257
|
+
year: nextDate.getUTCFullYear(),
|
|
4258
|
+
month: nextDate.getUTCMonth() + 1,
|
|
4259
|
+
day: nextDate.getUTCDate(),
|
|
4260
|
+
hour: resetHint.hour,
|
|
4261
|
+
minute: resetHint.minute,
|
|
4262
|
+
};
|
|
4263
|
+
}
|
|
4264
|
+
return this.localDateTimeToUtcMs(target, timeZone);
|
|
4265
|
+
}
|
|
4266
|
+
/**
|
|
4267
|
+
* DELEGATED-MODE-V2-DESIGN.md §4.5 — at every DM dispatch, consult the
|
|
4268
|
+
* cached probe for delegated integrations whose effective backend
|
|
4269
|
+
* matches the session backend. Surfaces a one-shot DM (deduped via
|
|
4270
|
+
* `runtime_state`) when the cached probe says required capabilities
|
|
4271
|
+
* are no longer present.
|
|
4272
|
+
*
|
|
4273
|
+
* The consult itself is synchronous DB-only work (cheap on the hot
|
|
4274
|
+
* path). The DM dispatch is fire-and-forget so the agent's response
|
|
4275
|
+
* latency is not gated on Slack/Telegram round-trips. Per-warning
|
|
4276
|
+
* dispatch failures are swallowed so a flaky messaging adapter never
|
|
4277
|
+
* breaks the user's actual DM.
|
|
4278
|
+
*/
|
|
4279
|
+
/**
|
|
4280
|
+
* Phase 1 of the §4.5 health check — synchronous cache consult only.
|
|
4281
|
+
* Returns the warnings the dispatcher must surface this turn (or `[]`
|
|
4282
|
+
* when nothing is broken / setup mode is active / the consult itself
|
|
4283
|
+
* threw). Recovery markers are cleared inline by the consult helper, so
|
|
4284
|
+
* the caller does not have to track them.
|
|
4285
|
+
*
|
|
4286
|
+
* Split from the dispatch step so the actual DM (and its messages-table
|
|
4287
|
+
* persist) fires AFTER the dispatcher has recorded the inbound user
|
|
4288
|
+
* message — otherwise the warning row's `CURRENT_TIMESTAMP` lands before
|
|
4289
|
+
* the user-message row's, which makes `chat_meta` history reload reorder
|
|
4290
|
+
* the bubbles (warning above user) and a one-time visual flicker leaks
|
|
4291
|
+
* to the user. See `runDelegatedConnectorWarningDispatch` below.
|
|
4292
|
+
*/
|
|
4293
|
+
consultDelegatedConnectorWarnings(sessionBackend) {
|
|
4294
|
+
try {
|
|
4295
|
+
const result = consultDelegatedConnectorHealth(this.db, sessionBackend);
|
|
4296
|
+
if (result.recovered.length > 0) {
|
|
4297
|
+
logger.info({ recovered: result.recovered, sessionBackend }, "Delegated connector(s) recovered — sign-out warning markers cleared");
|
|
4298
|
+
}
|
|
4299
|
+
return result.warnings;
|
|
4300
|
+
}
|
|
4301
|
+
catch (err) {
|
|
4302
|
+
logger.warn({ err, sessionBackend }, "Delegated connector-health consult failed — skipping DM warning");
|
|
4303
|
+
return [];
|
|
4304
|
+
}
|
|
4305
|
+
}
|
|
4306
|
+
/**
 * Phase 2 of the §4.5 health check — asynchronous DM dispatch + post-
 * delivery bookkeeping (throttle marker + dashboard-channel persist).
 * Caller invokes this AFTER the user message is recorded so the DM's
 * messages-table row carries a strictly-later `CURRENT_TIMESTAMP`
 * (preserves pre-reconcile chat order on the dashboard).
 *
 * Dispatch is fire-and-forget (`void`-ed promise) so the agent's
 * response latency is never gated on messaging round-trips; per-warning
 * delivery failures are logged and swallowed.
 *
 * @param warnings        connector warnings from `consultDelegatedConnectorWarnings`
 * @param event           the triggering inbound event (platform/channel target for the DM)
 * @param sessionBackend  backend id of the active session (recorded with the persisted DM)
 * @param sessionId       session the dashboard persist attaches the DM row to
 */
runDelegatedConnectorWarningDispatch(warnings, event, sessionBackend, sessionId) {
    for (const warning of warnings) {
        logger.warn({
            integration: warning.integration,
            backend: warning.backend,
            missingRequired: warning.missingRequired,
        }, "Delegated connector reports missing required capabilities — DM owner");
        const message = renderSignoutDm(warning);
        // Mark the throttle ONLY after a successful dispatch — if the
        // messaging adapter is down, an absent marker keeps the next
        // consult ready to re-issue the warning. The .send() promise
        // resolves on adapter-acknowledged delivery; .catch() is the
        // failure side, which deliberately leaves the marker unset.
        //
        // After delivery, persist the warning to `messages` so it survives
        // dashboard chat reload + the chat_meta history-reconcile pass
        // (`reconcileLiveMessagesAfterHistoryReload` drops live bubbles whose
        // timestamp is before the sync started AND whose signature is not in
        // the restored history; without this persist the DM bubble vanishes
        // the moment the agent's reply chat_meta arrives). For non-dashboard
        // platforms (Slack/Telegram) the message-store is the platform itself,
        // so we deliberately persist only when `event.platform === "dashboard"`
        // to avoid duplicating remote-platform messages locally.
        void this.notificationMgr
            .send(message, event, {
            priority: "high",
            category: "delegated_signout",
        })
            .then(() => {
            // Throttle marker first: even if the dashboard persist below
            // fails, the warning must not be re-sent on the next consult.
            try {
                markSignoutWarned(this.db, warning);
            }
            catch (err) {
                logger.warn({
                    err,
                    integration: warning.integration,
                    backend: warning.backend,
                }, "Failed to persist delegated-signout marker — next consult may re-warn");
            }
            if (event.platform === "dashboard") {
                try {
                    this.messageRecorder.recordMessage({
                        sessionId,
                        role: "assistant",
                        content: message,
                        platform: event.platform,
                        backend: sessionBackend,
                    });
                }
                catch (err) {
                    logger.warn({
                        err,
                        integration: warning.integration,
                        backend: warning.backend,
                        sessionId,
                    }, "Failed to persist delegated-signout DM into messages — bubble may vanish on chat reload");
                }
            }
        })
            .catch((err) => {
            logger.error({ err, integration: warning.integration, backend: warning.backend }, "Failed to deliver delegated-signout DM — marker not set, will retry next dispatch");
        });
    }
}
|
|
4377
|
+
}
|
|
4378
|
+
//# sourceMappingURL=dispatcher.js.map
|