@openrig/cli 0.1.12 → 0.2.0
This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as published.
- package/daemon/assets/openrig-activity-hook-relay.cjs +104 -0
- package/daemon/dist/adapters/claude-code-adapter.d.ts +14 -6
- package/daemon/dist/adapters/claude-code-adapter.d.ts.map +1 -1
- package/daemon/dist/adapters/claude-code-adapter.js +183 -65
- package/daemon/dist/adapters/claude-code-adapter.js.map +1 -1
- package/daemon/dist/adapters/claude-resume.d.ts +5 -0
- package/daemon/dist/adapters/claude-resume.d.ts.map +1 -1
- package/daemon/dist/adapters/claude-resume.js +13 -0
- package/daemon/dist/adapters/claude-resume.js.map +1 -1
- package/daemon/dist/adapters/codex-resume.d.ts +8 -1
- package/daemon/dist/adapters/codex-resume.d.ts.map +1 -1
- package/daemon/dist/adapters/codex-resume.js +74 -5
- package/daemon/dist/adapters/codex-resume.js.map +1 -1
- package/daemon/dist/adapters/codex-runtime-adapter.d.ts +9 -5
- package/daemon/dist/adapters/codex-runtime-adapter.d.ts.map +1 -1
- package/daemon/dist/adapters/codex-runtime-adapter.js +255 -28
- package/daemon/dist/adapters/codex-runtime-adapter.js.map +1 -1
- package/daemon/dist/adapters/terminal-adapter.d.ts +2 -1
- package/daemon/dist/adapters/terminal-adapter.d.ts.map +1 -1
- package/daemon/dist/adapters/terminal-adapter.js +7 -1
- package/daemon/dist/adapters/terminal-adapter.js.map +1 -1
- package/daemon/dist/adapters/tmux.d.ts.map +1 -1
- package/daemon/dist/adapters/tmux.js +53 -5
- package/daemon/dist/adapters/tmux.js.map +1 -1
- package/daemon/dist/db/migrations/021_seat_handover_observability.d.ts +3 -0
- package/daemon/dist/db/migrations/021_seat_handover_observability.d.ts.map +1 -0
- package/daemon/dist/db/migrations/021_seat_handover_observability.js +11 -0
- package/daemon/dist/db/migrations/021_seat_handover_observability.js.map +1 -0
- package/daemon/dist/db/migrations/022_node_codex_config_profile.d.ts +3 -0
- package/daemon/dist/db/migrations/022_node_codex_config_profile.d.ts.map +1 -0
- package/daemon/dist/db/migrations/022_node_codex_config_profile.js +7 -0
- package/daemon/dist/db/migrations/022_node_codex_config_profile.js.map +1 -0
- package/daemon/dist/db/migrations/023_stream_items.d.ts +16 -0
- package/daemon/dist/db/migrations/023_stream_items.d.ts.map +1 -0
- package/daemon/dist/db/migrations/023_stream_items.js +37 -0
- package/daemon/dist/db/migrations/023_stream_items.js.map +1 -0
- package/daemon/dist/db/migrations/024_queue_items.d.ts +19 -0
- package/daemon/dist/db/migrations/024_queue_items.d.ts.map +1 -0
- package/daemon/dist/db/migrations/024_queue_items.js +52 -0
- package/daemon/dist/db/migrations/024_queue_items.js.map +1 -0
- package/daemon/dist/db/migrations/025_queue_transitions.d.ts +13 -0
- package/daemon/dist/db/migrations/025_queue_transitions.d.ts.map +1 -0
- package/daemon/dist/db/migrations/025_queue_transitions.js +28 -0
- package/daemon/dist/db/migrations/025_queue_transitions.js.map +1 -0
- package/daemon/dist/db/migrations/026_inbox_entries.d.ts +14 -0
- package/daemon/dist/db/migrations/026_inbox_entries.d.ts.map +1 -0
- package/daemon/dist/db/migrations/026_inbox_entries.js +35 -0
- package/daemon/dist/db/migrations/026_inbox_entries.js.map +1 -0
- package/daemon/dist/db/migrations/027_outbox_entries.d.ts +12 -0
- package/daemon/dist/db/migrations/027_outbox_entries.d.ts.map +1 -0
- package/daemon/dist/db/migrations/027_outbox_entries.js +30 -0
- package/daemon/dist/db/migrations/027_outbox_entries.js.map +1 -0
- package/daemon/dist/db/migrations/028_project_classifications.d.ts +26 -0
- package/daemon/dist/db/migrations/028_project_classifications.d.ts.map +1 -0
- package/daemon/dist/db/migrations/028_project_classifications.js +44 -0
- package/daemon/dist/db/migrations/028_project_classifications.js.map +1 -0
- package/daemon/dist/db/migrations/029_classifier_leases.d.ts +22 -0
- package/daemon/dist/db/migrations/029_classifier_leases.d.ts.map +1 -0
- package/daemon/dist/db/migrations/029_classifier_leases.js +41 -0
- package/daemon/dist/db/migrations/029_classifier_leases.js.map +1 -0
- package/daemon/dist/db/migrations/030_views_custom.d.ts +15 -0
- package/daemon/dist/db/migrations/030_views_custom.d.ts.map +1 -0
- package/daemon/dist/db/migrations/030_views_custom.js +27 -0
- package/daemon/dist/db/migrations/030_views_custom.js.map +1 -0
- package/daemon/dist/db/migrations/031_watchdog_jobs.d.ts +34 -0
- package/daemon/dist/db/migrations/031_watchdog_jobs.d.ts.map +1 -0
- package/daemon/dist/db/migrations/031_watchdog_jobs.js +59 -0
- package/daemon/dist/db/migrations/031_watchdog_jobs.js.map +1 -0
- package/daemon/dist/db/migrations/032_watchdog_history.d.ts +28 -0
- package/daemon/dist/db/migrations/032_watchdog_history.d.ts.map +1 -0
- package/daemon/dist/db/migrations/032_watchdog_history.js +46 -0
- package/daemon/dist/db/migrations/032_watchdog_history.js.map +1 -0
- package/daemon/dist/db/migrations/033_workflow_specs.d.ts +30 -0
- package/daemon/dist/db/migrations/033_workflow_specs.d.ts.map +1 -0
- package/daemon/dist/db/migrations/033_workflow_specs.js +50 -0
- package/daemon/dist/db/migrations/033_workflow_specs.js.map +1 -0
- package/daemon/dist/db/migrations/034_workflow_instances.d.ts +42 -0
- package/daemon/dist/db/migrations/034_workflow_instances.d.ts.map +1 -0
- package/daemon/dist/db/migrations/034_workflow_instances.js +63 -0
- package/daemon/dist/db/migrations/034_workflow_instances.js.map +1 -0
- package/daemon/dist/db/migrations/035_workflow_step_trails.d.ts +29 -0
- package/daemon/dist/db/migrations/035_workflow_step_trails.d.ts.map +1 -0
- package/daemon/dist/db/migrations/035_workflow_step_trails.js +48 -0
- package/daemon/dist/db/migrations/035_workflow_step_trails.js.map +1 -0
- package/daemon/dist/db/migrations/036_watchdog_policy_enum_extension.d.ts +35 -0
- package/daemon/dist/db/migrations/036_watchdog_policy_enum_extension.d.ts.map +1 -0
- package/daemon/dist/db/migrations/036_watchdog_policy_enum_extension.js +43 -0
- package/daemon/dist/db/migrations/036_watchdog_policy_enum_extension.js.map +1 -0
- package/daemon/dist/domain/agent-activity-store.d.ts +42 -0
- package/daemon/dist/domain/agent-activity-store.d.ts.map +1 -0
- package/daemon/dist/domain/agent-activity-store.js +177 -0
- package/daemon/dist/domain/agent-activity-store.js.map +1 -0
- package/daemon/dist/domain/agent-manifest.d.ts.map +1 -1
- package/daemon/dist/domain/agent-manifest.js +3 -0
- package/daemon/dist/domain/agent-manifest.js.map +1 -1
- package/daemon/dist/domain/agent-starter-resolver.d.ts +100 -0
- package/daemon/dist/domain/agent-starter-resolver.d.ts.map +1 -0
- package/daemon/dist/domain/agent-starter-resolver.js +200 -0
- package/daemon/dist/domain/agent-starter-resolver.js.map +1 -0
- package/daemon/dist/domain/classifier-lease-manager.d.ts +130 -0
- package/daemon/dist/domain/classifier-lease-manager.d.ts.map +1 -0
- package/daemon/dist/domain/classifier-lease-manager.js +285 -0
- package/daemon/dist/domain/classifier-lease-manager.js.map +1 -0
- package/daemon/dist/domain/context-usage-store.d.ts +1 -1
- package/daemon/dist/domain/context-usage-store.js +1 -1
- package/daemon/dist/domain/context-usage-store.js.map +1 -1
- package/daemon/dist/domain/hot-potato-enforcer.d.ts +45 -0
- package/daemon/dist/domain/hot-potato-enforcer.d.ts.map +1 -0
- package/daemon/dist/domain/hot-potato-enforcer.js +94 -0
- package/daemon/dist/domain/hot-potato-enforcer.js.map +1 -0
- package/daemon/dist/domain/inbox-handler.d.ts +68 -0
- package/daemon/dist/domain/inbox-handler.d.ts.map +1 -0
- package/daemon/dist/domain/inbox-handler.js +177 -0
- package/daemon/dist/domain/inbox-handler.js.map +1 -0
- package/daemon/dist/domain/native-resume-probe.d.ts +1 -1
- package/daemon/dist/domain/native-resume-probe.d.ts.map +1 -1
- package/daemon/dist/domain/native-resume-probe.js +67 -0
- package/daemon/dist/domain/native-resume-probe.js.map +1 -1
- package/daemon/dist/domain/node-inventory.d.ts +29 -1
- package/daemon/dist/domain/node-inventory.d.ts.map +1 -1
- package/daemon/dist/domain/node-inventory.js +160 -38
- package/daemon/dist/domain/node-inventory.js.map +1 -1
- package/daemon/dist/domain/node-launcher.d.ts +2 -0
- package/daemon/dist/domain/node-launcher.d.ts.map +1 -1
- package/daemon/dist/domain/node-launcher.js +14 -2
- package/daemon/dist/domain/node-launcher.js.map +1 -1
- package/daemon/dist/domain/outbox-handler.d.ts +48 -0
- package/daemon/dist/domain/outbox-handler.d.ts.map +1 -0
- package/daemon/dist/domain/outbox-handler.js +106 -0
- package/daemon/dist/domain/outbox-handler.js.map +1 -0
- package/daemon/dist/domain/policies/artifact-pool-helpers.d.ts +64 -0
- package/daemon/dist/domain/policies/artifact-pool-helpers.d.ts.map +1 -0
- package/daemon/dist/domain/policies/artifact-pool-helpers.js +226 -0
- package/daemon/dist/domain/policies/artifact-pool-helpers.js.map +1 -0
- package/daemon/dist/domain/policies/artifact-pool-ready.d.ts +3 -0
- package/daemon/dist/domain/policies/artifact-pool-ready.d.ts.map +1 -0
- package/daemon/dist/domain/policies/artifact-pool-ready.js +41 -0
- package/daemon/dist/domain/policies/artifact-pool-ready.js.map +1 -0
- package/daemon/dist/domain/policies/edge-artifact-required.d.ts +3 -0
- package/daemon/dist/domain/policies/edge-artifact-required.d.ts.map +1 -0
- package/daemon/dist/domain/policies/edge-artifact-required.js +90 -0
- package/daemon/dist/domain/policies/edge-artifact-required.js.map +1 -0
- package/daemon/dist/domain/policies/periodic-reminder.d.ts +3 -0
- package/daemon/dist/domain/policies/periodic-reminder.d.ts.map +1 -0
- package/daemon/dist/domain/policies/periodic-reminder.js +30 -0
- package/daemon/dist/domain/policies/periodic-reminder.js.map +1 -0
- package/daemon/dist/domain/policies/types.d.ts +51 -0
- package/daemon/dist/domain/policies/types.d.ts.map +1 -0
- package/daemon/dist/domain/policies/types.js +9 -0
- package/daemon/dist/domain/policies/types.js.map +1 -0
- package/daemon/dist/domain/policies/workflow-keepalive.d.ts +7 -0
- package/daemon/dist/domain/policies/workflow-keepalive.d.ts.map +1 -0
- package/daemon/dist/domain/policies/workflow-keepalive.js +123 -0
- package/daemon/dist/domain/policies/workflow-keepalive.js.map +1 -0
- package/daemon/dist/domain/project-classifier.d.ts +75 -0
- package/daemon/dist/domain/project-classifier.d.ts.map +1 -0
- package/daemon/dist/domain/project-classifier.js +132 -0
- package/daemon/dist/domain/project-classifier.js.map +1 -0
- package/daemon/dist/domain/projection-planner.d.ts +1 -0
- package/daemon/dist/domain/projection-planner.d.ts.map +1 -1
- package/daemon/dist/domain/projection-planner.js +3 -0
- package/daemon/dist/domain/projection-planner.js.map +1 -1
- package/daemon/dist/domain/ps-projection.d.ts +31 -0
- package/daemon/dist/domain/ps-projection.d.ts.map +1 -1
- package/daemon/dist/domain/ps-projection.js +38 -0
- package/daemon/dist/domain/ps-projection.js.map +1 -1
- package/daemon/dist/domain/queue-repository.d.ts +322 -0
- package/daemon/dist/domain/queue-repository.d.ts.map +1 -0
- package/daemon/dist/domain/queue-repository.js +686 -0
- package/daemon/dist/domain/queue-repository.js.map +1 -0
- package/daemon/dist/domain/queue-transition-log.d.ts +38 -0
- package/daemon/dist/domain/queue-transition-log.d.ts.map +1 -0
- package/daemon/dist/domain/queue-transition-log.js +52 -0
- package/daemon/dist/domain/queue-transition-log.js.map +1 -0
- package/daemon/dist/domain/restore-check-service.d.ts +203 -0
- package/daemon/dist/domain/restore-check-service.d.ts.map +1 -0
- package/daemon/dist/domain/restore-check-service.js +1047 -0
- package/daemon/dist/domain/restore-check-service.js.map +1 -0
- package/daemon/dist/domain/restore-orchestrator.d.ts +49 -3
- package/daemon/dist/domain/restore-orchestrator.d.ts.map +1 -1
- package/daemon/dist/domain/restore-orchestrator.js +460 -62
- package/daemon/dist/domain/restore-orchestrator.js.map +1 -1
- package/daemon/dist/domain/rig-expansion-service.d.ts.map +1 -1
- package/daemon/dist/domain/rig-expansion-service.js +13 -0
- package/daemon/dist/domain/rig-expansion-service.js.map +1 -1
- package/daemon/dist/domain/rig-repository.d.ts +17 -1
- package/daemon/dist/domain/rig-repository.d.ts.map +1 -1
- package/daemon/dist/domain/rig-repository.js +63 -5
- package/daemon/dist/domain/rig-repository.js.map +1 -1
- package/daemon/dist/domain/rigspec-codec.d.ts.map +1 -1
- package/daemon/dist/domain/rigspec-codec.js +13 -0
- package/daemon/dist/domain/rigspec-codec.js.map +1 -1
- package/daemon/dist/domain/rigspec-exporter.d.ts.map +1 -1
- package/daemon/dist/domain/rigspec-exporter.js +2 -0
- package/daemon/dist/domain/rigspec-exporter.js.map +1 -1
- package/daemon/dist/domain/rigspec-instantiator.d.ts.map +1 -1
- package/daemon/dist/domain/rigspec-instantiator.js +61 -1
- package/daemon/dist/domain/rigspec-instantiator.js.map +1 -1
- package/daemon/dist/domain/rigspec-schema.d.ts.map +1 -1
- package/daemon/dist/domain/rigspec-schema.js +196 -0
- package/daemon/dist/domain/rigspec-schema.js.map +1 -1
- package/daemon/dist/domain/runtime-adapter.d.ts +27 -2
- package/daemon/dist/domain/runtime-adapter.d.ts.map +1 -1
- package/daemon/dist/domain/runtime-adapter.js +5 -0
- package/daemon/dist/domain/runtime-adapter.js.map +1 -1
- package/daemon/dist/domain/seat-handover-planner.d.ts +84 -0
- package/daemon/dist/domain/seat-handover-planner.d.ts.map +1 -0
- package/daemon/dist/domain/seat-handover-planner.js +188 -0
- package/daemon/dist/domain/seat-handover-planner.js.map +1 -0
- package/daemon/dist/domain/seat-handover-service.d.ts +104 -0
- package/daemon/dist/domain/seat-handover-service.d.ts.map +1 -0
- package/daemon/dist/domain/seat-handover-service.js +343 -0
- package/daemon/dist/domain/seat-handover-service.js.map +1 -0
- package/daemon/dist/domain/seat-status-service.d.ts +50 -0
- package/daemon/dist/domain/seat-status-service.d.ts.map +1 -0
- package/daemon/dist/domain/seat-status-service.js +66 -0
- package/daemon/dist/domain/seat-status-service.js.map +1 -0
- package/daemon/dist/domain/session-source-rebuild-resolver.d.ts +53 -0
- package/daemon/dist/domain/session-source-rebuild-resolver.d.ts.map +1 -0
- package/daemon/dist/domain/session-source-rebuild-resolver.js +59 -0
- package/daemon/dist/domain/session-source-rebuild-resolver.js.map +1 -0
- package/daemon/dist/domain/session-transport.d.ts +30 -0
- package/daemon/dist/domain/session-transport.d.ts.map +1 -1
- package/daemon/dist/domain/session-transport.js +322 -12
- package/daemon/dist/domain/session-transport.js.map +1 -1
- package/daemon/dist/domain/snapshot-repository.d.ts +18 -0
- package/daemon/dist/domain/snapshot-repository.d.ts.map +1 -1
- package/daemon/dist/domain/snapshot-repository.js +77 -0
- package/daemon/dist/domain/snapshot-repository.js.map +1 -1
- package/daemon/dist/domain/startup-orchestrator.d.ts +22 -3
- package/daemon/dist/domain/startup-orchestrator.d.ts.map +1 -1
- package/daemon/dist/domain/startup-orchestrator.js +31 -5
- package/daemon/dist/domain/startup-orchestrator.js.map +1 -1
- package/daemon/dist/domain/stream-store.d.ts +56 -0
- package/daemon/dist/domain/stream-store.d.ts.map +1 -0
- package/daemon/dist/domain/stream-store.js +108 -0
- package/daemon/dist/domain/stream-store.js.map +1 -0
- package/daemon/dist/domain/transcript-redaction.d.ts +13 -0
- package/daemon/dist/domain/transcript-redaction.d.ts.map +1 -0
- package/daemon/dist/domain/transcript-redaction.js +34 -0
- package/daemon/dist/domain/transcript-redaction.js.map +1 -0
- package/daemon/dist/domain/transcript-store.d.ts +15 -0
- package/daemon/dist/domain/transcript-store.d.ts.map +1 -1
- package/daemon/dist/domain/transcript-store.js +57 -1
- package/daemon/dist/domain/transcript-store.js.map +1 -1
- package/daemon/dist/domain/types.d.ts +316 -3
- package/daemon/dist/domain/types.d.ts.map +1 -1
- package/daemon/dist/domain/view-event-bridge.d.ts +12 -0
- package/daemon/dist/domain/view-event-bridge.d.ts.map +1 -0
- package/daemon/dist/domain/view-event-bridge.js +92 -0
- package/daemon/dist/domain/view-event-bridge.js.map +1 -0
- package/daemon/dist/domain/view-projector.d.ts +83 -0
- package/daemon/dist/domain/view-projector.d.ts.map +1 -0
- package/daemon/dist/domain/view-projector.js +296 -0
- package/daemon/dist/domain/view-projector.js.map +1 -0
- package/daemon/dist/domain/watchdog-history-log.d.ts +49 -0
- package/daemon/dist/domain/watchdog-history-log.d.ts.map +1 -0
- package/daemon/dist/domain/watchdog-history-log.js +67 -0
- package/daemon/dist/domain/watchdog-history-log.js.map +1 -0
- package/daemon/dist/domain/watchdog-jobs-repository.d.ts +81 -0
- package/daemon/dist/domain/watchdog-jobs-repository.d.ts.map +1 -0
- package/daemon/dist/domain/watchdog-jobs-repository.js +164 -0
- package/daemon/dist/domain/watchdog-jobs-repository.js.map +1 -0
- package/daemon/dist/domain/watchdog-policy-engine.d.ts +92 -0
- package/daemon/dist/domain/watchdog-policy-engine.d.ts.map +1 -0
- package/daemon/dist/domain/watchdog-policy-engine.js +442 -0
- package/daemon/dist/domain/watchdog-policy-engine.js.map +1 -0
- package/daemon/dist/domain/watchdog-scheduler.d.ts +76 -0
- package/daemon/dist/domain/watchdog-scheduler.d.ts.map +1 -0
- package/daemon/dist/domain/watchdog-scheduler.js +112 -0
- package/daemon/dist/domain/watchdog-scheduler.js.map +1 -0
- package/daemon/dist/domain/workflow-instance-store.d.ts +46 -0
- package/daemon/dist/domain/workflow-instance-store.d.ts.map +1 -0
- package/daemon/dist/domain/workflow-instance-store.js +116 -0
- package/daemon/dist/domain/workflow-instance-store.js.map +1 -0
- package/daemon/dist/domain/workflow-projector.d.ts +64 -0
- package/daemon/dist/domain/workflow-projector.d.ts.map +1 -0
- package/daemon/dist/domain/workflow-projector.js +424 -0
- package/daemon/dist/domain/workflow-projector.js.map +1 -0
- package/daemon/dist/domain/workflow-runtime.d.ts +63 -0
- package/daemon/dist/domain/workflow-runtime.d.ts.map +1 -0
- package/daemon/dist/domain/workflow-runtime.js +150 -0
- package/daemon/dist/domain/workflow-runtime.js.map +1 -0
- package/daemon/dist/domain/workflow-spec-cache.d.ts +35 -0
- package/daemon/dist/domain/workflow-spec-cache.d.ts.map +1 -0
- package/daemon/dist/domain/workflow-spec-cache.js +171 -0
- package/daemon/dist/domain/workflow-spec-cache.js.map +1 -0
- package/daemon/dist/domain/workflow-step-trail-log.d.ts +27 -0
- package/daemon/dist/domain/workflow-step-trail-log.d.ts.map +1 -0
- package/daemon/dist/domain/workflow-step-trail-log.js +73 -0
- package/daemon/dist/domain/workflow-step-trail-log.js.map +1 -0
- package/daemon/dist/domain/workflow-types.d.ts +108 -0
- package/daemon/dist/domain/workflow-types.d.ts.map +1 -0
- package/daemon/dist/domain/workflow-types.js +11 -0
- package/daemon/dist/domain/workflow-types.js.map +1 -0
- package/daemon/dist/domain/workflow-validator.d.ts +31 -0
- package/daemon/dist/domain/workflow-validator.d.ts.map +1 -0
- package/daemon/dist/domain/workflow-validator.js +115 -0
- package/daemon/dist/domain/workflow-validator.js.map +1 -0
- package/daemon/dist/index.d.ts.map +1 -1
- package/daemon/dist/index.js +21 -2
- package/daemon/dist/index.js.map +1 -1
- package/daemon/dist/routes/activity.d.ts +3 -0
- package/daemon/dist/routes/activity.d.ts.map +1 -0
- package/daemon/dist/routes/activity.js +47 -0
- package/daemon/dist/routes/activity.js.map +1 -0
- package/daemon/dist/routes/projects.d.ts +12 -0
- package/daemon/dist/routes/projects.d.ts.map +1 -0
- package/daemon/dist/routes/projects.js +178 -0
- package/daemon/dist/routes/projects.js.map +1 -0
- package/daemon/dist/routes/queue.d.ts +10 -0
- package/daemon/dist/routes/queue.d.ts.map +1 -0
- package/daemon/dist/routes/queue.js +374 -0
- package/daemon/dist/routes/queue.js.map +1 -0
- package/daemon/dist/routes/restore-check.d.ts +3 -0
- package/daemon/dist/routes/restore-check.d.ts.map +1 -0
- package/daemon/dist/routes/restore-check.js +200 -0
- package/daemon/dist/routes/restore-check.js.map +1 -0
- package/daemon/dist/routes/rigs.d.ts.map +1 -1
- package/daemon/dist/routes/rigs.js +60 -4
- package/daemon/dist/routes/rigs.js.map +1 -1
- package/daemon/dist/routes/seat.d.ts +3 -0
- package/daemon/dist/routes/seat.d.ts.map +1 -0
- package/daemon/dist/routes/seat.js +69 -0
- package/daemon/dist/routes/seat.js.map +1 -0
- package/daemon/dist/routes/sessions.d.ts.map +1 -1
- package/daemon/dist/routes/sessions.js +25 -4
- package/daemon/dist/routes/sessions.js.map +1 -1
- package/daemon/dist/routes/snapshots.js +66 -29
- package/daemon/dist/routes/snapshots.js.map +1 -1
- package/daemon/dist/routes/stream.d.ts +9 -0
- package/daemon/dist/routes/stream.d.ts.map +1 -0
- package/daemon/dist/routes/stream.js +119 -0
- package/daemon/dist/routes/stream.js.map +1 -0
- package/daemon/dist/routes/transcripts.d.ts.map +1 -1
- package/daemon/dist/routes/transcripts.js +38 -0
- package/daemon/dist/routes/transcripts.js.map +1 -1
- package/daemon/dist/routes/transport.d.ts.map +1 -1
- package/daemon/dist/routes/transport.js +21 -0
- package/daemon/dist/routes/transport.js.map +1 -1
- package/daemon/dist/routes/up.js +24 -3
- package/daemon/dist/routes/up.js.map +1 -1
- package/daemon/dist/routes/views.d.ts +13 -0
- package/daemon/dist/routes/views.d.ts.map +1 -0
- package/daemon/dist/routes/views.js +120 -0
- package/daemon/dist/routes/views.js.map +1 -0
- package/daemon/dist/routes/watchdog.d.ts +18 -0
- package/daemon/dist/routes/watchdog.d.ts.map +1 -0
- package/daemon/dist/routes/watchdog.js +148 -0
- package/daemon/dist/routes/watchdog.js.map +1 -0
- package/daemon/dist/routes/workflow.d.ts +20 -0
- package/daemon/dist/routes/workflow.d.ts.map +1 -0
- package/daemon/dist/routes/workflow.js +184 -0
- package/daemon/dist/routes/workflow.js.map +1 -0
- package/daemon/dist/server.d.ts +30 -0
- package/daemon/dist/server.d.ts.map +1 -1
- package/daemon/dist/server.js +33 -0
- package/daemon/dist/server.js.map +1 -1
- package/daemon/dist/startup.d.ts.map +1 -1
- package/daemon/dist/startup.js +202 -8
- package/daemon/dist/startup.js.map +1 -1
- package/daemon/docs/reference/agent-spec.md +12 -5
- package/daemon/docs/reference/agent-startup-guide.md +9 -9
- package/daemon/specs/agents/apps/vault-specialist/agent.yaml +1 -1
- package/daemon/specs/agents/design/product-designer/agent.yaml +1 -1
- package/daemon/specs/agents/development/implementer/agent.yaml +1 -1
- package/daemon/specs/agents/development/qa/agent.yaml +1 -1
- package/daemon/specs/agents/orchestration/orchestrator/agent.yaml +1 -1
- package/daemon/specs/agents/product-management/pm/agent.yaml +4 -0
- package/daemon/specs/agents/research/analyst/agent.yaml +1 -1
- package/daemon/specs/agents/research/synthesizer/agent.yaml +1 -1
- package/daemon/specs/agents/review/independent-reviewer/agent.yaml +1 -1
- package/daemon/specs/agents/shared/agent.yaml +17 -0
- package/daemon/specs/agents/shared/runtime/claude-mcp.fragment.json +12 -0
- package/daemon/specs/agents/shared/runtime/claude-settings.fragment.json +16 -0
- package/daemon/specs/agents/shared/runtime/codex-config.fragment.toml +5 -0
- package/daemon/specs/agents/shared/skills/claude-compact-in-place/SKILL.md +100 -0
- package/daemon/specs/agents/shared/skills/core/openrig-operator/SKILL.md +110 -0
- package/daemon/specs/agents/shared/skills/core/openrig-user/SKILL.md +44 -1
- package/daemon/specs/agents/shared/skills/mental-model-ha/SKILL.md +3 -0
- package/daemon/specs/agents/shared/skills/pods/orchestration-team/SKILL.md +3 -0
- package/daemon/specs/agents/shared/skills/rig-architect/SKILL.md +11 -1
- package/dist/bin-wrapper.js +0 -0
- package/dist/commands/capture.d.ts +7 -1
- package/dist/commands/capture.d.ts.map +1 -1
- package/dist/commands/capture.js +59 -1
- package/dist/commands/capture.js.map +1 -1
- package/dist/commands/compact-plan.d.ts +9 -0
- package/dist/commands/compact-plan.d.ts.map +1 -0
- package/dist/commands/compact-plan.js +354 -0
- package/dist/commands/compact-plan.js.map +1 -0
- package/dist/commands/context.d.ts +9 -0
- package/dist/commands/context.d.ts.map +1 -0
- package/dist/commands/context.js +220 -0
- package/dist/commands/context.js.map +1 -0
- package/dist/commands/expand.d.ts.map +1 -1
- package/dist/commands/expand.js +15 -4
- package/dist/commands/expand.js.map +1 -1
- package/dist/commands/heartbeat.d.ts +99 -0
- package/dist/commands/heartbeat.d.ts.map +1 -0
- package/dist/commands/heartbeat.js +393 -0
- package/dist/commands/heartbeat.js.map +1 -0
- package/dist/commands/project.d.ts +14 -0
- package/dist/commands/project.d.ts.map +1 -0
- package/dist/commands/project.js +157 -0
- package/dist/commands/project.js.map +1 -0
- package/dist/commands/ps.d.ts +20 -3
- package/dist/commands/ps.d.ts.map +1 -1
- package/dist/commands/ps.js +455 -33
- package/dist/commands/ps.js.map +1 -1
- package/dist/commands/queue.d.ts +16 -0
- package/dist/commands/queue.d.ts.map +1 -0
- package/dist/commands/queue.js +401 -0
- package/dist/commands/queue.js.map +1 -0
- package/dist/commands/restore-check.d.ts +9 -0
- package/dist/commands/restore-check.d.ts.map +1 -0
- package/dist/commands/restore-check.js +234 -0
- package/dist/commands/restore-check.js.map +1 -0
- package/dist/commands/restore-packet.d.ts +9 -0
- package/dist/commands/restore-packet.d.ts.map +1 -0
- package/dist/commands/restore-packet.js +383 -0
- package/dist/commands/restore-packet.js.map +1 -0
- package/dist/commands/restore.d.ts.map +1 -1
- package/dist/commands/restore.js +44 -1
- package/dist/commands/restore.js.map +1 -1
- package/dist/commands/seat.d.ts +5 -0
- package/dist/commands/seat.d.ts.map +1 -0
- package/dist/commands/seat.js +170 -0
- package/dist/commands/seat.js.map +1 -0
- package/dist/commands/send.d.ts +12 -1
- package/dist/commands/send.d.ts.map +1 -1
- package/dist/commands/send.js +93 -5
- package/dist/commands/send.js.map +1 -1
- package/dist/commands/setup.js +3 -3
- package/dist/commands/setup.js.map +1 -1
- package/dist/commands/specs.d.ts.map +1 -1
- package/dist/commands/specs.js +99 -16
- package/dist/commands/specs.js.map +1 -1
- package/dist/commands/stream.d.ts +12 -0
- package/dist/commands/stream.d.ts.map +1 -0
- package/dist/commands/stream.js +111 -0
- package/dist/commands/stream.js.map +1 -0
- package/dist/commands/up.d.ts.map +1 -1
- package/dist/commands/up.js +63 -3
- package/dist/commands/up.js.map +1 -1
- package/dist/commands/view.d.ts +12 -0
- package/dist/commands/view.d.ts.map +1 -0
- package/dist/commands/view.js +82 -0
- package/dist/commands/view.js.map +1 -0
- package/dist/commands/watchdog.d.ts +15 -0
- package/dist/commands/watchdog.d.ts.map +1 -0
- package/dist/commands/watchdog.js +120 -0
- package/dist/commands/watchdog.js.map +1 -0
- package/dist/commands/whoami.d.ts +8 -1
- package/dist/commands/whoami.d.ts.map +1 -1
- package/dist/commands/whoami.js +60 -1
- package/dist/commands/whoami.js.map +1 -1
- package/dist/commands/workflow.d.ts +17 -0
- package/dist/commands/workflow.d.ts.map +1 -0
- package/dist/commands/workflow.js +138 -0
- package/dist/commands/workflow.js.map +1 -0
- package/dist/cross-host-cli-helpers.d.ts +12 -0
- package/dist/cross-host-cli-helpers.d.ts.map +1 -0
- package/dist/cross-host-cli-helpers.js +48 -0
- package/dist/cross-host-cli-helpers.js.map +1 -0
- package/dist/cross-host-executor.d.ts +84 -0
- package/dist/cross-host-executor.d.ts.map +1 -0
- package/dist/cross-host-executor.js +138 -0
- package/dist/cross-host-executor.js.map +1 -0
- package/dist/host-registry.d.ts +50 -0
- package/dist/host-registry.d.ts.map +1 -0
- package/dist/host-registry.js +116 -0
- package/dist/host-registry.js.map +1 -0
- package/dist/index.d.ts +20 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +24 -0
- package/dist/index.js.map +1 -1
- package/dist/mcp-server.d.ts.map +1 -1
- package/dist/mcp-server.js +22 -2
- package/dist/mcp-server.js.map +1 -1
- package/dist/restore-packet/claude-transcript-parser.d.ts +12 -0
- package/dist/restore-packet/claude-transcript-parser.d.ts.map +1 -0
- package/dist/restore-packet/claude-transcript-parser.js +221 -0
- package/dist/restore-packet/claude-transcript-parser.js.map +1 -0
- package/dist/restore-packet/codex-jsonl-parser.d.ts +11 -0
- package/dist/restore-packet/codex-jsonl-parser.d.ts.map +1 -0
- package/dist/restore-packet/codex-jsonl-parser.js +159 -0
- package/dist/restore-packet/codex-jsonl-parser.js.map +1 -0
- package/dist/restore-packet/omitted-records.d.ts +60 -0
- package/dist/restore-packet/omitted-records.d.ts.map +1 -0
- package/dist/restore-packet/omitted-records.js +116 -0
- package/dist/restore-packet/omitted-records.js.map +1 -0
- package/dist/restore-packet/packet-writer.d.ts +59 -0
- package/dist/restore-packet/packet-writer.d.ts.map +1 -0
- package/dist/restore-packet/packet-writer.js +224 -0
- package/dist/restore-packet/packet-writer.js.map +1 -0
- package/dist/restore-packet/redaction.d.ts +30 -0
- package/dist/restore-packet/redaction.d.ts.map +1 -0
- package/dist/restore-packet/redaction.js +71 -0
- package/dist/restore-packet/redaction.js.map +1 -0
- package/dist/restore-packet/runtime-detect.d.ts +19 -0
- package/dist/restore-packet/runtime-detect.d.ts.map +1 -0
- package/dist/restore-packet/runtime-detect.js +81 -0
- package/dist/restore-packet/runtime-detect.js.map +1 -0
- package/dist/restore-packet/schema-validator.d.ts +13 -0
- package/dist/restore-packet/schema-validator.d.ts.map +1 -0
- package/dist/restore-packet/schema-validator.js +245 -0
- package/dist/restore-packet/schema-validator.js.map +1 -0
- package/dist/restore-packet/types.d.ts +76 -0
- package/dist/restore-packet/types.d.ts.map +1 -0
- package/dist/restore-packet/types.js +15 -0
- package/dist/restore-packet/types.js.map +1 -0
- package/dist/schemas/restore-summary.schema.json +98 -0
- package/package.json +10 -3
- package/scripts/check-abi.mjs +123 -0
- package/ui/dist/assets/{index-DlMH-REm.css → index-Dec25siz.css} +1 -1
- package/ui/dist/assets/{index-Ccd-jwRN.js → index-GNYaWmBj.js} +2 -2
- package/ui/dist/index.html +2 -2
@@ -1,7 +1,22 @@
 import { writeFileSync } from "node:fs";
 import { join } from "node:path";
+import { assessNativeResumeProbe } from "./native-resume-probe.js";
 // Only these edge kinds constrain launch order
 const LAUNCH_DEPENDENCY_KINDS = new Set(["delegates_to", "spawned_by"]);
+export function rollupRestoreRigResult(nodes) {
+    if (nodes.length === 0)
+        return "failed";
+    // L3: `attention_required` is non-terminal failure (alive but blocked on
+    // operator action). It rolls up as `partially_restored`. `operator_recovered`
+    // is a clean post-reconciliation outcome and rolls up like `resumed`.
+    const allFailed = nodes.every((node) => node.status === "failed");
+    if (allFailed)
+        return "failed";
+    if (nodes.some((node) => node.status === "fresh" || node.status === "failed" || node.status === "attention_required")) {
+        return "partially_restored";
+    }
+    return "fully_restored";
+}
 export class RestoreOrchestrator {
     db;
     activeRestores = new Set();
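The hunks below appear to belong to package/daemon/dist/domain/restore-orchestrator.js (they match its +460 -62 entry in the file list above). The new `rollupRestoreRigResult` helper is the single place where per-node outcomes become a rig-level verdict; a minimal standalone restatement of those roll-up rules follows (the type names and sample statuses are illustrative, not the package's exports):

```ts
// Illustrative sketch of the roll-up rules introduced in the hunk above.
type NodeStatus = "resumed" | "fresh" | "failed" | "attention_required" | "operator_recovered";
type RigResult = "failed" | "partially_restored" | "fully_restored";

function rollup(nodes: { status: NodeStatus }[]): RigResult {
    if (nodes.length === 0) return "failed";
    if (nodes.every((n) => n.status === "failed")) return "failed";
    // Any fresh, failed, or attention_required node means continuity was only partial.
    if (nodes.some((n) => n.status === "fresh" || n.status === "failed" || n.status === "attention_required")) {
        return "partially_restored";
    }
    // Only resumed / operator_recovered nodes remain.
    return "fully_restored";
}

// One node still awaiting operator action drags the whole rig to "partially_restored".
console.log(rollup([{ status: "resumed" }, { status: "attention_required" }]));
```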
@@ -62,8 +77,13 @@ export class RestoreOrchestrator {
         if (!rig) {
             return { ok: false, code: "rig_not_found", message: `Rig ${rigId} not found` };
         }
-
-
+        // Classify DB-running sessions against tmux reality WITHOUT mutating DB.
+        // This determines whether the rig is safe to restore before any state
+        // changes occur — critical for pre_restore snapshot ordering (snapshot
+        // must capture original DB state, not post-reconciliation state).
+        const classification = await this.classifyRunningSessions(rigId);
+        if (classification.live.length > 0 || classification.unknown.length > 0) {
+            return { ok: false, code: "rig_not_stopped", message: `Rig ${rigId} has live sessions. Stop the rig with 'rig down' before restoring, or use the latest auto-pre-down snapshot.` };
         }
         // Per-rig concurrency lock
         if (this.activeRestores.has(rigId)) {
@@ -71,17 +91,52 @@ export class RestoreOrchestrator {
         }
         this.activeRestores.add(rigId);
         try {
-
+            const validation = this.validatePreRestore(snapshot.data, {
+                fsOps: opts?.fsOps,
+                servicesRecord: this.rigRepo.getServicesRecord(rigId),
+            });
+            if (validation.blockers.length > 0) {
+                const result = {
+                    snapshotId,
+                    preRestoreSnapshotId: null,
+                    rigResult: "not_attempted",
+                    nodes: [],
+                    warnings: validation.warnings,
+                    blockers: validation.blockers,
+                };
+                return {
+                    ok: false,
+                    code: "pre_restore_validation_failed",
+                    message: "Restore pre-validation failed; no restore mutation was attempted.",
+                    result,
+                };
+            }
+            // 2. Capture pre-restore snapshot BEFORE any DB mutations —
+            // DB still reflects original session state (running for stale sessions)
             const preRestoreSnapshot = this.snapshotCapture.captureSnapshot(rigId, "pre_restore");
-            //
-
+            // 2b. NOW mark stale sessions as detached (safe: we've captured the
+            // pre-restore snapshot and confirmed no live/unknown sessions remain)
+            for (const sessionId of classification.stale) {
+                this.sessionRegistry.markDetached(sessionId);
+            }
+            // 3. Emit restore.started — the persisted event seq IS the attempt id
+            // (Decision 1: no separate restore_attempts table).
+            const restoreStartedEvent = this.eventBus.emit({ type: "restore.started", rigId, snapshotId });
+            const attemptId = restoreStartedEvent.seq;
+            try {
+                opts?.onAttemptStarted?.(attemptId);
+            }
+            catch {
+                // onAttemptStarted is fire-and-forget; never let a route's response
+                // logic crash the restore pipeline.
+            }
             // 3b. Service gate: boot services before agent restore if this rig has services
             if (this.serviceOrchestrator) {
                 const svcRecord = this.rigRepo.getServicesRecord(rigId);
                 if (svcRecord) {
                     const bootResult = await this.serviceOrchestrator.boot(rigId);
                     if (!bootResult.ok) {
-                        this.eventBus.emit({ type: "restore.completed", rigId, snapshotId, result: { snapshotId, preRestoreSnapshotId: preRestoreSnapshot.id, nodes: [], warnings: [`Service boot failed: ${bootResult.error}`] } });
+                        this.eventBus.emit({ type: "restore.completed", rigId, snapshotId, result: { snapshotId, preRestoreSnapshotId: preRestoreSnapshot.id, rigResult: "failed", nodes: [], warnings: [`Service boot failed: ${bootResult.error}`] } });
                         return { ok: false, code: "service_boot_failed", message: `Service boot failed before agent restore: ${bootResult.error}` };
                     }
                 }
@@ -90,7 +145,7 @@ export class RestoreOrchestrator {
             const plan = this.computeRestorePlan(snapshot.data);
             // 5. Execute restore with compensating pattern per node
             const nodeResults = [];
-            const restoreWarnings = [];
+            const restoreWarnings = [...validation.warnings];
             for (const entry of plan) {
                 const result = await this.restoreNodeWithCompensation(entry, rigId, snapshotId, snapshot.data, opts, restoreWarnings);
                 nodeResults.push(result);
@@ -98,6 +153,7 @@ export class RestoreOrchestrator {
             const restoreResult = {
                 snapshotId,
                 preRestoreSnapshotId: preRestoreSnapshot.id,
+                rigResult: rollupRestoreRigResult(nodeResults),
                 nodes: nodeResults,
                 warnings: restoreWarnings,
             };
@@ -116,6 +172,169 @@ export class RestoreOrchestrator {
             this.activeRestores.delete(rigId);
         }
     }
+    validatePreRestore(data, opts) {
+        const blockers = [];
+        const warnings = [];
+        const exists = opts.fsOps?.exists ?? (() => true);
+        const add = (blocker) => blockers.push(blocker);
+        const nodes = Array.isArray(data.nodes) ? data.nodes : null;
+        const sessions = Array.isArray(data.sessions) ? data.sessions : null;
+        const edges = Array.isArray(data.edges) ? data.edges : null;
+        const checkpoints = data.checkpoints && typeof data.checkpoints === "object" ? data.checkpoints : null;
+        if (!data.rig || typeof data.rig.id !== "string") {
+            add({
+                code: "invalid_snapshot_data",
+                severity: "critical",
+                target: "snapshot.rig",
+                message: "Snapshot is missing the rig record needed for restore.",
+                remediation: "Capture a new snapshot or restore from a structurally valid snapshot.",
+            });
+        }
+        if (!nodes) {
+            add({
+                code: "invalid_snapshot_data",
+                severity: "critical",
+                target: "snapshot.nodes",
+                message: "Snapshot is missing the node list needed for restore.",
+                remediation: "Capture a new snapshot or restore from a structurally valid snapshot.",
+            });
+        }
+        if (!sessions) {
+            add({
+                code: "invalid_snapshot_data",
+                severity: "critical",
+                target: "snapshot.sessions",
+                message: "Snapshot is missing session records needed for restore.",
+                remediation: "Capture a new snapshot or restore from a structurally valid snapshot.",
+            });
+        }
+        if (!edges) {
+            add({
+                code: "invalid_snapshot_data",
+                severity: "critical",
+                target: "snapshot.edges",
+                message: "Snapshot is missing topology edges needed for restore planning.",
+                remediation: "Capture a new snapshot or restore from a structurally valid snapshot.",
+            });
+        }
+        if (!checkpoints) {
+            add({
+                code: "invalid_snapshot_data",
+                severity: "critical",
+                target: "snapshot.checkpoints",
+                message: "Snapshot is missing the checkpoint map needed for restore.",
+                remediation: "Capture a new snapshot or restore from a structurally valid snapshot.",
+            });
+        }
+        if (!nodes || !checkpoints) {
+            return { blockers, warnings };
+        }
+        for (const node of nodes) {
+            const checkpoint = checkpoints[node.id] ?? null;
+            if (checkpoint && !node.cwd) {
+                add({
+                    code: "checkpoint_missing_node_cwd",
+                    severity: "critical",
+                    nodeId: node.id,
+                    logicalId: node.logicalId,
+                    target: "checkpoint",
+                    message: `Checkpoint exists for ${node.logicalId}, but the node has no cwd to receive it.`,
+                    remediation: "Update the rig spec to include a cwd for this node, then capture a new snapshot or restore manually.",
+                });
+            }
+            const startupCtx = data.nodeStartupContext?.[node.id] ?? null;
+            if (!startupCtx)
+                continue;
+            for (const file of startupCtx.resolvedStartupFiles ?? []) {
+                if (!file.required) {
+                    if (this.pathLike(file.absolutePath) && !exists(file.absolutePath)) {
+                        warnings.push(`Restore pre-validation: optional startup file missing for ${node.logicalId}: ${file.absolutePath}`);
+                    }
+                    continue;
+                }
+                if (this.pathLike(file.ownerRoot) && !exists(file.ownerRoot)) {
+                    add({
+                        code: "startup_owner_root_missing",
+                        severity: "critical",
+                        nodeId: node.id,
+                        logicalId: node.logicalId,
+                        target: file.path,
+                        path: file.ownerRoot,
+                        message: `Required startup file owner root is missing for ${node.logicalId}: ${file.ownerRoot}`,
+                        remediation: "Restore the agent/source root or capture a new snapshot with reachable startup context.",
+                    });
+                }
+                if (this.pathLike(file.absolutePath) && !exists(file.absolutePath)) {
+                    add({
+                        code: "required_startup_file_missing",
+                        severity: "critical",
+                        nodeId: node.id,
+                        logicalId: node.logicalId,
+                        target: file.path,
+                        path: file.absolutePath,
+                        message: `Required startup file is missing for ${node.logicalId}: ${file.absolutePath}`,
+                        remediation: "Restore the missing startup file or capture a new snapshot before retrying restore.",
+                    });
+                }
+            }
+            for (const entry of startupCtx.projectionEntries ?? []) {
+                if (this.pathLike(entry.sourcePath) && !exists(entry.sourcePath)) {
+                    add({
+                        code: "projection_source_missing",
+                        severity: "critical",
+                        nodeId: node.id,
+                        logicalId: node.logicalId,
+                        target: entry.effectiveId,
+                        path: entry.sourcePath,
+                        message: `Projection source root is missing for ${node.logicalId}: ${entry.sourcePath}`,
+                        remediation: "Restore the agent/source root that owns this projected resource or capture a new snapshot.",
+                    });
+                }
+                if (this.pathLike(entry.absolutePath) && !exists(entry.absolutePath)) {
+                    add({
+                        code: "projection_entry_missing",
+                        severity: "critical",
+                        nodeId: node.id,
+                        logicalId: node.logicalId,
+                        target: entry.effectiveId,
+                        path: entry.absolutePath,
+                        message: `Projection entry is missing for ${node.logicalId}: ${entry.absolutePath}`,
+                        remediation: "Restore the projected source artifact or capture a new snapshot before retrying restore.",
+                    });
+                }
+            }
+        }
+        const servicesRecord = opts.servicesRecord ?? null;
+        if (servicesRecord) {
+            if (this.pathLike(servicesRecord.rigRoot) && !exists(servicesRecord.rigRoot)) {
+                add({
+                    code: "service_rig_root_missing",
+                    severity: "critical",
+                    target: "services.rigRoot",
+                    path: servicesRecord.rigRoot,
+                    message: `Service rig root is missing: ${servicesRecord.rigRoot}`,
+                    remediation: "Restore the service rig root or update the services record before retrying restore.",
+                });
+            }
+            if (this.pathLike(servicesRecord.composeFile) && !exists(servicesRecord.composeFile)) {
+                add({
+                    code: "service_compose_file_missing",
+                    severity: "critical",
+                    target: "services.composeFile",
+                    path: servicesRecord.composeFile,
+                    message: `Service compose file is missing: ${servicesRecord.composeFile}`,
+                    remediation: "Restore the compose file or update the services record before retrying restore.",
+                });
+            }
+        }
+        return { blockers, warnings };
+    }
+    pathLike(value) {
+        return typeof value === "string" && value.trim().length > 0 && (value.startsWith("/")
+            || value.startsWith("./")
+            || value.startsWith("../")
+            || value.startsWith("~"));
+    }
     captureNodeState(nodeId, rigId) {
         const binding = this.sessionRegistry.getBindingForNode(nodeId);
         const sessions = this.sessionRegistry.getSessionsForRig(rigId)
@@ -123,12 +342,35 @@ export class RestoreOrchestrator {
             .map((s) => ({ id: s.id, status: s.status }));
         return { binding, sessions };
     }
-
-
+    /**
+     * Classify ALL DB-running sessions against tmux reality without mutating DB.
+     * Scans every session (not just latest-per-node) to catch older live sessions
+     * behind newer detached rows. Returns structured classification for the caller
+     * to act on: live sessions block restore, stale sessions get marked detached
+     * AFTER the pre_restore snapshot is captured, unknown sessions fail closed.
+     */
+    async classifyRunningSessions(rigId) {
+        const live = [];
+        const stale = [];
+        const unknown = [];
         for (const session of this.sessionRegistry.getSessionsForRig(rigId)) {
-
+            if (session.status !== "running")
+                continue;
+            try {
+                const alive = await this.tmuxAdapter.hasSession(session.sessionName);
+                if (alive) {
+                    live.push(session.id);
+                }
+                else {
+                    stale.push(session.id);
+                }
+            }
+            catch {
+                // tmux check failed — fail closed: classify as unknown so restore blocks
+                unknown.push(session.id);
+            }
         }
-        return
+        return { live, stale, unknown };
     }
     clearStaleState(nodeId, rigId) {
         this.sessionRegistry.clearBinding(nodeId);
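The classification contract above (live sessions block the restore, stale sessions are reconciled only after the pre_restore snapshot, probe failures fail closed as unknown) can be restated as a small self-contained sketch; the `Session` shape and the `hasSession` stub below are illustrative stand-ins, not the package's API:

```ts
// Illustrative sketch of the live/stale/unknown split described in the hunk above.
type Session = { id: string; sessionName: string; status: "running" | "detached" };

async function classify(
    sessions: Session[],
    hasSession: (name: string) => Promise<boolean>, // stand-in for the tmux adapter probe
) {
    const live: string[] = [], stale: string[] = [], unknown: string[] = [];
    for (const s of sessions) {
        if (s.status !== "running") continue;
        try {
            if (await hasSession(s.sessionName)) live.push(s.id);
            else stale.push(s.id);
        } catch {
            unknown.push(s.id); // ambiguous probe fails closed: restore stays blocked
        }
    }
    return { live, stale, unknown };
}
```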
@@ -309,34 +551,33 @@ export class RestoreOrchestrator {
        const resumeToken = session?.resumeToken ?? null;
        const resumeRequested = restorePolicy === "resume_if_possible" && !!resumeType && resumeType !== "none";
        let baseStatus = "fresh";
-        let needsFreshLaunchFallback = false;
        // Pod-aware nodes: resume via launchHarness (handled in startup orchestrator with skipHarnessLaunch: false)
        // Legacy nodes: resume via old claude-resume/codex-resume helpers
        const isPodAware = !!node.podId;
        if (resumeRequested && !isPodAware) {
            // Legacy resume path
            if (!resumeToken) {
-
-                warnings?.push(`Node ${node.logicalId}: resume requested but no token was available; launched fresh instead.`);
-                baseStatus = "fresh";
-                needsFreshLaunchFallback = true;
-            }
-            else {
-                return { nodeId: node.id, logicalId: node.logicalId, status: "failed", error: `Resume requested but no token available. Restore the node manually or launch fresh with: rig up` };
-            }
+                return { nodeId: node.id, logicalId: node.logicalId, status: "failed", error: `Resume requested but no token available. Restore the node manually or launch fresh explicitly.` };
            }
            else {
                const resumeOutcome = await this.attemptResume(sessionName, resumeType, resumeToken, node.cwd ?? "/");
-                if (resumeOutcome === "resumed") {
+                if (resumeOutcome.kind === "resumed") {
                    baseStatus = "resumed";
                }
-                else if (resumeOutcome === "
-
-
-
+                else if (resumeOutcome.kind === "attention_required") {
+                    // L3 Decision 2: Claude resume-selection prompt -> attention_required.
+                    // Do NOT auto-answer. Reconcile later via reconcileNodeRuntimeTruth
+                    // when the operator reaches a usable pane state.
+                    return {
+                        nodeId: node.id,
+                        logicalId: node.logicalId,
+                        status: "attention_required",
+                        error: resumeOutcome.message,
+                        attentionEvidence: resumeOutcome.evidence ?? null,
+                    };
                }
                else {
-                    return { nodeId: node.id, logicalId: node.logicalId, status: "failed", error: `Resume attempted but failed. Check the harness state manually or launch fresh
+                    return { nodeId: node.id, logicalId: node.logicalId, status: "failed", error: `Resume attempted but failed. Check the harness state manually or launch fresh explicitly.` };
                }
            }
        }
@@ -345,18 +586,12 @@ export class RestoreOrchestrator {
            // if resume was requested but continuity state is unavailable, fail loudly
            // instead of silently downgrading to a fresh launch with amnesia.
            if (!resumeToken) {
-
-
-
-
-
-
-                    nodeId: node.id,
-                    logicalId: node.logicalId,
-                    status: "failed",
-                    error: "Resume requested but no token available. Restore the node manually or launch fresh with: rig up",
-                };
-            }
+                return {
+                    nodeId: node.id,
+                    logicalId: node.logicalId,
+                    status: "failed",
+                    error: "Resume requested but no token available. Restore the node manually or launch fresh explicitly.",
+                };
            }
        }
        // Checkpoint delivery (if not already resumed)
@@ -421,12 +656,13 @@ export class RestoreOrchestrator {
            const binding = {
                ...launchResult.binding,
                cwd: node.cwd ?? ".",
+                codexConfigProfile: node.codexConfigProfile ?? undefined,
            };
            try {
                const { StartupOrchestrator } = await import("./startup-orchestrator.js");
                const startupOrch = new StartupOrchestrator({ db: this.db, sessionRegistry: this.sessionRegistry, eventBus: this.eventBus, tmuxAdapter: this.tmuxAdapter });
                const replayAsRestore = baseStatus !== "fresh";
-                const shouldLaunchHarness = isPodAware
+                const shouldLaunchHarness = isPodAware;
                const startupResult = await startupOrch.startNode({
                    rigId,
                    nodeId: node.id,
@@ -438,24 +674,46 @@ export class RestoreOrchestrator {
                    startupActions: startupCtx.startupActions,
                    isRestore: replayAsRestore,
                    skipHarnessLaunch: !shouldLaunchHarness,
-                    resumeToken: (isPodAware && resumeRequested
+                    resumeToken: (isPodAware && resumeRequested) ? resumeToken ?? undefined : undefined,
                    sessionName: sessionName,
+                    allowFreshFallback: !(isPodAware && resumeRequested),
                });
                if (startupResult.ok) {
                    const nativeContinuityProved = isPodAware
                        && resumeRequested
                        && this.launchedSessionMatchesSnapshotResume(launchResult.session.id, resumeType, resumeToken);
-                    // Pod-aware nodes with resume token may either truly resume or
-                    // prove that the saved session is gone and fall back to fresh.
                    if (isPodAware && resumeRequested && startupResult.continuityOutcome === "fresh" && !nativeContinuityProved) {
-
+                        return {
+                            nodeId: node.id,
+                            logicalId: node.logicalId,
+                            status: "failed",
+                            error: "Resume attempted but runtime reported fresh continuity. Launch fresh explicitly if that degradation is acceptable.",
+                        };
                    }
                    const finalStatus = (isPodAware && resumeRequested)
                        ? ((startupResult.continuityOutcome === "resumed" || nativeContinuityProved) ? "resumed" : baseStatus)
                        : baseStatus;
                    return { nodeId: node.id, logicalId: node.logicalId, status: finalStatus };
                }
-
+                // Pod-aware attention_required: hoisted above both the
+                // resume-requested and non-resume-requested failed branches so
+                // that pod-aware Codex auth-refusal (verifyResumeLaunch →
+                // recovery: "attention_required") surfaces honestly regardless
+                // of whether resume was requested. Mirrors the runtime-agnostic
+                // legacy mapping at lines 725-735. This MUST come before the
+                // line 859 / 867 failed branches; otherwise the production
+                // pod-aware-resume path (most common) returns `status: "failed"`
+                // and the slice's "attention_required end-to-end" claim breaks.
+                if (isPodAware && startupResult.startupStatus === "attention_required") {
+                    return {
+                        nodeId: node.id,
+                        logicalId: node.logicalId,
+                        status: "attention_required",
+                        error: `Restore startup requires attention: ${startupResult.errors.join("; ")}`,
+                        attentionEvidence: startupResult.evidence ?? null,
+                    };
+                }
+                if (isPodAware && resumeRequested) {
                    return {
                        nodeId: node.id,
                        logicalId: node.logicalId,
@@ -463,10 +721,29 @@ export class RestoreOrchestrator {
                        error: startupResult.errors.join("; "),
                    };
                }
+                if (isPodAware) {
+                    const prefix = startupResult.startupStatus === "attention_required"
+                        ? "Restore startup requires attention"
+                        : "Restore startup failed";
+                    return {
+                        nodeId: node.id,
+                        logicalId: node.logicalId,
+                        status: "failed",
+                        error: `${prefix}: ${startupResult.errors.join("; ")}`,
+                    };
+                }
                warnings?.push(`Restore startup failed for ${node.logicalId}: ${startupResult.errors.join("; ")}`);
            }
            catch (err) {
-                if (
+                if (isPodAware && resumeRequested) {
+                    return {
+                        nodeId: node.id,
+                        logicalId: node.logicalId,
+                        status: "failed",
+                        error: `Restore startup error: ${err.message}`,
+                    };
+                }
+                if (isPodAware) {
                    return {
                        nodeId: node.id,
                        logicalId: node.logicalId,
@@ -479,21 +756,8 @@ export class RestoreOrchestrator {
                }
            }
        }
-        if (needsFreshLaunchFallback) {
-            return {
-                nodeId: node.id,
-                logicalId: node.logicalId,
-                status: "failed",
-                error: "Resume fallback required a fresh launch, but no startup context was available to relaunch the runtime",
-            };
-        }
        return { nodeId: node.id, logicalId: node.logicalId, status: baseStatus };
    }
-    shouldFallbackFreshWithoutResume(runtime, resumeType) {
-        return !!resumeType
-            && ((runtime === "codex" && resumeType.startsWith("codex"))
-                || (runtime === "claude-code" && resumeType.startsWith("claude")));
-    }
    launchedSessionMatchesSnapshotResume(sessionId, resumeType, resumeToken) {
        if (!resumeType || !resumeToken)
            return false;
@@ -506,14 +770,148 @@ export class RestoreOrchestrator {
        if (this.claudeResume.canResume(resumeType, resumeToken)) {
            const result = await this.claudeResume.resume(sessionName, resumeType, resumeToken, cwd);
            if (result.ok)
-                return "resumed";
-
+                return { kind: "resumed" };
+            if (result.code === "retry_fresh")
+                return { kind: "retry_fresh" };
+            // L3: surface attention_required from the Claude probe (resume-selection prompt).
+            if (result.code === "attention_required") {
+                return {
+                    kind: "attention_required",
+                    message: result.message,
+                    evidence: result.evidence,
+                };
+            }
+            return { kind: "failed", message: result.message };
        }
        if (this.codexResume.canResume(resumeType, resumeToken)) {
            const result = await this.codexResume.resume(sessionName, resumeType, resumeToken, cwd);
-
+            if (result.ok)
+                return { kind: "resumed" };
+            if (result.code === "retry_fresh")
+                return { kind: "retry_fresh" };
+            // Codex auth-refusal: stored OAuth token can no longer be refreshed.
+            // Recoverable — operator runs `codex login` and the seat continues.
+            // Per-node mapping at lines 725-735 emits `status: "attention_required"`
+            // with `attentionEvidence` for both runtimes; no further wiring needed.
+            if (result.code === "attention_required") {
+                return {
+                    kind: "attention_required",
+                    message: result.message,
+                    evidence: result.evidence,
+                };
+            }
+            return { kind: "failed", message: result.message };
        }
-        return "failed";
+        return { kind: "failed", message: "No resume adapter available for this runtime/token combination." };
+    }
+    /**
+     * L3 Decision 3: runtime-truth reconciliation. Given a node whose original
+     * `restoreOutcome` was `failed` or `attention_required`, examine current
+     * runtime state. If ALL four visible-evidence preconditions hold, append
+     * `restore.outcome_reconciled` so the node's effective post-reconciliation
+     * outcome becomes `operator_recovered`. Never mutates or deletes the
+     * original failure event; never produces `ready`.
+     *
+     * Returns `{ ok: true, attemptId, from, to, evidence }` on upgrade, or
+     * `{ ok: false, code, detail }` describing exactly which precondition
+     * failed (or "no_attempt" / "outcome_not_upgradable" when there is nothing
+     * to reconcile).
+     */
+    async reconcileNodeRuntimeTruth(rigId, nodeId) {
+        // Locate the latest restore attempt for this rig.
+        const startedRow = this.db.prepare("SELECT seq, payload FROM events WHERE rig_id = ? AND type = 'restore.started' ORDER BY seq DESC LIMIT 1").get(rigId);
+        if (!startedRow) {
+            return { ok: false, code: "no_attempt", detail: "No restore.started event recorded for this rig." };
+        }
+        const attemptId = startedRow.seq;
+        // Find the node's most recent post-attempt outcome from the most-recent
+        // restore.completed event for this rig. If the latest outcome is not
+        // failed or attention_required, the reconciler has nothing to upgrade.
+        const completedRow = this.db.prepare("SELECT payload FROM events WHERE rig_id = ? AND type = 'restore.completed' AND seq > ? ORDER BY seq DESC LIMIT 1").get(rigId, attemptId);
+        let nodeStatus = null;
+        let nodeLogicalId = null;
+        if (completedRow) {
+            try {
+                const parsed = JSON.parse(completedRow.payload);
+                const found = parsed.result?.nodes?.find((n) => n.nodeId === nodeId);
+                if (found) {
+                    nodeStatus = found.status;
+                    nodeLogicalId = found.logicalId;
+                }
+            }
+            catch {
+                // payload corruption — treat as no node record found
+            }
+        }
+        if (!nodeStatus) {
+            return { ok: false, code: "node_not_found", detail: `Node ${nodeId} has no record in the latest restore.completed event for rig ${rigId}.` };
+        }
+        if (nodeStatus !== "failed" && nodeStatus !== "attention_required") {
+            return { ok: false, code: "outcome_not_upgradable", detail: `Reconciliation only upgrades failed or attention_required; current outcome is ${nodeStatus}.` };
+        }
+        const fromStatus = nodeStatus;
+        // Resolve canonical session name for this node so we can probe tmux/pane.
+        const bindingRow = this.db.prepare("SELECT tmux_session FROM bindings WHERE node_id = ?").get(nodeId);
+        const sessionName = bindingRow?.tmux_session ?? null;
+        if (!sessionName) {
+            return { ok: false, code: "tmux_session_missing", detail: "No tmux session bound for this node." };
+        }
+        // Precondition #1: tmux session exists.
+        let alive = false;
+        try {
+            alive = await this.tmuxAdapter.hasSession(sessionName);
+        }
+        catch {
+            // L1 fail-closed: ambiguous probe failure stays as not-alive for the
+            // reconciler. Original failure event remains untouched.
+            alive = false;
+        }
+        if (!alive) {
+            return { ok: false, code: "tmux_session_missing", detail: `Tmux session ${sessionName} is not currently alive.` };
+        }
+        // Resolve runtime + capture pane state for preconditions #2 and #4.
+        const nodeRow = this.db.prepare("SELECT runtime FROM nodes WHERE id = ?").get(nodeId);
+        const runtime = nodeRow?.runtime ?? null;
+        const paneCommand = await this.tmuxAdapter.getPaneCommand(sessionName);
+        const paneContent = (await this.tmuxAdapter.capturePaneContent(sessionName, 40)) ?? "";
+        // Precondition #2: foreground process is the runtime (claude or codex).
+        const probe = assessNativeResumeProbe({ runtime, paneCommand, paneContent });
+        const fgProcess = paneCommand && (paneCommand === "claude" || paneCommand.startsWith("codex"))
+            ? (paneCommand === "claude" ? "claude" : "codex")
+            : null;
+        if (!fgProcess) {
+            return { ok: false, code: "fg_process_not_runtime", detail: `Foreground process is "${paneCommand ?? "(unknown)"}", not claude/codex.` };
+        }
+        // Precondition #3: the resume token was actually used at launch.
+        // Stored on the latest session row; null/empty means resume was not
+        // exercised so a "recovered" claim has no basis.
+        const sessRow = this.db.prepare("SELECT resume_token FROM sessions WHERE node_id = ? ORDER BY id DESC LIMIT 1").get(nodeId);
+        const resumeTokenUsed = typeof sessRow?.resume_token === "string" && sessRow.resume_token.length > 0;
+        if (!resumeTokenUsed) {
+            return { ok: false, code: "resume_token_not_used", detail: "No resume token recorded on the latest session row." };
+        }
+        // Precondition #4: pane is at a usable/idle state — explicitly NOT a
+        // resume-selection prompt and not the "returned to shell" failure mode.
+        if (probe.status !== "resumed") {
+            return { ok: false, code: "pane_not_usable", detail: `Pane state is ${probe.status} (${probe.code}); reconciliation requires resumed.` };
+        }
+        // All four preconditions hold. Append (never mutate) the audit event.
+        this.eventBus.emit({
+            type: "restore.outcome_reconciled",
+            rigId,
+            nodeId,
+            attemptId,
+            from: fromStatus,
+            to: "operator_recovered",
+            evidence: { tmux: true, fgProcess, resumeTokenUsed: true, paneState: "usable" },
+        });
+        return {
+            ok: true,
+            attemptId,
+            from: fromStatus,
+            to: "operator_recovered",
+            evidence: { tmux: true, fgProcess, resumeTokenUsed: true, paneState: "usable" },
+        };
    }
    writeCheckpointFile(cwd, checkpoint) {
        try {
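For reference, the reconciliation path added in the final hunk only upgrades a node when four pieces of runtime evidence all hold at once. A condensed sketch of that gate, under the assumption that the evidence has already been collected (the `Evidence` shape and the `probeStatus` field below are illustrative stand-ins for the tmux/probe results in the diff):

```ts
// Condensed view of the four preconditions checked by reconcileNodeRuntimeTruth above.
interface Evidence {
    tmuxAlive: boolean;                   // #1 the bound tmux session exists
    fgProcess: "claude" | "codex" | null; // #2 foreground process is the runtime
    resumeTokenUsed: boolean;             // #3 a resume token was recorded on the session row
    probeStatus: string;                  // #4 pane probe reports the usable "resumed" state
}

function canUpgradeToOperatorRecovered(from: string, e: Evidence): boolean {
    // Only failed / attention_required outcomes are ever upgraded, and never to "ready".
    const upgradable = from === "failed" || from === "attention_required";
    return upgradable
        && e.tmuxAlive
        && e.fgProcess !== null
        && e.resumeTokenUsed
        && e.probeStatus === "resumed";
}
```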