codex_autorunner-1.1.0-py3-none-any.whl → codex_autorunner-1.2.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_autorunner/agents/opencode/client.py +113 -4
- codex_autorunner/agents/opencode/supervisor.py +4 -0
- codex_autorunner/agents/registry.py +17 -7
- codex_autorunner/bootstrap.py +219 -1
- codex_autorunner/core/__init__.py +17 -1
- codex_autorunner/core/about_car.py +124 -11
- codex_autorunner/core/app_server_threads.py +6 -0
- codex_autorunner/core/config.py +238 -3
- codex_autorunner/core/context_awareness.py +39 -0
- codex_autorunner/core/docs.py +0 -122
- codex_autorunner/core/filebox.py +265 -0
- codex_autorunner/core/flows/controller.py +71 -1
- codex_autorunner/core/flows/reconciler.py +4 -1
- codex_autorunner/core/flows/runtime.py +22 -0
- codex_autorunner/core/flows/store.py +61 -9
- codex_autorunner/core/flows/transition.py +23 -16
- codex_autorunner/core/flows/ux_helpers.py +18 -3
- codex_autorunner/core/flows/worker_process.py +32 -6
- codex_autorunner/core/hub.py +198 -41
- codex_autorunner/core/lifecycle_events.py +253 -0
- codex_autorunner/core/path_utils.py +2 -1
- codex_autorunner/core/pma_audit.py +224 -0
- codex_autorunner/core/pma_context.py +683 -0
- codex_autorunner/core/pma_dispatch_interceptor.py +284 -0
- codex_autorunner/core/pma_lifecycle.py +527 -0
- codex_autorunner/core/pma_queue.py +367 -0
- codex_autorunner/core/pma_safety.py +221 -0
- codex_autorunner/core/pma_state.py +115 -0
- codex_autorunner/core/ports/agent_backend.py +2 -5
- codex_autorunner/core/ports/run_event.py +1 -4
- codex_autorunner/core/prompt.py +0 -80
- codex_autorunner/core/prompts.py +56 -172
- codex_autorunner/core/redaction.py +0 -4
- codex_autorunner/core/review_context.py +11 -9
- codex_autorunner/core/runner_controller.py +35 -33
- codex_autorunner/core/runner_state.py +147 -0
- codex_autorunner/core/runtime.py +829 -0
- codex_autorunner/core/sqlite_utils.py +13 -4
- codex_autorunner/core/state.py +7 -10
- codex_autorunner/core/state_roots.py +5 -0
- codex_autorunner/core/templates/__init__.py +39 -0
- codex_autorunner/core/templates/git_mirror.py +234 -0
- codex_autorunner/core/templates/provenance.py +56 -0
- codex_autorunner/core/templates/scan_cache.py +120 -0
- codex_autorunner/core/ticket_linter_cli.py +17 -0
- codex_autorunner/core/ticket_manager_cli.py +154 -92
- codex_autorunner/core/time_utils.py +11 -0
- codex_autorunner/core/types.py +18 -0
- codex_autorunner/core/utils.py +34 -6
- codex_autorunner/flows/review/service.py +23 -25
- codex_autorunner/flows/ticket_flow/definition.py +43 -1
- codex_autorunner/integrations/agents/__init__.py +2 -0
- codex_autorunner/integrations/agents/backend_orchestrator.py +18 -0
- codex_autorunner/integrations/agents/codex_backend.py +19 -8
- codex_autorunner/integrations/agents/runner.py +3 -8
- codex_autorunner/integrations/agents/wiring.py +8 -0
- codex_autorunner/integrations/telegram/adapter.py +1 -1
- codex_autorunner/integrations/telegram/config.py +1 -1
- codex_autorunner/integrations/telegram/doctor.py +228 -6
- codex_autorunner/integrations/telegram/handlers/commands/execution.py +236 -74
- codex_autorunner/integrations/telegram/handlers/commands/files.py +314 -75
- codex_autorunner/integrations/telegram/handlers/commands/flows.py +346 -58
- codex_autorunner/integrations/telegram/handlers/commands/workspace.py +498 -37
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +202 -45
- codex_autorunner/integrations/telegram/handlers/commands_spec.py +18 -7
- codex_autorunner/integrations/telegram/handlers/messages.py +34 -3
- codex_autorunner/integrations/telegram/helpers.py +1 -3
- codex_autorunner/integrations/telegram/runtime.py +9 -4
- codex_autorunner/integrations/telegram/service.py +30 -0
- codex_autorunner/integrations/telegram/state.py +38 -0
- codex_autorunner/integrations/telegram/ticket_flow_bridge.py +10 -4
- codex_autorunner/integrations/telegram/transport.py +10 -3
- codex_autorunner/integrations/templates/__init__.py +27 -0
- codex_autorunner/integrations/templates/scan_agent.py +312 -0
- codex_autorunner/server.py +2 -2
- codex_autorunner/static/agentControls.js +21 -5
- codex_autorunner/static/app.js +115 -11
- codex_autorunner/static/archive.js +274 -81
- codex_autorunner/static/archiveApi.js +21 -0
- codex_autorunner/static/chatUploads.js +137 -0
- codex_autorunner/static/constants.js +1 -1
- codex_autorunner/static/docChatCore.js +185 -13
- codex_autorunner/static/fileChat.js +68 -40
- codex_autorunner/static/fileboxUi.js +159 -0
- codex_autorunner/static/hub.js +46 -81
- codex_autorunner/static/index.html +303 -24
- codex_autorunner/static/messages.js +82 -4
- codex_autorunner/static/notifications.js +288 -0
- codex_autorunner/static/pma.js +1167 -0
- codex_autorunner/static/settings.js +3 -0
- codex_autorunner/static/streamUtils.js +57 -0
- codex_autorunner/static/styles.css +9141 -6742
- codex_autorunner/static/templateReposSettings.js +225 -0
- codex_autorunner/static/terminalManager.js +22 -3
- codex_autorunner/static/ticketChatActions.js +165 -3
- codex_autorunner/static/ticketChatStream.js +17 -119
- codex_autorunner/static/ticketEditor.js +41 -13
- codex_autorunner/static/ticketTemplates.js +798 -0
- codex_autorunner/static/tickets.js +69 -19
- codex_autorunner/static/turnEvents.js +27 -0
- codex_autorunner/static/turnResume.js +33 -0
- codex_autorunner/static/utils.js +28 -0
- codex_autorunner/static/workspace.js +258 -44
- codex_autorunner/static/workspaceFileBrowser.js +6 -4
- codex_autorunner/surfaces/cli/cli.py +1465 -155
- codex_autorunner/surfaces/cli/pma_cli.py +817 -0
- codex_autorunner/surfaces/web/app.py +253 -49
- codex_autorunner/surfaces/web/routes/__init__.py +4 -0
- codex_autorunner/surfaces/web/routes/analytics.py +29 -22
- codex_autorunner/surfaces/web/routes/archive.py +197 -0
- codex_autorunner/surfaces/web/routes/file_chat.py +297 -36
- codex_autorunner/surfaces/web/routes/filebox.py +227 -0
- codex_autorunner/surfaces/web/routes/flows.py +219 -29
- codex_autorunner/surfaces/web/routes/messages.py +70 -39
- codex_autorunner/surfaces/web/routes/pma.py +1652 -0
- codex_autorunner/surfaces/web/routes/repos.py +1 -1
- codex_autorunner/surfaces/web/routes/shared.py +0 -3
- codex_autorunner/surfaces/web/routes/templates.py +634 -0
- codex_autorunner/surfaces/web/runner_manager.py +2 -2
- codex_autorunner/surfaces/web/schemas.py +81 -18
- codex_autorunner/tickets/agent_pool.py +27 -0
- codex_autorunner/tickets/files.py +33 -16
- codex_autorunner/tickets/lint.py +50 -0
- codex_autorunner/tickets/models.py +3 -0
- codex_autorunner/tickets/outbox.py +41 -5
- codex_autorunner/tickets/runner.py +350 -69
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/METADATA +15 -19
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/RECORD +132 -101
- codex_autorunner/core/adapter_utils.py +0 -21
- codex_autorunner/core/engine.py +0 -3302
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/WHEEL +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/entry_points.txt +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/licenses/LICENSE +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/top_level.txt +0 -0
codex_autorunner/core/docs.py
CHANGED
```diff
@@ -1,122 +0,0 @@
-import re
-from pathlib import Path
-from typing import List, Tuple
-
-from .config import Config
-
-_TODO_LINE_RE = re.compile(r"^\s*[-*]\s*\[(?P<state>[ xX])\]\s*(?P<text>.*)$")
-
-
-def _iter_meaningful_lines(content: str):
-    in_code_fence = False
-    in_html_comment = False
-    html_comment_pattern = re.compile(r"<!--.*?-->", re.DOTALL)
-
-    for line in content.splitlines():
-        stripped = line.strip()
-
-        if stripped.startswith("```"):
-            in_code_fence = not in_code_fence
-            continue
-
-        if in_code_fence:
-            continue
-
-        if line.lstrip().startswith("<!--"):
-            if "-->" in line:
-                if html_comment_pattern.search(line):
-                    continue
-            else:
-                in_html_comment = True
-                continue
-
-        if in_html_comment:
-            if "-->" in line:
-                in_html_comment = False
-            continue
-
-        yield line
-
-
-def parse_todos(content: str) -> Tuple[List[str], List[str]]:
-    outstanding: List[str] = []
-    done: List[str] = []
-    if not content:
-        return outstanding, done
-
-    for line in _iter_meaningful_lines(content):
-        match = _TODO_LINE_RE.match(line)
-        if match:
-            state = match.group("state")
-            text = match.group("text").strip()
-            if state in (" ",):
-                outstanding.append(text)
-            elif state in ("x", "X"):
-                done.append(text)
-    return outstanding, done
-
-
-_TODO_CHECKBOX_RE = re.compile(r"^\s*[-*]\s*\[(?P<state>[ xX])\]\s+\S")
-_TODO_BULLET_RE = re.compile(r"^\s*[-*]\s+")
-
-
-def validate_todo_markdown(content: str) -> List[str]:
-    """
-    Validate that TODO content contains tasks as markdown checkboxes.
-
-    Rules:
-    - If the file has any non-heading, non-empty content, it must include at least one checkbox line.
-    - Any bullet line must be a checkbox bullet (no plain '-' bullets for tasks).
-    """
-    errors: List[str] = []
-    if content is None:
-        return ["TODO is missing"]
-    lines = list(_iter_meaningful_lines(content))
-    meaningful = [
-        line for line in lines if line.strip() and not line.lstrip().startswith("#")
-    ]
-    if not meaningful:
-        return []
-    checkbox_lines = [line for line in meaningful if _TODO_CHECKBOX_RE.match(line)]
-    if not checkbox_lines:
-        errors.append(
-            "TODO must contain at least one markdown checkbox task line like `- [ ] ...`."
-        )
-    bullet_lines = [line for line in meaningful if _TODO_BULLET_RE.match(line)]
-    non_checkbox_bullets = [
-        line for line in bullet_lines if not _TODO_CHECKBOX_RE.match(line)
-    ]
-    if non_checkbox_bullets:
-        sample = non_checkbox_bullets[0].strip()
-        errors.append(
-            "TODO contains non-checkbox bullet(s); use `- [ ] ...` instead. "
-            f"Example: `{sample}`"
-        )
-    return errors
-
-
-class DocsManager:
-    def __init__(self, config: Config):
-        self.config = config
-
-    def read_doc(self, key: str) -> str:
-        try:
-            path = self.config.doc_path(key)
-        except KeyError:
-            return ""
-        return path.read_text(encoding="utf-8") if path.exists() else ""
-
-    def todos(self) -> Tuple[List[str], List[str]]:
-        # Legacy helper retained for backward compatibility; newer configs may not
-        # have a TODO doc at all.
-        try:
-            todo_path: Path = self.config.doc_path("todo")
-        except KeyError:
-            return [], []
-        if not todo_path.exists():
-            return [], []
-        return parse_todos(todo_path.read_text(encoding="utf-8"))
-
-    def todos_done(self) -> bool:
-        outstanding, _ = self.todos()
-        return len(outstanding) == 0
```
codex_autorunner/core/filebox.py
ADDED
```diff
@@ -0,0 +1,265 @@
+from __future__ import annotations
+
+import shutil
+from dataclasses import dataclass
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Dict, Iterable, List, Tuple
+
+
+@dataclass(frozen=True)
+class FileBoxEntry:
+    name: str
+    box: str  # "inbox" | "outbox"
+    size: int | None
+    modified_at: str | None
+    source: str  # "filebox", "pma", "telegram"
+    path: Path
+
+
+BOXES = ("inbox", "outbox")
+
+
+def filebox_root(repo_root: Path) -> Path:
+    return Path(repo_root) / ".codex-autorunner" / "filebox"
+
+
+def inbox_dir(repo_root: Path) -> Path:
+    return filebox_root(repo_root) / "inbox"
+
+
+def outbox_dir(repo_root: Path) -> Path:
+    return filebox_root(repo_root) / "outbox"
+
+
+def outbox_pending_dir(repo_root: Path) -> Path:
+    # Preserves Telegram pending semantics while keeping everything under the shared FileBox.
+    return outbox_dir(repo_root) / "pending"
+
+
+def outbox_sent_dir(repo_root: Path) -> Path:
+    return outbox_dir(repo_root) / "sent"
+
+
+def ensure_structure(repo_root: Path) -> None:
+    for path in (
+        inbox_dir(repo_root),
+        outbox_dir(repo_root),
+        outbox_pending_dir(repo_root),
+        outbox_sent_dir(repo_root),
+    ):
+        path.mkdir(parents=True, exist_ok=True)
+
+
+def sanitize_filename(name: str) -> str:
+    base = Path(name or "").name
+    if not base or base in {".", ".."}:
+        raise ValueError("Missing filename")
+    # Reject any path separators or traversal segments up-front.
+    if name != base or "/" in name or "\\" in name:
+        raise ValueError("Invalid filename")
+    parts = Path(base).parts
+    if any(part in {"", ".", ".."} for part in parts):
+        raise ValueError("Invalid filename")
+    return base
+
+
+def _legacy_paths(repo_root: Path, box: str) -> List[Tuple[str, Path]]:
+    root = Path(repo_root)
+    paths: List[Tuple[str, Path]] = []
+    if box not in BOXES:
+        return paths
+
+    # PMA legacy paths
+    pma_dir = root / ".codex-autorunner" / "pma" / box
+    paths.append(("pma", pma_dir))
+
+    # Telegram legacy paths (topic-scoped). We merge inbox and outbox/pending|sent.
+    telegram_root = root / ".codex-autorunner" / "uploads" / "telegram-files"
+    if telegram_root.exists():
+        for topic in telegram_root.iterdir():
+            if not topic.is_dir():
+                continue
+            if box == "inbox":
+                paths.append(("telegram", topic / "inbox"))
+            elif box == "outbox":
+                paths.append(("telegram", topic / "outbox" / "pending"))
+                paths.append(("telegram", topic / "outbox" / "sent"))
+    return paths
+
+
+def _gather_files(entries: Iterable[Tuple[str, Path]], box: str) -> List[FileBoxEntry]:
+    collected: List[FileBoxEntry] = []
+    for source, folder in entries:
+        if not folder.exists():
+            continue
+        try:
+            for path in folder.iterdir():
+                try:
+                    if not path.is_file():
+                        continue
+                    stat = path.stat()
+                    collected.append(
+                        FileBoxEntry(
+                            name=path.name,
+                            box=box,
+                            size=stat.st_size if stat else None,
+                            modified_at=_format_mtime(stat.st_mtime) if stat else None,
+                            source=source,
+                            path=path,
+                        )
+                    )
+                except OSError:
+                    continue
+        except OSError:
+            continue
+    return collected
+
+
+def _dedupe(entries: List[FileBoxEntry]) -> List[FileBoxEntry]:
+    # Prefer primary filebox entries over legacy duplicates.
+    deduped: Dict[Tuple[str, str], FileBoxEntry] = {}
+    for entry in entries:
+        key = (entry.box, entry.name)
+        existing = deduped.get(key)
+        if existing is None:
+            deduped[key] = entry
+            continue
+        if existing.source != "filebox" and entry.source == "filebox":
+            deduped[key] = entry
+    return list(deduped.values())
+
+
+def _format_mtime(ts: float | None) -> str | None:
+    if ts is None:
+        return None
+    try:
+        return datetime.fromtimestamp(ts, tz=timezone.utc).isoformat()
+    except Exception:
+        return None
+
+
+def list_filebox(
+    repo_root: Path, *, include_legacy: bool = True
+) -> Dict[str, List[FileBoxEntry]]:
+    ensure_structure(repo_root)
+    results: Dict[str, List[FileBoxEntry]] = {}
+    for box in BOXES:
+        primaries = _gather_files([("filebox", _box_dir(repo_root, box))], box)
+        legacy = (
+            _gather_files(_legacy_paths(repo_root, box), box) if include_legacy else []
+        )
+        results[box] = _dedupe(primaries + legacy)
+    return results
+
+
+def _box_dir(repo_root: Path, box: str) -> Path:
+    if box == "inbox":
+        return inbox_dir(repo_root)
+    if box == "outbox":
+        return outbox_dir(repo_root)
+    raise ValueError("Invalid filebox")
+
+
+def _target_path(repo_root: Path, box: str, filename: str) -> Path:
+    """Return a resolved path within the FileBox, rejecting traversal attempts."""
+
+    safe_name = sanitize_filename(filename)
+    target_dir = _box_dir(repo_root, box)
+    target_dir.mkdir(parents=True, exist_ok=True)
+
+    root = target_dir.resolve()
+    candidate = (root / safe_name).resolve()
+    try:
+        candidate.relative_to(root)
+    except ValueError as exc:
+        raise ValueError("Invalid filename") from exc
+    if candidate.parent != root:
+        # Disallow sneaky path tricks that resolve inside nested folders.
+        raise ValueError("Invalid filename")
+    return candidate
+
+
+def save_file(repo_root: Path, box: str, filename: str, data: bytes) -> Path:
+    if box not in BOXES:
+        raise ValueError("Invalid box")
+    ensure_structure(repo_root)
+    path = _target_path(repo_root, box, filename)
+    path.write_bytes(data)
+    return path
+
+
+def resolve_file(repo_root: Path, box: str, filename: str) -> FileBoxEntry | None:
+    if box not in BOXES:
+        return None
+    safe_name = sanitize_filename(filename)
+    paths: List[Tuple[str, Path]] = [("filebox", _box_dir(repo_root, box))]
+    paths.extend(_legacy_paths(repo_root, box))
+    candidates = _gather_files(paths, box)
+    for entry in candidates:
+        if entry.name == safe_name:
+            return entry
+    return None
+
+
+def delete_file(repo_root: Path, box: str, filename: str) -> bool:
+    if box not in BOXES:
+        return False
+    safe_name = sanitize_filename(filename)
+    paths: List[Tuple[str, Path]] = [("filebox", _box_dir(repo_root, box))]
+    paths.extend(_legacy_paths(repo_root, box))
+    candidates = _gather_files(paths, box)
+    removed = False
+    for entry in candidates:
+        if entry.name != safe_name:
+            continue
+        try:
+            entry.path.unlink()
+            removed = True
+        except OSError:
+            continue
+    return removed
+
+
+def migrate_legacy(repo_root: Path) -> int:
+    """
+    Opportunistically copy legacy PMA/Telegram files into the shared FileBox.
+    Returns the number of files copied.
+    """
+    copied = 0
+    ensure_structure(repo_root)
+    for box in BOXES:
+        target_dir = _box_dir(repo_root, box)
+        target_dir.mkdir(parents=True, exist_ok=True)
+        for _source, folder in _legacy_paths(repo_root, box):
+            if not folder.exists():
+                continue
+            for path in folder.iterdir():
+                try:
+                    if not path.is_file():
+                        continue
+                    dest = target_dir / path.name
+                    if dest.exists():
+                        continue
+                    shutil.copy2(path, dest)
+                    copied += 1
+                except OSError:
+                    continue
+    return copied
+
+
+__all__ = [
+    "BOXES",
+    "FileBoxEntry",
+    "delete_file",
+    "filebox_root",
+    "inbox_dir",
+    "list_filebox",
+    "migrate_legacy",
+    "outbox_dir",
+    "outbox_pending_dir",
+    "outbox_sent_dir",
+    "resolve_file",
+    "sanitize_filename",
+    "save_file",
+]
```
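For orientation, here is a minimal usage sketch of the new FileBox API, written against the signatures in the diff above; the repo path is hypothetical.

```python
# Minimal sketch of the FileBox API added in 1.2.1; the repo root is hypothetical.
from pathlib import Path

from codex_autorunner.core.filebox import (
    delete_file,
    list_filebox,
    migrate_legacy,
    save_file,
)

repo = Path("/path/to/repo")  # hypothetical repo root

# Writes under <repo>/.codex-autorunner/filebox/inbox after sanitizing the name.
saved = save_file(repo, "inbox", "notes.txt", b"hello")

# Opportunistically copies legacy PMA/Telegram files into the shared FileBox.
copied = migrate_legacy(repo)

# Lists both boxes, merging legacy locations and deduping by (box, name).
for box, entries in list_filebox(repo).items():
    for entry in entries:
        print(box, entry.name, entry.size, entry.source)

delete_file(repo, "inbox", "notes.txt")
```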
codex_autorunner/core/flows/controller.py
CHANGED
```diff
@@ -4,11 +4,31 @@ import uuid
 from pathlib import Path
 from typing import Any, AsyncGenerator, Callable, Dict, Optional, Set
 
+from ..lifecycle_events import LifecycleEventEmitter
+from ..utils import find_repo_root
 from .definition import FlowDefinition
 from .models import FlowEvent, FlowRunRecord, FlowRunStatus
 from .runtime import FlowRuntime
 from .store import FlowStore
 
+
+def _find_hub_root(repo_root: Optional[Path] = None) -> Optional[Path]:
+    if repo_root is None:
+        repo_root = find_repo_root()
+    if repo_root is None:
+        return None
+    current = repo_root
+    for _ in range(5):
+        manifest_path = current / ".codex-autorunner" / "manifest.yml"
+        if manifest_path.exists():
+            return current
+        parent = current.parent
+        if parent == current:
+            break
+        current = parent
+    return None
+
+
 _logger = logging.getLogger(__name__)
 
 
@@ -18,13 +38,24 @@ class FlowController:
         definition: FlowDefinition,
         db_path: Path,
         artifacts_root: Path,
+        durable: bool = False,
+        hub_root: Optional[Path] = None,
     ):
         self.definition = definition
         self.db_path = db_path
         self.artifacts_root = artifacts_root
-        self.store = FlowStore(db_path)
+        self.store = FlowStore(db_path, durable=durable)
         self._event_listeners: Set[Callable[[FlowEvent], None]] = set()
+        self._lifecycle_event_listeners: Set[Callable[[str, str, str, dict], None]] = (
+            set()
+        )
         self._lock = asyncio.Lock()
+        self._lifecycle_emitter: Optional[LifecycleEventEmitter] = None
+        if hub_root is None:
+            hub_root = _find_hub_root(db_path.parent.parent if db_path else None)
+        if hub_root is not None:
+            self._lifecycle_emitter = LifecycleEventEmitter(hub_root)
+            self.add_lifecycle_event_listener(self._emit_to_lifecycle_store)
 
     def initialize(self) -> None:
         self.artifacts_root.mkdir(parents=True, exist_ok=True)
@@ -70,6 +101,7 @@ class FlowController:
             definition=self.definition,
             store=self.store,
             emit_event=self._emit_event,
+            emit_lifecycle_event=self._emit_lifecycle,
         )
         return await runtime.run_flow(run_id=run_id, initial_state=initial_state)
 
@@ -109,6 +141,8 @@ class FlowController:
         engine = state.get("ticket_engine")
         if isinstance(engine, dict):
             engine = dict(engine)
+            if engine.get("reason_code") == "max_turns":
+                engine["total_turns"] = 0
             engine["status"] = "running"
             engine.pop("reason", None)
             engine.pop("reason_details", None)
@@ -174,6 +208,42 @@ class FlowController:
     def remove_event_listener(self, listener: Callable[[FlowEvent], None]) -> None:
         self._event_listeners.discard(listener)
 
+    def add_lifecycle_event_listener(
+        self, listener: Callable[[str, str, str, dict], None]
+    ) -> None:
+        self._lifecycle_event_listeners.add(listener)
+
+    def remove_lifecycle_event_listener(
+        self, listener: Callable[[str, str, str, dict], None]
+    ) -> None:
+        self._lifecycle_event_listeners.discard(listener)
+
+    def _emit_lifecycle(
+        self, event_type: str, repo_id: str, run_id: str, data: Dict[str, Any]
+    ) -> None:
+        for listener in self._lifecycle_event_listeners:
+            try:
+                listener(event_type, repo_id, run_id, data)
+            except Exception as e:
+                _logger.exception("Error in lifecycle event listener: %s", e)
+
+    def _emit_to_lifecycle_store(
+        self, event_type: str, repo_id: str, run_id: str, data: Dict[str, Any]
+    ) -> None:
+        if self._lifecycle_emitter is None:
+            return
+        try:
+            if event_type == "flow_paused":
+                self._lifecycle_emitter.emit_flow_paused(repo_id, run_id, data=data)
+            elif event_type == "flow_completed":
+                self._lifecycle_emitter.emit_flow_completed(repo_id, run_id, data=data)
+            elif event_type == "flow_failed":
+                self._lifecycle_emitter.emit_flow_failed(repo_id, run_id, data=data)
+            elif event_type == "flow_stopped":
+                self._lifecycle_emitter.emit_flow_stopped(repo_id, run_id, data=data)
+        except Exception as exc:
+            _logger.exception("Error emitting to lifecycle store: %s", exc)
+
     def _emit_event(self, event: FlowEvent) -> None:
         for listener in self._event_listeners:
             try:
```
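A hedged sketch of subscribing to the new lifecycle events; `definition` is assumed to be a FlowDefinition you already have, and the paths are hypothetical.

```python
# Sketch: wiring a lifecycle listener into the extended FlowController constructor.
from pathlib import Path

from codex_autorunner.core.flows.controller import FlowController


def on_lifecycle(event_type: str, repo_id: str, run_id: str, data: dict) -> None:
    # Per the diff, event_type is one of:
    # "flow_paused", "flow_completed", "flow_failed", "flow_stopped".
    print(f"{event_type}: run={run_id} data={data}")


controller = FlowController(
    definition=definition,                            # assumed existing FlowDefinition
    db_path=Path(".codex-autorunner/flows.db"),       # hypothetical paths
    artifacts_root=Path(".codex-autorunner/artifacts"),
    durable=True,   # new: selects the durable SQLite pragma set for the FlowStore
    hub_root=None,  # new: when None, discovered by walking up to manifest.yml
)
controller.add_lifecycle_event_listener(on_lifecycle)
```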
codex_autorunner/core/flows/reconciler.py
CHANGED
```diff
@@ -103,7 +103,10 @@ def reconcile_flow_runs(
     db_path = repo_root / ".codex-autorunner" / "flows.db"
     if not db_path.exists():
         return FlowReconcileResult(records=[], summary=FlowReconcileSummary())
-
+    from ..config import load_repo_config
+
+    config = load_repo_config(repo_root)
+    store = FlowStore(db_path, durable=config.durable_writes)
     summary = FlowReconcileSummary()
     records: list[FlowRunRecord] = []
    try:
```
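A small sketch of the durability wiring the reconciler now performs: the repo config's `durable_writes` flag chooses the pragma set for the FlowStore it opens. `repo_root` is hypothetical.

```python
# Sketch of the new durability wiring, per the reconciler diff above.
from pathlib import Path

from codex_autorunner.core.config import load_repo_config
from codex_autorunner.core.flows.store import FlowStore

repo_root = Path("/path/to/repo")  # hypothetical
config = load_repo_config(repo_root)
store = FlowStore(
    repo_root / ".codex-autorunner" / "flows.db",
    durable=config.durable_writes,  # durable_writes is a repo-config field per the diff
)
```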
codex_autorunner/core/flows/runtime.py
CHANGED
```diff
@@ -11,18 +11,32 @@ from .store import FlowStore, now_iso
 _logger = logging.getLogger(__name__)
 
 
+LifecycleEventCallback = Optional[Callable[[str, str, str, Dict[str, Any]], None]]
+
+
 class FlowRuntime:
     def __init__(
         self,
         definition: FlowDefinition,
         store: FlowStore,
         emit_event: Optional[Callable[[FlowEvent], None]] = None,
+        emit_lifecycle_event: LifecycleEventCallback = None,
     ):
         self.definition = definition
         self.store = store
         self.emit_event = emit_event
+        self.emit_lifecycle_event = emit_lifecycle_event
         self._stop_check_interval = 0.5
 
+    def _emit_lifecycle(
+        self, event_type: str, repo_id: str, run_id: str, data: Dict[str, Any]
+    ) -> None:
+        if self.emit_lifecycle_event:
+            try:
+                self.emit_lifecycle_event(event_type, repo_id, run_id, data)
+            except Exception as exc:
+                _logger.exception("Error emitting lifecycle event: %s", exc)
+
     def _emit(
         self,
         event_type: FlowEventType,
@@ -112,6 +126,7 @@ class FlowRuntime:
                 if not updated:
                     raise RuntimeError(f"Failed to stop flow run {run_id}")
                 record = updated
+                self._emit_lifecycle("flow_stopped", "", run_id, {})
                 break
 
             step_id = next_steps.pop()
@@ -152,6 +167,7 @@ class FlowRuntime:
                     f"Failed to update flow run {run_id} to failed state"
                 ) from e
             record = updated
+            self._emit_lifecycle("flow_failed", "", run_id, {"error": str(e)})
         return record
 
     async def _execute_step(
@@ -270,6 +286,7 @@ class FlowRuntime:
                     f"Failed to update flow run after step {step_id}"
                 )
             record = updated
+            self._emit_lifecycle("flow_completed", "", record.id, {})
 
         elif outcome.status == FlowRunStatus.FAILED:
             self._emit(
@@ -298,6 +315,9 @@ class FlowRuntime:
                     f"Failed to update flow run after step {step_id}"
                 )
             record = updated
+            self._emit_lifecycle(
+                "flow_failed", "", record.id, {"error": outcome.error or ""}
+            )
 
         elif outcome.status == FlowRunStatus.STOPPED:
             self._emit(
@@ -324,6 +344,7 @@ class FlowRuntime:
                     f"Failed to update flow run after step {step_id}"
                 )
             record = updated
+            self._emit_lifecycle("flow_stopped", "", record.id, {})
 
         elif outcome.status == FlowRunStatus.PAUSED:
             self._emit(
@@ -348,6 +369,7 @@ class FlowRuntime:
                     f"Failed to update flow run after step {step_id}"
                 )
             record = updated
+            self._emit_lifecycle("flow_paused", "", record.id, {})
 
         return record
 
```
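A sketch of the new `emit_lifecycle_event` hook: FlowController passes its own `_emit_lifecycle` here, but any callable of this shape works. `definition` and `store` are assumed to exist already.

```python
# Sketch of constructing FlowRuntime with the new lifecycle callback.
from codex_autorunner.core.flows.runtime import FlowRuntime


def log_lifecycle(event_type: str, repo_id: str, run_id: str, data: dict) -> None:
    # Invoked on pause/complete/fail/stop transitions; errors here are caught
    # and logged by _emit_lifecycle rather than failing the run.
    print(event_type, run_id, data.get("error", ""))


runtime = FlowRuntime(
    definition=definition,               # assumed existing FlowDefinition
    store=store,                         # assumed existing FlowStore
    emit_lifecycle_event=log_lifecycle,
)
```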
codex_autorunner/core/flows/store.py
CHANGED
```diff
@@ -1,13 +1,15 @@
+from __future__ import annotations
+
 import json
 import logging
 import sqlite3
 import threading
 from contextlib import contextmanager
-from datetime import datetime, timezone
 from pathlib import Path
 from typing import Any, Dict, Generator, List, Optional, cast
 
-from ..sqlite_utils import SQLITE_PRAGMAS
+from ..sqlite_utils import SQLITE_PRAGMAS, SQLITE_PRAGMAS_DURABLE
+from ..time_utils import now_iso
 from .models import (
     FlowArtifact,
     FlowEvent,
@@ -22,18 +24,22 @@ SCHEMA_VERSION = 2
 UNSET = object()
 
 
-def now_iso() -> str:
-    return datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
-
-
 class FlowStore:
-    def __init__(self, db_path: Path):
+    def __init__(self, db_path: Path, durable: bool = False):
         self.db_path = db_path
+        self._durable = durable
         self._local: threading.local = threading.local()
 
+    def __enter__(self) -> FlowStore:
+        self.initialize()
+        return self
+
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
     def _get_conn(self) -> sqlite3.Connection:
         if not hasattr(self._local, "conn"):
-            # Ensure parent directory exists so sqlite can create/open
+            # Ensure parent directory exists so sqlite can create/open file.
             try:
                 self.db_path.parent.mkdir(parents=True, exist_ok=True)
             except Exception:
@@ -43,7 +49,8 @@ class FlowStore:
                 self.db_path, check_same_thread=False, isolation_level=None
             )
             self._local.conn.row_factory = sqlite3.Row
-
+            pragmas = SQLITE_PRAGMAS_DURABLE if self._durable else SQLITE_PRAGMAS
+            for pragma in pragmas:
                 self._local.conn.execute(pragma)
         return cast(sqlite3.Connection, self._local.conn)
 
@@ -387,6 +394,51 @@ class FlowStore:
         rows = conn.execute(query, params).fetchall()
         return [self._row_to_flow_event(row) for row in rows]
 
+    def get_events_by_types(
+        self,
+        run_id: str,
+        event_types: list[FlowEventType],
+        *,
+        after_seq: Optional[int] = None,
+        limit: Optional[int] = None,
+    ) -> List[FlowEvent]:
+        """Return events for a run filtered to specific event types."""
+        if not event_types:
+            return []
+        conn = self._get_conn()
+        placeholders = ", ".join("?" for _ in event_types)
+        query = f"""
+            SELECT *
+            FROM flow_events
+            WHERE run_id = ? AND event_type IN ({placeholders})
+        """
+        params: List[Any] = [run_id, *[t.value for t in event_types]]
+
+        if after_seq is not None:
+            query += " AND seq > ?"
+            params.append(after_seq)
+
+        query += " ORDER BY seq ASC"
+
+        if limit is not None:
+            query += " LIMIT ?"
+            params.append(limit)
+
+        rows = conn.execute(query, params).fetchall()
+        return [self._row_to_flow_event(row) for row in rows]
+
+    def get_events_by_type(
+        self,
+        run_id: str,
+        event_type: FlowEventType,
+        *,
+        after_seq: Optional[int] = None,
+        limit: Optional[int] = None,
+    ) -> List[FlowEvent]:
+        return self.get_events_by_types(
+            run_id, [event_type], after_seq=after_seq, limit=limit
+        )
+
     def get_last_event_meta(self, run_id: str) -> tuple[Optional[int], Optional[str]]:
         conn = self._get_conn()
         row = conn.execute(
```
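Finally, a hedged sketch of the FlowStore additions taken together: context-manager lifetime (`initialize`/`close`), the durable pragma switch, and type-filtered event queries. The run id and the FlowEventType member shown are assumptions for illustration.

```python
# Sketch of the new FlowStore surface; db path, run id, and the enum member
# STEP_COMPLETED are hypothetical/assumed.
from pathlib import Path

from codex_autorunner.core.flows.models import FlowEventType
from codex_autorunner.core.flows.store import FlowStore

with FlowStore(Path(".codex-autorunner/flows.db"), durable=True) as store:
    events = store.get_events_by_types(
        "run-123",                        # hypothetical run id
        [FlowEventType.STEP_COMPLETED],   # assumed enum member name
        after_seq=0,
        limit=50,
    )
    for event in events:
        print(event)
```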
|