codex-autorunner 1.1.0__py3-none-any.whl → 1.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_autorunner/agents/opencode/client.py +113 -4
- codex_autorunner/agents/opencode/supervisor.py +4 -0
- codex_autorunner/agents/registry.py +17 -7
- codex_autorunner/bootstrap.py +219 -1
- codex_autorunner/core/__init__.py +17 -1
- codex_autorunner/core/about_car.py +124 -11
- codex_autorunner/core/app_server_threads.py +6 -0
- codex_autorunner/core/config.py +238 -3
- codex_autorunner/core/context_awareness.py +39 -0
- codex_autorunner/core/docs.py +0 -122
- codex_autorunner/core/filebox.py +265 -0
- codex_autorunner/core/flows/controller.py +71 -1
- codex_autorunner/core/flows/reconciler.py +4 -1
- codex_autorunner/core/flows/runtime.py +22 -0
- codex_autorunner/core/flows/store.py +61 -9
- codex_autorunner/core/flows/transition.py +23 -16
- codex_autorunner/core/flows/ux_helpers.py +18 -3
- codex_autorunner/core/flows/worker_process.py +32 -6
- codex_autorunner/core/hub.py +198 -41
- codex_autorunner/core/lifecycle_events.py +253 -0
- codex_autorunner/core/path_utils.py +2 -1
- codex_autorunner/core/pma_audit.py +224 -0
- codex_autorunner/core/pma_context.py +683 -0
- codex_autorunner/core/pma_dispatch_interceptor.py +284 -0
- codex_autorunner/core/pma_lifecycle.py +527 -0
- codex_autorunner/core/pma_queue.py +367 -0
- codex_autorunner/core/pma_safety.py +221 -0
- codex_autorunner/core/pma_state.py +115 -0
- codex_autorunner/core/ports/agent_backend.py +2 -5
- codex_autorunner/core/ports/run_event.py +1 -4
- codex_autorunner/core/prompt.py +0 -80
- codex_autorunner/core/prompts.py +56 -172
- codex_autorunner/core/redaction.py +0 -4
- codex_autorunner/core/review_context.py +11 -9
- codex_autorunner/core/runner_controller.py +35 -33
- codex_autorunner/core/runner_state.py +147 -0
- codex_autorunner/core/runtime.py +829 -0
- codex_autorunner/core/sqlite_utils.py +13 -4
- codex_autorunner/core/state.py +7 -10
- codex_autorunner/core/state_roots.py +5 -0
- codex_autorunner/core/templates/__init__.py +39 -0
- codex_autorunner/core/templates/git_mirror.py +234 -0
- codex_autorunner/core/templates/provenance.py +56 -0
- codex_autorunner/core/templates/scan_cache.py +120 -0
- codex_autorunner/core/ticket_linter_cli.py +17 -0
- codex_autorunner/core/ticket_manager_cli.py +154 -92
- codex_autorunner/core/time_utils.py +11 -0
- codex_autorunner/core/types.py +18 -0
- codex_autorunner/core/utils.py +34 -6
- codex_autorunner/flows/review/service.py +23 -25
- codex_autorunner/flows/ticket_flow/definition.py +43 -1
- codex_autorunner/integrations/agents/__init__.py +2 -0
- codex_autorunner/integrations/agents/backend_orchestrator.py +18 -0
- codex_autorunner/integrations/agents/codex_backend.py +19 -8
- codex_autorunner/integrations/agents/runner.py +3 -8
- codex_autorunner/integrations/agents/wiring.py +8 -0
- codex_autorunner/integrations/telegram/adapter.py +1 -1
- codex_autorunner/integrations/telegram/config.py +1 -1
- codex_autorunner/integrations/telegram/doctor.py +228 -6
- codex_autorunner/integrations/telegram/handlers/commands/execution.py +236 -74
- codex_autorunner/integrations/telegram/handlers/commands/files.py +314 -75
- codex_autorunner/integrations/telegram/handlers/commands/flows.py +346 -58
- codex_autorunner/integrations/telegram/handlers/commands/workspace.py +498 -37
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +202 -45
- codex_autorunner/integrations/telegram/handlers/commands_spec.py +18 -7
- codex_autorunner/integrations/telegram/handlers/messages.py +34 -3
- codex_autorunner/integrations/telegram/helpers.py +1 -3
- codex_autorunner/integrations/telegram/runtime.py +9 -4
- codex_autorunner/integrations/telegram/service.py +30 -0
- codex_autorunner/integrations/telegram/state.py +38 -0
- codex_autorunner/integrations/telegram/ticket_flow_bridge.py +10 -4
- codex_autorunner/integrations/telegram/transport.py +10 -3
- codex_autorunner/integrations/templates/__init__.py +27 -0
- codex_autorunner/integrations/templates/scan_agent.py +312 -0
- codex_autorunner/server.py +2 -2
- codex_autorunner/static/agentControls.js +21 -5
- codex_autorunner/static/app.js +115 -11
- codex_autorunner/static/archive.js +274 -81
- codex_autorunner/static/archiveApi.js +21 -0
- codex_autorunner/static/chatUploads.js +137 -0
- codex_autorunner/static/constants.js +1 -1
- codex_autorunner/static/docChatCore.js +185 -13
- codex_autorunner/static/fileChat.js +68 -40
- codex_autorunner/static/fileboxUi.js +159 -0
- codex_autorunner/static/hub.js +46 -81
- codex_autorunner/static/index.html +303 -24
- codex_autorunner/static/messages.js +82 -4
- codex_autorunner/static/notifications.js +288 -0
- codex_autorunner/static/pma.js +1167 -0
- codex_autorunner/static/settings.js +3 -0
- codex_autorunner/static/streamUtils.js +57 -0
- codex_autorunner/static/styles.css +9141 -6742
- codex_autorunner/static/templateReposSettings.js +225 -0
- codex_autorunner/static/terminalManager.js +22 -3
- codex_autorunner/static/ticketChatActions.js +165 -3
- codex_autorunner/static/ticketChatStream.js +17 -119
- codex_autorunner/static/ticketEditor.js +41 -13
- codex_autorunner/static/ticketTemplates.js +798 -0
- codex_autorunner/static/tickets.js +69 -19
- codex_autorunner/static/turnEvents.js +27 -0
- codex_autorunner/static/turnResume.js +33 -0
- codex_autorunner/static/utils.js +28 -0
- codex_autorunner/static/workspace.js +258 -44
- codex_autorunner/static/workspaceFileBrowser.js +6 -4
- codex_autorunner/surfaces/cli/cli.py +1465 -155
- codex_autorunner/surfaces/cli/pma_cli.py +817 -0
- codex_autorunner/surfaces/web/app.py +253 -49
- codex_autorunner/surfaces/web/routes/__init__.py +4 -0
- codex_autorunner/surfaces/web/routes/analytics.py +29 -22
- codex_autorunner/surfaces/web/routes/archive.py +197 -0
- codex_autorunner/surfaces/web/routes/file_chat.py +297 -36
- codex_autorunner/surfaces/web/routes/filebox.py +227 -0
- codex_autorunner/surfaces/web/routes/flows.py +219 -29
- codex_autorunner/surfaces/web/routes/messages.py +70 -39
- codex_autorunner/surfaces/web/routes/pma.py +1652 -0
- codex_autorunner/surfaces/web/routes/repos.py +1 -1
- codex_autorunner/surfaces/web/routes/shared.py +0 -3
- codex_autorunner/surfaces/web/routes/templates.py +634 -0
- codex_autorunner/surfaces/web/runner_manager.py +2 -2
- codex_autorunner/surfaces/web/schemas.py +81 -18
- codex_autorunner/tickets/agent_pool.py +27 -0
- codex_autorunner/tickets/files.py +33 -16
- codex_autorunner/tickets/lint.py +50 -0
- codex_autorunner/tickets/models.py +3 -0
- codex_autorunner/tickets/outbox.py +41 -5
- codex_autorunner/tickets/runner.py +350 -69
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/METADATA +15 -19
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/RECORD +132 -101
- codex_autorunner/core/adapter_utils.py +0 -21
- codex_autorunner/core/engine.py +0 -3302
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/WHEEL +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/entry_points.txt +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/licenses/LICENSE +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from datetime import datetime, timezone
|
|
7
|
+
from enum import Enum
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any, Callable, Optional
|
|
10
|
+
|
|
11
|
+
from .locks import file_lock
|
|
12
|
+
from .utils import atomic_write
|
|
13
|
+
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
LIFECYCLE_EVENTS_FILENAME = "lifecycle_events.json"
|
|
17
|
+
LIFECYCLE_EVENTS_LOCK_SUFFIX = ".lock"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class LifecycleEventType(str, Enum):
    """Kinds of flow/dispatch lifecycle events persisted by this module.

    Values are the stable string forms written to the events JSON file;
    they must not change once released.
    """

    FLOW_PAUSED = "flow_paused"
    FLOW_COMPLETED = "flow_completed"
    FLOW_FAILED = "flow_failed"
    FLOW_STOPPED = "flow_stopped"
    DISPATCH_CREATED = "dispatch_created"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass
class LifecycleEvent:
    """One persisted lifecycle event.

    Attributes:
        event_type: Which lifecycle transition occurred.
        repo_id: Identifier of the repo the event belongs to.
        run_id: Identifier of the flow run that produced the event.
        data: Free-form JSON-serializable event payload.
        timestamp: ISO-8601 UTC time captured when the object is created.
        processed: Whether a consumer has already handled the event.
        event_id: Unique id; auto-filled with a UUID4 when left empty.
    """

    event_type: LifecycleEventType
    repo_id: str
    run_id: str
    data: dict[str, Any] = field(default_factory=dict)
    timestamp: str = field(
        default_factory=lambda: datetime.now(timezone.utc).isoformat()
    )
    processed: bool = False
    event_id: str = ""

    def __post_init__(self) -> None:
        if not self.event_id:
            import uuid

            # Plain assignment suffices: this dataclass is not frozen, so the
            # original object.__setattr__ indirection was unnecessary and
            # misleadingly suggested immutability.
            self.event_id = str(uuid.uuid4())
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def default_lifecycle_events_path(hub_root: Path) -> Path:
    """Location of the lifecycle-events JSON file under *hub_root*."""
    state_dir = hub_root / ".codex-autorunner"
    return state_dir / LIFECYCLE_EVENTS_FILENAME
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class LifecycleEventStore:
    """JSON-list store of :class:`LifecycleEvent` guarded by a file lock.

    All read-modify-write operations (``append``, ``mark_processed``,
    ``prune_processed``) hold the lock for the whole load+save cycle.  The
    original implementation locked the load and the save separately, so a
    concurrent writer could slip between them and its update be lost.
    """

    def __init__(self, hub_root: Path) -> None:
        self._path = default_lifecycle_events_path(hub_root)

    @property
    def path(self) -> Path:
        """Absolute path of the backing JSON file."""
        return self._path

    def _lock_path(self) -> Path:
        # NOTE: with_suffix replaces ".json", yielding "lifecycle_events.lock".
        return self._path.with_suffix(LIFECYCLE_EVENTS_LOCK_SUFFIX)

    def load(self, *, ensure_exists: bool = True) -> list[LifecycleEvent]:
        """Return all events on disk, skipping malformed entries.

        ``ensure_exists`` is accepted for API compatibility but currently has
        no effect — a missing file simply yields an empty list.
        TODO(review): either implement file creation or drop the parameter.
        """
        with file_lock(self._lock_path()):
            return self._load_unlocked()

    def _load_unlocked(self) -> list[LifecycleEvent]:
        """Read and parse the events file; caller must hold the lock."""
        if not self._path.exists():
            return []
        try:
            raw = self._path.read_text(encoding="utf-8")
        except OSError as exc:
            logger.warning(
                "Failed to read lifecycle events at %s: %s", self._path, exc
            )
            return []
        try:
            data = json.loads(raw)
        except json.JSONDecodeError as exc:
            logger.warning(
                "Failed to parse lifecycle events at %s: %s", self._path, exc
            )
            return []
        if not isinstance(data, list):
            logger.warning("Lifecycle events data is not a list: %s", self._path)
            return []
        events: list[LifecycleEvent] = []
        for entry in data:
            try:
                event = self._parse_entry(entry)
            except Exception as exc:  # defensive: one bad entry must not abort the load
                logger.debug("Failed to parse lifecycle event entry: %s", exc)
                continue
            if event is not None:
                events.append(event)
        return events

    @staticmethod
    def _parse_entry(entry: Any) -> Optional[LifecycleEvent]:
        """Convert one decoded JSON object to an event, or None when invalid."""
        if not isinstance(entry, dict):
            return None
        event_type_str = entry.get("event_type")
        if not isinstance(event_type_str, str):
            return None
        try:
            event_type = LifecycleEventType(event_type_str)
        except ValueError:
            return None
        event_id_raw = entry.get("event_id")
        event_id = event_id_raw if isinstance(event_id_raw, str) else ""
        if not event_id:
            import uuid

            event_id = str(uuid.uuid4())
        return LifecycleEvent(
            event_type=event_type,
            repo_id=str(entry.get("repo_id", "")),
            run_id=str(entry.get("run_id", "")),
            data=dict(entry.get("data", {})),
            timestamp=str(entry.get("timestamp", "")),
            processed=bool(entry.get("processed", False)),
            event_id=event_id,
        )

    def save(self, events: list[LifecycleEvent]) -> None:
        """Atomically replace the on-disk event list with *events*."""
        with file_lock(self._lock_path()):
            self._save_unlocked(events)

    def _save_unlocked(self, events: list[LifecycleEvent]) -> None:
        """Serialize and atomically write *events*; caller must hold the lock."""
        self._path.parent.mkdir(parents=True, exist_ok=True)
        data = [
            {
                "event_id": event.event_id,
                "event_type": event.event_type.value,
                "repo_id": event.repo_id,
                "run_id": event.run_id,
                "data": event.data,
                "timestamp": event.timestamp,
                "processed": event.processed,
            }
            for event in events
        ]
        atomic_write(self._path, json.dumps(data, indent=2) + "\n")

    def append(self, event: LifecycleEvent) -> None:
        """Append *event* under a single lock covering the load+save cycle."""
        with file_lock(self._lock_path()):
            events = self._load_unlocked()
            events.append(event)
            self._save_unlocked(events)

    def mark_processed(self, event_id: str) -> Optional[LifecycleEvent]:
        """Mark the event with *event_id* processed; return it, or None."""
        if not event_id:
            return None
        with file_lock(self._lock_path()):
            events = self._load_unlocked()
            for event in events:
                if event.event_id == event_id:
                    event.processed = True
                    self._save_unlocked(events)
                    return event
        return None

    def get_unprocessed(self, *, limit: int = 100) -> list[LifecycleEvent]:
        """Return up to *limit* not-yet-processed events, oldest first."""
        with file_lock(self._lock_path()):
            events = self._load_unlocked()
        return [e for e in events if not e.processed][:limit]

    def prune_processed(self, *, keep_last: int = 100) -> None:
        """Drop all but the newest *keep_last* processed events.

        Rewrites the file with unprocessed events first, then the kept
        processed ones — same ordering the original produced.
        """
        with file_lock(self._lock_path()):
            events = self._load_unlocked()
            unprocessed = [e for e in events if not e.processed]
            processed = [e for e in events if e.processed]
            if len(processed) > keep_last:
                processed = processed[-keep_last:]
            self._save_unlocked(unprocessed + processed)
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
class LifecycleEventEmitter:
    """Persists lifecycle events and fans them out to in-process listeners."""

    def __init__(self, hub_root: Path) -> None:
        self._store = LifecycleEventStore(hub_root)
        self._listeners: list[Callable[[LifecycleEvent], None]] = []

    def emit(self, event: LifecycleEvent) -> str:
        """Append *event* to the store, notify listeners, return its id.

        Listener errors are logged and swallowed so one failing listener
        cannot block the others or the caller.
        """
        self._store.append(event)
        for listener in self._listeners:
            try:
                listener(event)
            except Exception as exc:
                logger.exception("Error in lifecycle event listener: %s", exc)
        return event.event_id

    def _emit_typed(
        self,
        event_type: LifecycleEventType,
        repo_id: str,
        run_id: str,
        data: Optional[dict[str, Any]],
    ) -> str:
        # Shared body of the emit_* wrappers below; the original repeated
        # this construction five times verbatim.
        event = LifecycleEvent(
            event_type=event_type,
            repo_id=repo_id,
            run_id=run_id,
            data=data or {},
        )
        return self.emit(event)

    def emit_flow_paused(
        self, repo_id: str, run_id: str, *, data: Optional[dict[str, Any]] = None
    ) -> str:
        """Emit a FLOW_PAUSED event; returns the event id."""
        return self._emit_typed(LifecycleEventType.FLOW_PAUSED, repo_id, run_id, data)

    def emit_flow_completed(
        self, repo_id: str, run_id: str, *, data: Optional[dict[str, Any]] = None
    ) -> str:
        """Emit a FLOW_COMPLETED event; returns the event id."""
        return self._emit_typed(
            LifecycleEventType.FLOW_COMPLETED, repo_id, run_id, data
        )

    def emit_flow_failed(
        self, repo_id: str, run_id: str, *, data: Optional[dict[str, Any]] = None
    ) -> str:
        """Emit a FLOW_FAILED event; returns the event id."""
        return self._emit_typed(LifecycleEventType.FLOW_FAILED, repo_id, run_id, data)

    def emit_flow_stopped(
        self, repo_id: str, run_id: str, *, data: Optional[dict[str, Any]] = None
    ) -> str:
        """Emit a FLOW_STOPPED event; returns the event id."""
        return self._emit_typed(LifecycleEventType.FLOW_STOPPED, repo_id, run_id, data)

    def emit_dispatch_created(
        self, repo_id: str, run_id: str, *, data: Optional[dict[str, Any]] = None
    ) -> str:
        """Emit a DISPATCH_CREATED event; returns the event id."""
        return self._emit_typed(
            LifecycleEventType.DISPATCH_CREATED, repo_id, run_id, data
        )

    def add_listener(self, listener: Callable[[LifecycleEvent], None]) -> None:
        """Register *listener* to be invoked synchronously on every emit."""
        self._listeners.append(listener)

    def remove_listener(self, listener: Callable[[LifecycleEvent], None]) -> None:
        """Remove every registration equal to *listener* (no-op if absent)."""
        self._listeners = [reg for reg in self._listeners if reg != listener]
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
# Explicit public API of this module for `import *` and documentation tools.
__all__ = [
    "LifecycleEventType",
    "LifecycleEvent",
    "LifecycleEventStore",
    "LifecycleEventEmitter",
    "default_lifecycle_events_path",
]
|
|
@@ -49,6 +49,7 @@ def resolve_config_path(
|
|
|
49
49
|
3. Otherwise, resolve relative to repo_root
|
|
50
50
|
4. Reject '..' segments unless allow_dotdot=True
|
|
51
51
|
5. Reject paths escaping repo_root (except home expansion)
|
|
52
|
+
- allow_dotdot allows '..' segments inside the repo, not escaping the repo
|
|
52
53
|
|
|
53
54
|
Args:
|
|
54
55
|
value: Path string or Path object
|
|
@@ -112,7 +113,7 @@ def resolve_config_path(
|
|
|
112
113
|
|
|
113
114
|
resolved = (repo_root / path).resolve()
|
|
114
115
|
|
|
115
|
-
if not
|
|
116
|
+
if not resolved.is_relative_to(repo_root):
|
|
116
117
|
raise ConfigPathError(
|
|
117
118
|
"Path resolves outside repo root",
|
|
118
119
|
path=value_str,
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import hashlib
|
|
4
|
+
import json
|
|
5
|
+
import logging
|
|
6
|
+
from dataclasses import dataclass, field
|
|
7
|
+
from datetime import datetime, timezone
|
|
8
|
+
from enum import Enum
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any, Optional
|
|
11
|
+
|
|
12
|
+
from .locks import file_lock
|
|
13
|
+
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
PMA_AUDIT_LOG_FILENAME = "audit_log.jsonl"
|
|
17
|
+
PMA_AUDIT_LOG_LOCK_SUFFIX = ".lock"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class PmaActionType(str, Enum):
    """Kinds of PMA actions recorded in the audit log.

    Values are the stable string forms written to the JSONL log; UNKNOWN is
    the fallback used when a stored value no longer maps to a member.
    """

    CHAT_STARTED = "chat_started"
    CHAT_COMPLETED = "chat_completed"
    CHAT_FAILED = "chat_failed"
    CHAT_INTERRUPTED = "chat_interrupted"
    FILE_UPLOADED = "file_uploaded"
    FILE_DOWNLOADED = "file_downloaded"
    FILE_DELETED = "file_deleted"
    FILE_BULK_DELETED = "file_bulk_deleted"
    DOC_UPDATED = "doc_updated"
    DISPATCH_PROCESSED = "dispatch_processed"
    AGENT_ACTION = "agent_action"
    SESSION_NEW = "session_new"
    SESSION_RESET = "session_reset"
    SESSION_STOP = "session_stop"
    SESSION_COMPACT = "session_compact"
    UNKNOWN = "unknown"
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@dataclass
class PmaAuditEntry:
    """One line of the PMA audit log.

    ``entry_id`` and ``fingerprint`` are auto-filled when empty: the id is a
    UUID4, and the fingerprint is a 16-hex-char SHA-256 digest over the
    action type, agent, and details (used for duplicate detection).
    """

    action_type: PmaActionType
    timestamp: str = field(
        default_factory=lambda: datetime.now(timezone.utc).isoformat()
    )
    entry_id: str = ""
    agent: Optional[str] = None
    thread_id: Optional[str] = None
    turn_id: Optional[str] = None
    client_turn_id: Optional[str] = None
    details: dict[str, Any] = field(default_factory=dict)
    status: str = "ok"
    error: Optional[str] = None
    fingerprint: str = ""

    def __post_init__(self) -> None:
        # Plain assignment suffices: this dataclass is not frozen, so the
        # original object.__setattr__ calls were unnecessary indirection.
        if not self.entry_id:
            import uuid

            self.entry_id = str(uuid.uuid4())
        if not self.fingerprint:
            self.fingerprint = self._compute_fingerprint()

    def _compute_fingerprint(self) -> str:
        """Stable short digest of the semantically identifying fields."""
        base = {
            "action_type": self.action_type.value,
            "agent": self.agent,
            "details": self.details,
        }
        # sort_keys makes the digest order-independent; default=str keeps
        # non-JSON-native detail values serializable (and stable per-repr).
        raw = json.dumps(base, sort_keys=True, default=str)
        return hashlib.sha256(raw.encode()).hexdigest()[:16]
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def default_pma_audit_log_path(hub_root: Path) -> Path:
    """Location of the PMA audit JSONL log under *hub_root*."""
    return hub_root.joinpath(".codex-autorunner", "pma", PMA_AUDIT_LOG_FILENAME)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class PmaAuditLog:
    """Append-only JSONL audit log for PMA actions, guarded by a file lock.

    Fixes over the original:
      * ``count_fingerprint`` now takes the lock like every other public
        method (it previously read the file unsynchronized);
      * entry serialization is centralized in ``_serialize`` instead of
        being duplicated in the append and prune paths;
      * ``prune_old`` reads the whole log, so its returned pruned-count is
        correct (it previously capped reads at ``keep_last * 2`` entries).
    """

    def __init__(self, hub_root: Path) -> None:
        self._path = default_pma_audit_log_path(hub_root)

    @property
    def path(self) -> Path:
        """Absolute path of the backing JSONL file."""
        return self._path

    def _lock_path(self) -> Path:
        # with_suffix replaces ".jsonl", yielding "audit_log.lock".
        return self._path.with_suffix(PMA_AUDIT_LOG_LOCK_SUFFIX)

    @staticmethod
    def _serialize(entry: PmaAuditEntry) -> str:
        """JSON-encode one entry exactly as stored on disk."""
        return json.dumps(
            {
                "entry_id": entry.entry_id,
                "action_type": entry.action_type.value,
                "timestamp": entry.timestamp,
                "agent": entry.agent,
                "thread_id": entry.thread_id,
                "turn_id": entry.turn_id,
                "client_turn_id": entry.client_turn_id,
                "details": entry.details,
                "status": entry.status,
                "error": entry.error,
                "fingerprint": entry.fingerprint,
            }
        )

    def append(self, entry: PmaAuditEntry) -> str:
        """Append *entry* to the log and return its id."""
        with file_lock(self._lock_path()):
            self._append_unlocked(entry)
        return entry.entry_id

    def _append_unlocked(self, entry: PmaAuditEntry) -> None:
        """Write one JSONL line; caller must hold the lock."""
        self._path.parent.mkdir(parents=True, exist_ok=True)
        with open(self._path, "a", encoding="utf-8") as f:
            f.write(self._serialize(entry) + "\n")

    def _read_all_unlocked(self) -> list[PmaAuditEntry]:
        """Parse every well-formed line of the log; caller must hold the lock."""
        if not self._path.exists():
            return []
        entries: list[PmaAuditEntry] = []
        try:
            with open(self._path, "r", encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    try:
                        data = json.loads(line)
                    except json.JSONDecodeError:
                        continue
                    if not isinstance(data, dict):
                        continue
                    entries.append(self._parse_entry(data))
        except OSError as exc:
            logger.warning("Failed to read PMA audit log at %s: %s", self._path, exc)
        return entries

    @staticmethod
    def _parse_entry(data: dict[str, Any]) -> PmaAuditEntry:
        """Build an entry from one decoded JSON object.

        Unknown or missing action types fall back to PmaActionType.UNKNOWN so
        old logs remain readable after the enum evolves.
        """
        try:
            action_type_str = data.get("action_type")
            action = (
                PmaActionType(action_type_str)
                if action_type_str
                else PmaActionType.UNKNOWN
            )
        except ValueError:
            action = PmaActionType.UNKNOWN
        return PmaAuditEntry(
            action_type=action,
            timestamp=data.get("timestamp", ""),
            entry_id=data.get("entry_id", ""),
            agent=data.get("agent"),
            thread_id=data.get("thread_id"),
            turn_id=data.get("turn_id"),
            client_turn_id=data.get("client_turn_id"),
            details=dict(data.get("details", {}) or {}),
            status=data.get("status", "ok"),
            error=data.get("error"),
            fingerprint=data.get("fingerprint", ""),
        )

    def list_recent(
        self, *, limit: int = 100, action_type: Optional[PmaActionType] = None
    ) -> list[PmaAuditEntry]:
        """Return the newest *limit* entries, optionally filtered by type."""
        with file_lock(self._lock_path()):
            return self._list_recent_unlocked(limit=limit, action_type=action_type)

    def _list_recent_unlocked(
        self, *, limit: int = 100, action_type: Optional[PmaActionType] = None
    ) -> list[PmaAuditEntry]:
        entries = self._read_all_unlocked()
        if action_type is not None:
            entries = [e for e in entries if e.action_type == action_type]
        return entries[-limit:]

    def prune_old(self, *, keep_last: int = 1000) -> int:
        """Trim the log to the newest *keep_last* entries; return count removed."""
        with file_lock(self._lock_path()):
            return self._prune_old_unlocked(keep_last=keep_last)

    def _prune_old_unlocked(self, *, keep_last: int = 1000) -> int:
        if not self._path.exists():
            return 0
        entries = self._read_all_unlocked()
        if len(entries) <= keep_last:
            return 0
        to_keep = entries[-keep_last:]
        self._path.parent.mkdir(parents=True, exist_ok=True)
        # NOTE(review): this rewrite is not atomic; a crash mid-write loses
        # the log. Consider an atomic-write helper here — TODO confirm.
        with open(self._path, "w", encoding="utf-8") as f:
            for entry in to_keep:
                f.write(self._serialize(entry) + "\n")
        return len(entries) - keep_last

    def count_fingerprint(
        self, fingerprint: str, *, within_seconds: Optional[int] = None
    ) -> int:
        """Count stored entries with *fingerprint*, optionally only those
        newer than *within_seconds* ago.

        Entries whose timestamps cannot be parsed are skipped in windowed
        mode, matching the original behavior.
        """
        with file_lock(self._lock_path()):
            entries = self._read_all_unlocked()
        if not within_seconds:
            return sum(1 for e in entries if e.fingerprint == fingerprint)
        cutoff = datetime.now(timezone.utc).timestamp() - within_seconds
        count = 0
        for entry in entries:
            if entry.fingerprint != fingerprint:
                continue
            try:
                ts = datetime.fromisoformat(entry.timestamp.replace("Z", "+00:00"))
                if ts.timestamp() >= cutoff:
                    count += 1
            except Exception:
                continue
        return count
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
# Explicit public API of this module for `import *` and documentation tools.
__all__ = [
    "PmaActionType",
    "PmaAuditEntry",
    "PmaAuditLog",
    "default_pma_audit_log_path",
]
|