aline-ai 0.7.2__py3-none-any.whl → 0.7.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.4.dist-info}/METADATA +1 -1
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.4.dist-info}/RECORD +32 -27
- realign/__init__.py +1 -1
- realign/adapters/codex.py +30 -2
- realign/claude_hooks/stop_hook.py +176 -21
- realign/codex_home.py +71 -0
- realign/codex_hooks/__init__.py +16 -0
- realign/codex_hooks/notify_hook.py +511 -0
- realign/codex_hooks/notify_hook_installer.py +247 -0
- realign/commands/doctor.py +125 -0
- realign/commands/export_shares.py +188 -65
- realign/commands/import_shares.py +30 -10
- realign/commands/init.py +16 -0
- realign/commands/sync_agent.py +274 -44
- realign/commit_pipeline.py +1024 -0
- realign/config.py +3 -11
- realign/dashboard/app.py +151 -2
- realign/dashboard/diagnostics.py +274 -0
- realign/dashboard/screens/create_agent.py +2 -1
- realign/dashboard/screens/create_agent_info.py +40 -77
- realign/dashboard/tmux_manager.py +348 -33
- realign/dashboard/widgets/agents_panel.py +942 -314
- realign/dashboard/widgets/config_panel.py +34 -121
- realign/dashboard/widgets/header.py +1 -1
- realign/db/sqlite_db.py +59 -1
- realign/logging_config.py +51 -6
- realign/watcher_core.py +742 -393
- realign/worker_core.py +206 -15
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.4.dist-info}/WHEEL +0 -0
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.4.dist-info}/entry_points.txt +0 -0
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.4.dist-info}/licenses/LICENSE +0 -0
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.4.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,511 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Codex CLI notify hook.
|
|
3
|
+
|
|
4
|
+
Codex (Rust CLI) can execute a `notify` command when the agent finishes a turn.
|
|
5
|
+
We use this hook to enqueue a durable `session_process` job in Aline's SQLite DB,
|
|
6
|
+
so the worker can process all missing turns without watcher polling.
|
|
7
|
+
|
|
8
|
+
This script must be dependency-light and never raise (it's executed from Codex).
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import json
|
|
14
|
+
import os
|
|
15
|
+
import sqlite3
|
|
16
|
+
import sys
|
|
17
|
+
import time
|
|
18
|
+
import uuid
|
|
19
|
+
from datetime import datetime
|
|
20
|
+
from datetime import timedelta
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
from typing import Any, Optional
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _parse_config_sqlite_db_path(config_path: Path) -> Optional[str]:
|
|
26
|
+
"""Parse ~/.aline/config.yaml for sqlite_db_path (best-effort)."""
|
|
27
|
+
try:
|
|
28
|
+
import yaml # type: ignore
|
|
29
|
+
except Exception:
|
|
30
|
+
yaml = None # type: ignore[assignment]
|
|
31
|
+
|
|
32
|
+
if yaml is None:
|
|
33
|
+
return None
|
|
34
|
+
try:
|
|
35
|
+
data = yaml.safe_load(config_path.read_text(encoding="utf-8")) or {}
|
|
36
|
+
except Exception:
|
|
37
|
+
return None
|
|
38
|
+
if not isinstance(data, dict):
|
|
39
|
+
return None
|
|
40
|
+
raw = data.get("sqlite_db_path")
|
|
41
|
+
if isinstance(raw, str) and raw.strip():
|
|
42
|
+
return raw.strip()
|
|
43
|
+
return None
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _resolve_sqlite_db_path() -> Path:
|
|
47
|
+
env_db_path = (
|
|
48
|
+
os.getenv("REALIGN_SQLITE_DB_PATH") or os.getenv("REALIGN_DB_PATH") or os.getenv("ALINE_DB_PATH")
|
|
49
|
+
)
|
|
50
|
+
if env_db_path:
|
|
51
|
+
return Path(env_db_path).expanduser()
|
|
52
|
+
|
|
53
|
+
config_path = Path.home() / ".aline" / "config.yaml"
|
|
54
|
+
cfg = _parse_config_sqlite_db_path(config_path) if config_path.exists() else None
|
|
55
|
+
if cfg:
|
|
56
|
+
return Path(cfg).expanduser()
|
|
57
|
+
|
|
58
|
+
return Path.home() / ".aline" / "db" / "aline.db"
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _safe_json(obj: Any) -> str:
|
|
62
|
+
try:
|
|
63
|
+
return json.dumps(obj, ensure_ascii=False, default=str)
|
|
64
|
+
except Exception:
|
|
65
|
+
return "{}"
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def _read_event_payload() -> dict[str, Any]:
    """Return the Codex notify event as a dict.

    Codex passes the event JSON as argv[1] (per its docs); stdin is accepted
    as a fallback. Any parse failure yields an empty dict.
    """
    raw = (sys.argv[1] or "") if len(sys.argv) >= 2 else ""
    if not raw.strip():
        try:
            raw = sys.stdin.read() or ""
        except Exception:
            raw = ""
    if not raw.strip():
        return {}
    try:
        parsed = json.loads(raw)
    except Exception:
        return {}
    return parsed if isinstance(parsed, dict) else {}
|
|
83
|
+
|
|
84
|
+
def _infer_agent_terminal_from_codex_home(codex_home: Path) -> tuple[str | None, str | None]:
|
|
85
|
+
"""Infer (agent_id, terminal_id) from an Aline-managed CODEX_HOME path.
|
|
86
|
+
|
|
87
|
+
Supported layouts:
|
|
88
|
+
- ~/.aline/codex_homes/<terminal_id>/
|
|
89
|
+
- ~/.aline/codex_homes/agent-<agent_id>/<terminal_id>/
|
|
90
|
+
"""
|
|
91
|
+
try:
|
|
92
|
+
p = codex_home.expanduser().resolve()
|
|
93
|
+
except Exception:
|
|
94
|
+
p = Path(codex_home)
|
|
95
|
+
|
|
96
|
+
parts = p.parts
|
|
97
|
+
try:
|
|
98
|
+
idx = parts.index("codex_homes")
|
|
99
|
+
except ValueError:
|
|
100
|
+
return None, None
|
|
101
|
+
|
|
102
|
+
if idx + 1 >= len(parts):
|
|
103
|
+
return None, None
|
|
104
|
+
|
|
105
|
+
owner = (parts[idx + 1] or "").strip()
|
|
106
|
+
if not owner:
|
|
107
|
+
return None, None
|
|
108
|
+
|
|
109
|
+
if owner.startswith("agent-"):
|
|
110
|
+
agent_id = owner[len("agent-") :].strip() or None
|
|
111
|
+
terminal_id = (parts[idx + 2] or "").strip() if idx + 2 < len(parts) else ""
|
|
112
|
+
return agent_id, (terminal_id or None)
|
|
113
|
+
|
|
114
|
+
# Terminal layout.
|
|
115
|
+
return None, (owner or None)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def _extract_thread_id(evt: dict[str, Any]) -> str:
|
|
119
|
+
for k in ("thread-id", "threadId", "thread_id", "thread"):
|
|
120
|
+
v = evt.get(k)
|
|
121
|
+
if isinstance(v, str) and v.strip():
|
|
122
|
+
return v.strip()
|
|
123
|
+
return ""
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def _extract_cwd(evt: dict[str, Any]) -> str:
|
|
127
|
+
v = evt.get("cwd")
|
|
128
|
+
if isinstance(v, str) and v.strip():
|
|
129
|
+
return v.strip()
|
|
130
|
+
return ""
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def _read_session_identifier(session_file: Path) -> str:
|
|
134
|
+
"""Best-effort: extract Codex 'thread id' from the session file header/metadata."""
|
|
135
|
+
try:
|
|
136
|
+
with session_file.open("r", encoding="utf-8") as f:
|
|
137
|
+
for i, line in enumerate(f):
|
|
138
|
+
if i >= 25:
|
|
139
|
+
break
|
|
140
|
+
raw = (line or "").strip()
|
|
141
|
+
if not raw:
|
|
142
|
+
continue
|
|
143
|
+
try:
|
|
144
|
+
data = json.loads(raw)
|
|
145
|
+
except Exception:
|
|
146
|
+
continue
|
|
147
|
+
|
|
148
|
+
# Newer header: {id, timestamp, git} without "type"
|
|
149
|
+
if "id" in data and "type" not in data:
|
|
150
|
+
v = data.get("id")
|
|
151
|
+
if isinstance(v, str) and v.strip():
|
|
152
|
+
return v.strip()
|
|
153
|
+
|
|
154
|
+
# session_meta payload may contain id/thread_id
|
|
155
|
+
if data.get("type") == "session_meta":
|
|
156
|
+
payload = data.get("payload") if isinstance(data.get("payload"), dict) else {}
|
|
157
|
+
for k in ("thread_id", "threadId", "thread-id", "id"):
|
|
158
|
+
v = payload.get(k) if isinstance(payload, dict) else None
|
|
159
|
+
if isinstance(v, str) and v.strip():
|
|
160
|
+
return v.strip()
|
|
161
|
+
except Exception:
|
|
162
|
+
return ""
|
|
163
|
+
return ""
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def _read_session_cwd(session_file: Path) -> str:
|
|
167
|
+
"""Best-effort: extract cwd from session metadata/header."""
|
|
168
|
+
try:
|
|
169
|
+
with session_file.open("r", encoding="utf-8") as f:
|
|
170
|
+
for i, line in enumerate(f):
|
|
171
|
+
if i >= 25:
|
|
172
|
+
break
|
|
173
|
+
raw = (line or "").strip()
|
|
174
|
+
if not raw:
|
|
175
|
+
continue
|
|
176
|
+
try:
|
|
177
|
+
data = json.loads(raw)
|
|
178
|
+
except Exception:
|
|
179
|
+
continue
|
|
180
|
+
|
|
181
|
+
if data.get("type") == "session_meta":
|
|
182
|
+
payload = data.get("payload") if isinstance(data.get("payload"), dict) else {}
|
|
183
|
+
cwd = payload.get("cwd") if isinstance(payload, dict) else None
|
|
184
|
+
if isinstance(cwd, str) and cwd.strip():
|
|
185
|
+
return cwd.strip()
|
|
186
|
+
|
|
187
|
+
if "type" not in data and isinstance(data.get("git"), dict):
|
|
188
|
+
cwd = data["git"].get("cwd")
|
|
189
|
+
if isinstance(cwd, str) and cwd.strip():
|
|
190
|
+
return cwd.strip()
|
|
191
|
+
cwd2 = data.get("cwd")
|
|
192
|
+
if isinstance(cwd2, str) and cwd2.strip():
|
|
193
|
+
return cwd2.strip()
|
|
194
|
+
except Exception:
|
|
195
|
+
return ""
|
|
196
|
+
return ""
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def _thread_map_path(codex_home: Path) -> Path:
|
|
200
|
+
return codex_home / ".aline_thread_map.json"
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
def _load_thread_map(codex_home: Path) -> dict[str, str]:
    """Read the cached thread-id → session-file mapping; {} on any problem.

    Entries with non-string keys or values are dropped.
    """
    path = _thread_map_path(codex_home)
    if not path.exists():
        return {}
    try:
        loaded = json.loads(path.read_text(encoding="utf-8"))
    except Exception:
        return {}
    if not isinstance(loaded, dict):
        return {}
    return {k: v for k, v in loaded.items() if isinstance(k, str) and isinstance(v, str)}
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
def _save_thread_map(codex_home: Path, mapping: dict[str, str]) -> None:
    """Atomically persist the thread map; silently give up on any error."""
    target = _thread_map_path(codex_home)
    scratch = target.with_suffix(".json.tmp")
    try:
        # Write-then-rename keeps readers from ever seeing a partial file.
        scratch.write_text(json.dumps(mapping, ensure_ascii=False, indent=2), encoding="utf-8")
        scratch.replace(target)
    except Exception:
        return
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def _iter_recent_codex_session_files(sessions_root: Path, *, days_back: int = 2) -> list[Path]:
|
|
231
|
+
"""List recent rollout session files under CODEX_HOME/sessions (YYYY/MM/DD layout)."""
|
|
232
|
+
out: list[Path] = []
|
|
233
|
+
if not sessions_root.exists():
|
|
234
|
+
return out
|
|
235
|
+
|
|
236
|
+
# Preferred: YYYY/MM/DD partitioned directories.
|
|
237
|
+
now = datetime.now()
|
|
238
|
+
for days_ago in range(max(0, int(days_back)) + 1):
|
|
239
|
+
dt = now - timedelta(days=days_ago)
|
|
240
|
+
p = sessions_root / str(dt.year) / f"{dt.month:02d}" / f"{dt.day:02d}"
|
|
241
|
+
if not p.exists():
|
|
242
|
+
continue
|
|
243
|
+
try:
|
|
244
|
+
out.extend(p.glob("rollout-*.jsonl"))
|
|
245
|
+
except Exception:
|
|
246
|
+
continue
|
|
247
|
+
|
|
248
|
+
# Fallback: flat structure
|
|
249
|
+
if not out:
|
|
250
|
+
try:
|
|
251
|
+
out.extend(list(sessions_root.glob("rollout-*.jsonl")))
|
|
252
|
+
except Exception:
|
|
253
|
+
pass
|
|
254
|
+
|
|
255
|
+
# De-dup and sort by mtime desc.
|
|
256
|
+
uniq: dict[str, Path] = {}
|
|
257
|
+
for f in out:
|
|
258
|
+
uniq[str(f)] = f
|
|
259
|
+
files = list(uniq.values())
|
|
260
|
+
try:
|
|
261
|
+
files.sort(key=lambda p: p.stat().st_mtime if p.exists() else 0, reverse=True)
|
|
262
|
+
except Exception:
|
|
263
|
+
pass
|
|
264
|
+
return files
|
|
265
|
+
|
|
266
|
+
|
|
267
|
+
def _find_session_file_for_event(
    *, codex_home: Path, thread_id: str, cwd: str
) -> Optional[Path]:
    """Locate the rollout session file a notify event refers to.

    Resolution order:
      1. Cached thread-id → path mapping (validated via a cheap header read).
      2. Recent session files whose header id equals ``thread_id``.
      3. Recent session files whose recorded cwd equals ``cwd``.
      4. The most recently modified session file, as a last resort.
    Successful lookups are written back to the on-disk cache.
    """
    sessions_root = codex_home / "sessions"
    cache = _load_thread_map(codex_home)
    cached = cache.get(thread_id) if thread_id else None
    if cached:
        p = Path(cached)
        if p.exists():
            # Validate cached mapping (cheap header read) to avoid stale misroutes.
            try:
                if not thread_id or _read_session_identifier(p) == thread_id:
                    return p
            except Exception:
                # Header unreadable; trust the cache rather than rescan.
                return p

    candidates = _iter_recent_codex_session_files(sessions_root, days_back=2)
    cwd_norm = (cwd or "").strip()

    # First pass: match by thread id.
    if thread_id:
        for f in candidates[:50]:  # cap header reads to bound hook latency
            try:
                if _read_session_identifier(f) == thread_id:
                    cache[thread_id] = str(f)
                    _save_thread_map(codex_home, cache)
                    return f
            except Exception:
                continue

    # Second pass: match by cwd.
    if cwd_norm:
        for f in candidates[:50]:
            try:
                if _read_session_cwd(f) == cwd_norm:
                    if thread_id:
                        cache[thread_id] = str(f)
                        _save_thread_map(codex_home, cache)
                    return f
            except Exception:
                continue

    # Last resort: newest session file overall (may be None).
    chosen = candidates[0] if candidates else None
    if chosen is not None and thread_id:
        cache[thread_id] = str(chosen)
        _save_thread_map(codex_home, cache)
    return chosen
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
def _try_enqueue_session_process_job(
    *,
    session_id: str,
    session_file_path: str,
    workspace_path: str | None,
    session_type: str | None,
    source_event: str | None,
    no_track: bool,
    agent_id: str | None,
    terminal_id: str | None,
    connect_timeout_seconds: float,
) -> bool:
    """Best-effort enqueue into sqlite jobs table. Never raises.

    Upserts a ``session_process`` job keyed on
    ``dedupe_key = "session_process:<session_id>"``: a queued/processing row is
    refreshed in place, a retry/done row is re-queued. Returns True only when
    the row was written and committed; False when the DB file is missing or
    anything fails (the caller then falls back to a signal file).
    """
    try:
        db_path = _resolve_sqlite_db_path()
        # If the DB doesn't exist yet, Aline isn't initialized; don't create it here.
        if not db_path.exists():
            return False

        payload: dict[str, Any] = {"session_id": session_id, "session_file_path": session_file_path}
        if workspace_path is not None:
            payload["workspace_path"] = workspace_path
        if session_type:
            payload["session_type"] = session_type
        if source_event:
            payload["source_event"] = source_event
        if no_track:
            payload["no_track"] = True
        if agent_id:
            payload["agent_id"] = agent_id
        if terminal_id:
            payload["terminal_id"] = terminal_id

        job_id = str(uuid.uuid4())
        payload_json = json.dumps(payload, ensure_ascii=False)
        dedupe_key = f"session_process:{session_id}"

        # Short timeout: this runs inline in the Codex notify path, so we'd
        # rather fail fast (and fall back to a signal file) than block Codex.
        conn = sqlite3.connect(str(db_path), timeout=float(connect_timeout_seconds))
        try:
            conn.execute(
                """
                INSERT INTO jobs (
                    id, kind, dedupe_key, payload, status, priority, attempts, next_run_at,
                    locked_until, locked_by, reschedule, last_error, created_at, updated_at
                ) VALUES (
                    ?, ?, ?, ?, 'queued', ?, 0, datetime('now'),
                    NULL, NULL, 0, NULL, datetime('now'), datetime('now')
                )
                ON CONFLICT(dedupe_key) DO UPDATE SET
                    kind=excluded.kind,
                    payload=excluded.payload,
                    priority=MAX(COALESCE(jobs.priority, 0), COALESCE(excluded.priority, 0)),
                    attempts=CASE
                        WHEN jobs.status='retry' THEN 0
                        ELSE COALESCE(jobs.attempts, 0)
                    END,
                    updated_at=datetime('now'),
                    reschedule=CASE
                        WHEN jobs.status='processing' THEN 1
                        ELSE COALESCE(jobs.reschedule, 0)
                    END,
                    last_error=CASE
                        WHEN jobs.status='retry' THEN NULL
                        ELSE jobs.last_error
                    END,
                    status=CASE
                        WHEN jobs.status='processing' THEN jobs.status
                        WHEN jobs.status='queued' THEN jobs.status
                        WHEN jobs.status='retry' THEN 'queued'
                        WHEN jobs.status='done' THEN 'queued'
                        ELSE 'queued'
                    END,
                    next_run_at=CASE
                        WHEN jobs.status='processing' THEN jobs.next_run_at
                        WHEN jobs.next_run_at IS NULL THEN excluded.next_run_at
                        WHEN excluded.next_run_at < jobs.next_run_at THEN excluded.next_run_at
                        ELSE jobs.next_run_at
                    END
                """,
                (
                    job_id,
                    "session_process",
                    dedupe_key,
                    payload_json,
                    15,  # priority assigned to notify-driven jobs
                ),
            )
            conn.commit()
            return True
        finally:
            try:
                conn.close()
            except Exception:
                pass
    except Exception:
        return False
|
|
411
|
+
|
|
412
|
+
|
|
413
|
+
def _write_fallback_signal(*, session_id: str, session_file: str, cwd: str, agent_id: str, terminal_id: str, no_track: bool) -> None:
    """Drop a watcher-visible signal file when the DB enqueue failed.

    Best-effort only: any error (including the package import) is swallowed
    so the hook can never disturb Codex.
    """
    try:
        from . import codex_notify_signal_dir

        signal_dir = codex_notify_signal_dir()
        signal_dir.mkdir(parents=True, exist_ok=True)
        stamp_ms = int(time.time() * 1000)
        final_path = signal_dir / f"{session_id}_{stamp_ms}.signal"
        scratch_path = signal_dir / f"{session_id}_{stamp_ms}.signal.tmp"

        payload: dict[str, Any] = {
            "session_id": session_id,
            "transcript_path": session_file,
            "project_dir": cwd,
            "cwd": cwd,
            "timestamp": time.time(),
            "hook_event": "CodexNotify",
            "source_event": "notify",
            "session_type": "codex",
        }
        if agent_id:
            payload["agent_id"] = agent_id
        if terminal_id:
            payload["terminal_id"] = terminal_id
        if no_track:
            payload["no_track"] = True

        # Write-then-rename so the watcher never reads a half-written signal.
        scratch_path.write_text(json.dumps(payload, ensure_ascii=False, indent=2), encoding="utf-8")
        scratch_path.replace(final_path)
    except Exception:
        return
|
|
442
|
+
|
|
443
|
+
|
|
444
|
+
def main() -> None:
    """Handle one Codex notify event.

    Reads the event payload, locates the matching rollout session file under
    CODEX_HOME, and enqueues a durable ``session_process`` job; when the DB
    enqueue fails, drops a fallback signal file for the watcher. Intentionally
    silent on every failure path so Codex is never disturbed.
    """
    evt = _read_event_payload()
    thread_id = _extract_thread_id(evt)
    cwd = _extract_cwd(evt)
    no_track = os.environ.get("ALINE_NO_TRACK", "") == "1"
    agent_id = os.environ.get("ALINE_AGENT_ID", "").strip()
    terminal_id = os.environ.get("ALINE_TERMINAL_ID", "").strip()

    codex_home = Path(os.environ.get("CODEX_HOME", "") or (Path.home() / ".codex")).expanduser()

    session_file = None
    try:
        session_file = _find_session_file_for_event(codex_home=codex_home, thread_id=thread_id, cwd=cwd)
    except Exception:
        session_file = None
    if session_file is None or not session_file.exists():
        # Nothing else to do; avoid crashing Codex.
        return

    # If the event payload doesn't include cwd, fall back to the session meta header.
    if not (cwd or "").strip():
        try:
            cwd = _read_session_cwd(session_file) or ""
        except Exception:
            cwd = ""

    # Some Codex notify runners don't propagate arbitrary env vars. Infer from CODEX_HOME when possible.
    if not agent_id or not terminal_id:
        inferred_agent_id, inferred_terminal_id = _infer_agent_terminal_from_codex_home(codex_home)
        if not agent_id and inferred_agent_id:
            agent_id = inferred_agent_id
        if not terminal_id and inferred_terminal_id:
            terminal_id = inferred_terminal_id

    session_id = session_file.stem
    # Fix: a malformed ALINE_CODEX_NOTIFY_DB_TIMEOUT previously raised ValueError
    # here, violating the module's "never raise" contract; fall back to 0.2s.
    try:
        connect_timeout = float(os.environ.get("ALINE_CODEX_NOTIFY_DB_TIMEOUT", "0.2"))
    except (TypeError, ValueError):
        connect_timeout = 0.2

    ok = _try_enqueue_session_process_job(
        session_id=session_id,
        session_file_path=str(session_file),
        workspace_path=cwd or None,
        session_type="codex",
        source_event="notify",
        no_track=no_track,
        agent_id=agent_id or None,
        terminal_id=terminal_id or None,
        connect_timeout_seconds=connect_timeout,
    )

    if not ok:
        _write_fallback_signal(
            session_id=session_id,
            session_file=str(session_file),
            cwd=cwd or "",
            agent_id=agent_id,
            terminal_id=terminal_id,
            no_track=no_track,
        )
|
|
502
|
+
|
|
503
|
+
|
|
504
|
+
if __name__ == "__main__":
    # Entry point when Codex invokes this file as its notify command.
    try:
        main()
    except SystemExit:
        raise
    except Exception:
        # Hook must never crash Codex.
        sys.exit(0)
|