aline-ai 0.6.3__py3-none-any.whl → 0.6.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aline_ai-0.6.3.dist-info → aline_ai-0.6.5.dist-info}/METADATA +1 -1
- {aline_ai-0.6.3.dist-info → aline_ai-0.6.5.dist-info}/RECORD +26 -23
- realign/__init__.py +1 -1
- realign/adapters/codex.py +14 -9
- realign/cli.py +42 -235
- realign/codex_detector.py +72 -32
- realign/codex_home.py +85 -0
- realign/codex_terminal_linker.py +172 -0
- realign/commands/__init__.py +2 -2
- realign/commands/add.py +89 -9
- realign/commands/doctor.py +497 -0
- realign/commands/init.py +66 -4
- realign/commands/watcher.py +2 -1
- realign/config.py +10 -1
- realign/dashboard/app.py +2 -149
- realign/dashboard/tmux_manager.py +171 -5
- realign/dashboard/widgets/config_panel.py +91 -11
- realign/dashboard/widgets/sessions_table.py +1 -1
- realign/dashboard/widgets/terminal_panel.py +400 -35
- realign/db/sqlite_db.py +76 -0
- realign/hooks.py +6 -128
- realign/watcher_core.py +50 -0
- {aline_ai-0.6.3.dist-info → aline_ai-0.6.5.dist-info}/WHEEL +0 -0
- {aline_ai-0.6.3.dist-info → aline_ai-0.6.5.dist-info}/entry_points.txt +0 -0
- {aline_ai-0.6.3.dist-info → aline_ai-0.6.5.dist-info}/licenses/LICENSE +0 -0
- {aline_ai-0.6.3.dist-info → aline_ai-0.6.5.dist-info}/top_level.txt +0 -0
realign/db/sqlite_db.py
CHANGED
@@ -1462,6 +1462,82 @@ class SQLiteDatabase(DatabaseInterface):
         except Exception:
             return 0
 
+    def requeue_failed_jobs(
+        self,
+        *,
+        kinds: Optional[List[str]] = None,
+    ) -> Tuple[int, List[Dict[str, Any]]]:
+        """
+        Requeue all failed jobs, optionally filtering by kind.
+
+        Returns:
+            (count, jobs) - number of jobs requeued and their details
+        """
+        conn = self._get_connection()
+        try:
+            # First, get the failed jobs
+            where_clauses: list[str] = ["status = 'failed'"]
+            params: list[Any] = []
+            if kinds:
+                placeholders = ",".join(["?"] * len(kinds))
+                where_clauses.append(f"kind IN ({placeholders})")
+                params.extend(kinds)
+
+            where_sql = "WHERE " + " AND ".join(where_clauses)
+
+            rows = conn.execute(
+                f"""
+                SELECT id, kind, dedupe_key, payload, last_error, attempts
+                FROM jobs
+                {where_sql}
+                ORDER BY updated_at DESC
+                """,
+                tuple(params),
+            ).fetchall()
+
+            if not rows:
+                return 0, []
+
+            jobs_info: List[Dict[str, Any]] = []
+            for row in rows:
+                payload_raw = row["payload"] or "{}"
+                try:
+                    payload_obj = json.loads(payload_raw)
+                except Exception:
+                    payload_obj = {}
+                jobs_info.append({
+                    "id": str(row["id"]),
+                    "kind": row["kind"],
+                    "dedupe_key": row["dedupe_key"],
+                    "payload": payload_obj,
+                    "last_error": row["last_error"],
+                    "attempts": row["attempts"],
+                })
+
+            # Requeue them
+            conn.execute(
+                f"""
+                UPDATE jobs
+                SET status = 'queued',
+                    attempts = 0,
+                    next_run_at = datetime('now'),
+                    last_error = NULL,
+                    locked_until = NULL,
+                    locked_by = NULL,
+                    updated_at = datetime('now')
+                {where_sql}
+                """,
+                tuple(params),
+            )
+            conn.commit()
+
+            return len(jobs_info), jobs_info
+        except sqlite3.OperationalError:
+            return 0, []
+        except Exception:
+            conn.rollback()
+            return 0, []
+
     def update_turn_summary(
         self,
         turn_id: str,
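The new requeue_failed_jobs method returns both a count and the details of every requeued job, so a caller can report exactly what was reset. A minimal usage sketch, assuming db is an already-constructed SQLiteDatabase and treating the kinds values as illustrative only (the real command wiring plausibly lives in the new realign/commands/doctor.py, which is not shown in this diff):

    def report_requeued_jobs(db) -> None:
        # Sketch: the job kinds below are examples, not confirmed names from this package.
        count, jobs = db.requeue_failed_jobs(kinds=["summary", "metadata"])
        if count == 0:
            print("No failed jobs to requeue.")
            return
        for job in jobs:
            print(
                f"requeued {job['kind']} job {job['id']} "
                f"(previous attempts: {job['attempts']}, last error: {job['last_error']})"
            )
        print(f"{count} job(s) moved back to 'queued' with attempts reset to 0.")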
realign/hooks.py
CHANGED
@@ -157,80 +157,11 @@ def _classify_task_metadata(
 ) -> Tuple[str, str]:
     """
     Run a dedicated LLM classification pass for if_last_task and satisfaction tags.
-    """
-    defaults = ("no", "fine")
-
-    if not (user_messages or assistant_replies or code_changes):
-        return defaults
-
-    def _clip_text(text: str, limit: int) -> str:
-        text = (text or "").strip()
-        if not text:
-            return ""
-        if len(text) <= limit:
-            return text
-        return text[: max(0, limit - 3)].rstrip() + "..."
-
-    clipped_user = _clip_text(user_messages, 2000) or "(missing)"
-    current_title = (summary_title or "").strip() or "(missing)"
-    previous_title = (previous_commit_title or "").strip() or "(none)"
-
-    # Try cloud provider first if provider is "auto" or "cloud" and user is logged in
-    if provider in ("auto", "cloud"):
-        try:
-            from .auth import is_logged_in
-
-            if is_logged_in():
-                logger.debug("Attempting cloud LLM for metadata classification")
-                # Load user custom prompt if available
-                custom_prompt = None
-                if system_prompt is not None:
-                    custom_prompt = system_prompt
-                else:
-                    user_prompt_path = Path.home() / ".aline" / "prompts" / "metadata.md"
-                    try:
-                        if user_prompt_path.exists():
-                            custom_prompt = user_prompt_path.read_text(encoding="utf-8").strip()
-                    except Exception:
-                        pass
-
-                model_name, result = call_llm_cloud(
-                    task="metadata",
-                    payload={
-                        "previous_title": previous_title,
-                        "user_messages": clipped_user,
-                        "current_title": current_title,
-                    },
-                    custom_prompt=custom_prompt,
-                    silent=False,
-                )
-
-                if result:
-                    if_last_task = result.get("if_last_task", "no")
-                    satisfaction = result.get("satisfaction", "fine")
-                    logger.info(
-                        "Cloud LLM metadata response: if_last_task=%s, satisfaction=%s",
-                        if_last_task,
-                        satisfaction,
-                    )
-                    print(
-                        f" 🔍 Metadata classification: if_last_task={if_last_task}, "
-                        f"satisfaction={satisfaction}",
-                        file=sys.stderr,
-                    )
-                    return if_last_task, satisfaction
-                else:
-                    # Cloud LLM failed, return defaults (local fallback disabled)
-                    logger.warning("Cloud LLM metadata failed, returning defaults")
-                    print(" ⚠️ Cloud LLM metadata failed, using defaults", file=sys.stderr)
-                    return defaults
-        except ImportError:
-            logger.debug("Auth module not available, skipping cloud LLM")
 
-
-
-
-    return
+    NOTE: LLM-based metadata classification is disabled. Always returns defaults.
+    """
+    # Metadata LLM classification disabled - always return defaults
+    return ("no", "fine")
 
 # =========================================================================
 # LOCAL LLM FALLBACK DISABLED - Code kept for reference
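For callers of _classify_task_metadata the effect is that the metadata pass is now a no-op: whatever inputs are passed, the defaults come back without any cloud call. A small sketch of the new behavior (the parameter names below appear in the removed code, but the full signature is not shown in this hunk, so treat the call shape as an assumption):

    # As of 0.6.5 no cloud LLM call is made here; the defaults are returned unconditionally.
    if_last_task, satisfaction = _classify_task_metadata(
        user_messages="Fix the login bug",        # illustrative values
        assistant_replies="Patched the auth flow",
        code_changes="auth.py",
        summary_title="Fix login bug",
        previous_commit_title="Add login form",
        provider="auto",
        system_prompt=None,
    )
    assert (if_last_task, satisfaction) == ("no", "fine")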
@@ -450,63 +381,10 @@ def find_codex_latest_session(project_path: Path, days_back: int = 7) -> Optiona
     Returns:
         Path to the most recent session file, or None if not found
     """
-    from
+    from .codex_detector import get_latest_codex_session
 
     logger.debug(f"Searching for Codex sessions for project: {project_path}")
-
-    codex_sessions_base = Path.home() / ".codex" / "sessions"
-
-    if not codex_sessions_base.exists():
-        logger.debug(f"Codex sessions directory not found: {codex_sessions_base}")
-        return None
-
-    # Normalize project path for comparison
-    abs_project_path = str(project_path.resolve())
-
-    matching_sessions = []
-
-    # Search through recent days
-    for days_ago in range(days_back + 1):
-        target_date = datetime.now() - timedelta(days=days_ago)
-        date_path = (
-            codex_sessions_base
-            / str(target_date.year)
-            / f"{target_date.month:02d}"
-            / f"{target_date.day:02d}"
-        )
-
-        if not date_path.exists():
-            continue
-
-        # Check all session files in this date directory
-        for session_file in date_path.glob("rollout-*.jsonl"):
-            try:
-                # Read first line to get session metadata
-                with open(session_file, "r", encoding="utf-8") as f:
-                    first_line = f.readline()
-                    if first_line:
-                        data = json.loads(first_line)
-                        if data.get("type") == "session_meta":
-                            session_cwd = data.get("payload", {}).get("cwd", "")
-                            # Match the project path
-                            if session_cwd == abs_project_path:
-                                matching_sessions.append(session_file)
-                                logger.debug(f"Found matching Codex session: {session_file}")
-            except (json.JSONDecodeError, IOError) as e:
-                logger.debug(f"Skipping malformed session file {session_file}: {e}")
-                continue
-
-    # Sort by modification time, newest first
-    matching_sessions.sort(key=lambda p: p.stat().st_mtime, reverse=True)
-
-    if matching_sessions:
-        logger.info(
-            f"Found {len(matching_sessions)} Codex session(s), using latest: {matching_sessions[0]}"
-        )
-    else:
-        logger.debug("No matching Codex sessions found")
-
-    return matching_sessions[0] if matching_sessions else None
+    return get_latest_codex_session(project_path, days_back=days_back)
 
 
 def find_all_claude_sessions() -> List[Path]:
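The inline search that was removed identified a project's sessions by reading the session_meta record on the first line of each rollout-*.jsonl file; that matching now lives behind get_latest_codex_session in realign/codex_detector.py. For reference, the record shape the removed code consumed (field values here are illustrative):

    import json

    # First line of a Codex rollout file (~/.codex/sessions/<YYYY>/<MM>/<DD>/rollout-*.jsonl);
    # the payload "cwd" field is what gets compared against the resolved project path.
    first_line = '{"type": "session_meta", "payload": {"cwd": "/home/user/my-project"}}'
    data = json.loads(first_line)
    if data.get("type") == "session_meta":
        session_cwd = data.get("payload", {}).get("cwd", "")
        print(session_cwd)  # -> /home/user/my-project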
realign/watcher_core.py
CHANGED
@@ -251,6 +251,46 @@ class DialogueWatcher:
         self.user_prompt_signal_dir = self.signal_dir / "user_prompt_submit"
         self.user_prompt_signal_dir.mkdir(parents=True, exist_ok=True)
 
+    def _maybe_link_codex_terminal(self, session_file: Path) -> None:
+        """Best-effort: bind a Codex session file to the most likely active Codex terminal."""
+        try:
+            if self._detect_session_type(session_file) != "codex":
+                return
+        except Exception:
+            return
+
+        try:
+            from .codex_home import terminal_id_from_codex_session_file
+            from .codex_terminal_linker import read_codex_session_meta, select_agent_for_codex_session
+            from .db import get_database
+
+            meta = read_codex_session_meta(session_file)
+            if meta is None:
+                return
+
+            db = get_database(read_only=False)
+            agents = db.list_agents(status="active", limit=1000)
+            # Deterministic mapping: session file stored under ~/.aline/codex_homes/<terminal_id>/...
+            agent_id = terminal_id_from_codex_session_file(session_file)
+            if not agent_id:
+                # Fallback heuristic mapping (legacy default ~/.codex/sessions).
+                agent_id = select_agent_for_codex_session(agents, session=meta)
+            if not agent_id:
+                return
+
+            db.update_agent(
+                agent_id,
+                provider="codex",
+                session_type="codex",
+                session_id=session_file.stem,
+                transcript_path=str(session_file),
+                cwd=meta.cwd,
+                project_dir=meta.cwd,
+                source="codex:auto-link",
+            )
+        except Exception:
+            return
+
     async def start(self):
         """Start watching session files."""
         if not self.config.mcp_auto_commit:
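The deterministic branch above assumes Codex session files are stored under per-terminal Codex homes. The actual helper lives in the new realign/codex_home.py, which this diff does not show, so the following is only a plausible sketch of the mapping it implies, assuming a ~/.aline/codex_homes/<terminal_id>/... layout:

    from pathlib import Path
    from typing import Optional

    CODEX_HOMES = Path.home() / ".aline" / "codex_homes"  # layout assumed, not confirmed

    def terminal_id_from_codex_session_file(session_file: Path) -> Optional[str]:
        # Sketch: take the first path component under codex_homes as the terminal id.
        try:
            relative = session_file.resolve().relative_to(CODEX_HOMES)
        except ValueError:
            # Legacy files under ~/.codex/sessions don't match; the heuristic linker handles those.
            return None
        return relative.parts[0] if relative.parts else None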
@@ -1047,6 +1087,11 @@ class DialogueWatcher:
             if old_size is None or old_mtime is None:
                 changed_files.append(Path(path))
                 logger.debug(f"Session file first seen: {Path(path).name} ({size} bytes)")
+                # Best-effort: link newly discovered Codex sessions to an active Codex terminal.
+                try:
+                    self._maybe_link_codex_terminal(Path(path))
+                except Exception:
+                    pass
                 # Reset idle final-commit attempt tracking for new files
                 self.last_final_commit_times.pop(path, None)
                 continue
@@ -1107,6 +1152,11 @@ class DialogueWatcher:
         for session_file in changed_files:
             if not session_file.exists():
                 continue
+            # Best-effort: keep terminal bindings fresh (especially after watcher restarts).
+            try:
+                self._maybe_link_codex_terminal(session_file)
+            except Exception:
+                pass
             new_turns = self._get_new_completed_turn_numbers(session_file)
             if new_turns:
                 sessions_to_enqueue.append((session_file, new_turns))