aline-ai 0.7.3__py3-none-any.whl → 0.7.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aline_ai-0.7.3.dist-info → aline_ai-0.7.5.dist-info}/METADATA +1 -1
- {aline_ai-0.7.3.dist-info → aline_ai-0.7.5.dist-info}/RECORD +32 -27
- realign/__init__.py +1 -1
- realign/adapters/codex.py +30 -2
- realign/claude_hooks/stop_hook.py +176 -21
- realign/codex_home.py +71 -0
- realign/codex_hooks/__init__.py +16 -0
- realign/codex_hooks/notify_hook.py +511 -0
- realign/codex_hooks/notify_hook_installer.py +247 -0
- realign/commands/doctor.py +125 -0
- realign/commands/import_shares.py +30 -10
- realign/commands/init.py +16 -0
- realign/commands/sync_agent.py +230 -52
- realign/commands/upgrade.py +117 -0
- realign/commit_pipeline.py +1024 -0
- realign/config.py +3 -11
- realign/dashboard/app.py +150 -0
- realign/dashboard/diagnostics.py +274 -0
- realign/dashboard/screens/create_agent.py +2 -1
- realign/dashboard/screens/create_agent_info.py +40 -77
- realign/dashboard/tmux_manager.py +354 -20
- realign/dashboard/widgets/agents_panel.py +817 -202
- realign/dashboard/widgets/config_panel.py +52 -121
- realign/dashboard/widgets/header.py +1 -1
- realign/db/sqlite_db.py +59 -1
- realign/logging_config.py +51 -6
- realign/watcher_core.py +742 -393
- realign/worker_core.py +206 -15
- {aline_ai-0.7.3.dist-info → aline_ai-0.7.5.dist-info}/WHEEL +0 -0
- {aline_ai-0.7.3.dist-info → aline_ai-0.7.5.dist-info}/entry_points.txt +0 -0
- {aline_ai-0.7.3.dist-info → aline_ai-0.7.5.dist-info}/licenses/LICENSE +0 -0
- {aline_ai-0.7.3.dist-info → aline_ai-0.7.5.dist-info}/top_level.txt +0 -0
realign/commands/sync_agent.py
CHANGED
@@ -11,6 +11,7 @@ Sync works with unencrypted shares only.
 import json
 import os
 import logging
+import time
 from datetime import datetime, timezone
 from typing import Optional, Dict, Any, Callable
 
@@ -28,6 +29,40 @@ logger = setup_logger("realign.commands.sync_agent", "sync_agent.log")
 MAX_SYNC_RETRIES = 3
 
 
+def _ensure_aware_utc(dt: datetime) -> datetime:
+    if dt.tzinfo is None:
+        return dt.replace(tzinfo=timezone.utc)
+    return dt
+
+
+def _parse_iso_datetime_to_utc(value: Any) -> Optional[datetime]:
+    if not isinstance(value, str) or not value:
+        return None
+    try:
+        dt = datetime.fromisoformat(value.replace("Z", "+00:00"))
+        return _ensure_aware_utc(dt)
+    except Exception:
+        return None
+
+
+def _extract_httpx_conflict_current_version(err: Exception) -> Optional[int]:
+    if not HTTPX_AVAILABLE:
+        return None
+    if not isinstance(err, httpx.HTTPStatusError):
+        return None
+    if err.response is None or err.response.status_code != 409:
+        return None
+    try:
+        payload = err.response.json()
+    except Exception:
+        return None
+    current = payload.get("current_version")
+    try:
+        return int(current)
+    except Exception:
+        return None
+
+
 def sync_agent_command(
     agent_id: str,
     backend_url: Optional[str] = None,
@@ -52,6 +87,7 @@ def sync_agent_command(
         {"success": True, "sessions_pulled": N, "sessions_pushed": N, ...} on success
         {"success": False, "error": str} on failure
     """
+
     def _progress(msg: str) -> None:
         if progress_callback:
             progress_callback(msg)
@@ -84,7 +120,10 @@ def sync_agent_command(
             agent_info = matches[0]
             agent_id = agent_info.id
         elif len(matches) > 1:
-            return {
+            return {
+                "success": False,
+                "error": f"Ambiguous agent_id prefix '{agent_id}' matches {len(matches)} agents",
+            }
         else:
             return {"success": False, "error": f"Agent not found: {agent_id}"}
 
@@ -93,7 +132,10 @@ def sync_agent_command(
 
     token = agent_info.share_admin_token or agent_info.share_contributor_token
     if not token:
-        return {
+        return {
+            "success": False,
+            "error": "No token available for sync (need admin or contributor token)",
+        }
 
     share_id = agent_info.share_id
     local_sync_version = agent_info.sync_version or 0
@@ -164,7 +206,8 @@ def sync_agent_command(
 
             # Check if any turns in this session are new to THIS AGENT (not globally)
             new_turns = [
-                t
+                t
+                for t in session_turns
                 if t.get("content_hash") and t["content_hash"] not in local_content_hashes
             ]
 
@@ -186,8 +229,11 @@ def sync_agent_command(
                     session_data, f"agent-{agent_id}", agent_info.share_url, db, force=False
                 )
                 # Count as pulled if: created new session/turns, or session was new/needed linking
-                if (
-
+                if (
+                    import_result.get("sessions", 0) > 0
+                    or import_result.get("turns", 0) > 0
+                    or should_count
+                ):
                    sessions_pulled += 1
            except Exception as e:
                logger.error(f"Failed to import remote session {session_id}: {e}")
@@ -206,11 +252,13 @@ def sync_agent_command(
        remote_updated_at = remote_event.get("updated_at")
        if remote_updated_at:
            try:
-                remote_dt =
-                local_dt =
-
-
-
+                remote_dt = _parse_iso_datetime_to_utc(remote_updated_at)
+                local_dt = (
+                    _ensure_aware_utc(agent_info.updated_at)
+                    if isinstance(agent_info.updated_at, datetime)
+                    else None
+                )
+                if remote_dt and local_dt and remote_dt > local_dt:
                    remote_name = remote_event.get("title")
                    remote_desc = remote_event.get("description")
                    updates = {}
@@ -253,16 +301,11 @@ def sync_agent_command(
    has_metadata_diff = (remote_title != local_title) or (remote_desc != local_desc)
    if has_metadata_diff and not description_updated:
        remote_updated_at = remote_event.get("updated_at")
-        remote_dt =
-        if isinstance(remote_updated_at, str) and remote_updated_at:
-            try:
-                remote_dt = datetime.fromisoformat(remote_updated_at.replace("Z", "+00:00"))
-            except Exception:
-                remote_dt = None
+        remote_dt = _parse_iso_datetime_to_utc(remote_updated_at)
 
        local_dt = getattr(agent_info, "updated_at", None)
-        if
-            local_dt = local_dt
+        if isinstance(local_dt, datetime):
+            local_dt = _ensure_aware_utc(local_dt)
 
        # If remote has no timestamp, assume local should win. Otherwise, push only if local is newer.
        if remote_dt is None or (local_dt and remote_dt and local_dt > remote_dt):
@@ -317,14 +360,138 @@ def sync_agent_command(
            new_version = push_result.get("version", new_version + 1)
            break
        except Exception as e:
-
-
-
-
+            conflict_current_version = _extract_httpx_conflict_current_version(e)
+            is_conflict = (
+                HTTPX_AVAILABLE
+                and isinstance(e, httpx.HTTPStatusError)
+                and e.response is not None
+                and e.response.status_code == 409
+            )
+            if is_conflict and attempt < MAX_SYNC_RETRIES - 1:
+                if conflict_current_version is not None:
+                    _progress(
+                        "Version conflict "
+                        f"(remote={conflict_current_version}, local_expected={new_version}), "
+                        f"retrying ({attempt + 2}/{MAX_SYNC_RETRIES})..."
+                    )
+                else:
+                    _progress(f"Version conflict, retrying ({attempt + 2}/{MAX_SYNC_RETRIES})...")
+
+                # Re-pull to re-merge any remote changes; also bypass potential CDN caching.
                remote_data = _pull_remote(backend_url, share_id)
                if remote_data.get("success"):
                    conv = remote_data["data"]
-
+                    remote_version = conv.get("sync_metadata", {}).get("sync_version", 0)
+                    try:
+                        remote_version_int = int(remote_version)
+                    except Exception:
+                        remote_version_int = 0
+
+                    if conflict_current_version is not None:
+                        remote_version_int = max(remote_version_int, conflict_current_version)
+
+                    new_version = remote_version_int
+
+                    # Rebuild merge inputs from refreshed remote snapshot.
+                    remote_sessions_data = conv.get("sessions", [])
+                    remote_event = conv.get("event", {})
+
+                    remote_content_hashes = set()
+                    for session_data in remote_sessions_data:
+                        for turn_data in session_data.get("turns", []):
+                            h = turn_data.get("content_hash")
+                            if h:
+                                remote_content_hashes.add(h)
+
+                    # Re-import remote sessions (idempotent via content_hash dedup) and re-merge metadata.
+                    try:
+                        from .import_shares import import_session_with_turns
+
+                        local_content_hashes = db.get_agent_content_hashes(agent_id)
+                        for session_data in remote_sessions_data:
+                            session_id = session_data.get("session_id", "")
+                            session_turns = session_data.get("turns", [])
+
+                            new_turns = [
+                                t
+                                for t in session_turns
+                                if t.get("content_hash")
+                                and t["content_hash"] not in local_content_hashes
+                            ]
+
+                            existing_session = db.get_session_by_id(session_id)
+                            session_is_new = existing_session is None
+                            session_needs_linking = (
+                                existing_session and existing_session.agent_id != agent_id
+                            )
+
+                            if not new_turns and not session_is_new and not session_needs_linking:
+                                continue
+
+                            should_count = session_is_new or session_needs_linking
+                            try:
+                                os.environ["REALIGN_DISABLE_AUTO_SUMMARIES"] = "1"
+                                import_result = import_session_with_turns(
+                                    session_data,
+                                    f"agent-{agent_id}",
+                                    agent_info.share_url,
+                                    db,
+                                    force=False,
+                                )
+                                if (
+                                    import_result.get("sessions", 0) > 0
+                                    or import_result.get("turns", 0) > 0
+                                    or should_count
+                                ):
+                                    sessions_pulled += 1
+                            except Exception as ie:
+                                logger.error(f"Failed to import remote session {session_id}: {ie}")
+                                if should_count:
+                                    sessions_pulled += 1
+
+                                try:
+                                    db.update_session_agent_id(session_id, agent_id)
+                                except Exception as le:
+                                    logger.error(f"Failed to link session {session_id} to agent: {le}")
+
+                        # Re-merge name/description: last-write-wins by updated_at.
+                        refreshed_remote_updated_at = remote_event.get("updated_at")
+                        remote_dt = _parse_iso_datetime_to_utc(refreshed_remote_updated_at)
+                        local_dt = (
+                            _ensure_aware_utc(agent_info.updated_at)
+                            if isinstance(agent_info.updated_at, datetime)
+                            else None
+                        )
+                        if remote_dt and local_dt and remote_dt > local_dt:
+                            remote_name = remote_event.get("title")
+                            remote_desc = remote_event.get("description")
+                            updates = {}
+                            if remote_name and remote_name != agent_info.name:
+                                updates["name"] = remote_name
+                            if remote_desc is not None and remote_desc != agent_info.description:
+                                updates["description"] = remote_desc
+                            if updates:
+                                db.update_agent_info(agent_id, **updates)
+                                description_updated = True
+                                agent_info = db.get_agent_info(agent_id)
+                    except Exception as merge_e:
+                        logger.warning(
+                            f"Failed to refresh merge after conflict (non-fatal): {merge_e}"
+                        )
+
+                    # Rebuild payload from refreshed local state before retrying.
+                    local_sessions = db.get_sessions_by_agent_id(agent_id)
+                    merged_conversation = _build_merged_conversation_data(
+                        agent_info=agent_info,
+                        agent_id=agent_id,
+                        sessions=local_sessions,
+                        db=db,
+                        contributor_token=agent_info.share_contributor_token,
+                    )
+
+                elif conflict_current_version is not None:
+                    # If pull fails, fall back to server-provided current_version.
+                    new_version = max(new_version, conflict_current_version)
                continue
            else:
                logger.error(f"Push failed after {attempt + 1} attempts: {e}")
@@ -355,7 +522,8 @@ def _pull_remote(backend_url: str, share_id: str) -> dict:
        from .import_shares import download_share_data
 
        share_url = f"{backend_url}/share/{share_id}"
-
+        cache_buster = str(int(time.time() * 1000))
+        return download_share_data(share_url, password=None, cache_buster=cache_buster)
    except Exception as e:
        return {"success": False, "error": str(e)}
 
@@ -381,8 +549,12 @@ def _build_merged_conversation_data(
        "description": agent_info.description or "",
        "event_type": "agent",
        "status": "active",
-        "created_at":
-
+        "created_at": (
+            _ensure_aware_utc(agent_info.created_at).isoformat() if agent_info.created_at else None
+        ),
+        "updated_at": (
+            _ensure_aware_utc(agent_info.updated_at).isoformat() if agent_info.updated_at else None
+        ),
    }
 
    sessions_data = []
@@ -400,32 +572,38 @@ def _build_merged_conversation_data(
            except Exception:
                continue
 
-            turns_data.append(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            turns_data.append(
+                {
+                    "turn_id": turn.id,
+                    "turn_number": turn.turn_number,
+                    "content_hash": turn.content_hash,
+                    "timestamp": turn.timestamp.isoformat() if turn.timestamp else None,
+                    "llm_title": turn.llm_title or "",
+                    "llm_description": turn.llm_description,
+                    "user_message": turn.user_message,
+                    "assistant_summary": turn.assistant_summary,
+                    "model_name": turn.model_name,
+                    "git_commit_hash": turn.git_commit_hash,
+                    "messages": messages,
+                }
+            )
+
+        sessions_data.append(
+            {
+                "session_id": session.id,
+                "session_type": session.session_type or "unknown",
+                "workspace_path": session.workspace_path,
+                "session_title": session.session_title,
+                "session_summary": session.session_summary,
+                "started_at": session.started_at.isoformat() if session.started_at else None,
+                "last_activity_at": (
+                    session.last_activity_at.isoformat() if session.last_activity_at else None
+                ),
+                "created_by": session.created_by,
+                "shared_by": session.shared_by,
+                "turns": turns_data,
+            }
+        )
 
    username = os.environ.get("USER") or os.environ.get("USERNAME") or "anonymous"
 
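The retry block added above follows a standard optimistic-concurrency pattern: each push carries the locally expected version, a 409 response reports the server's current_version, and the client re-pulls, re-merges, and retries. As a rough standalone sketch only (the endpoint shape, payload fields, and helper name are hypothetical, not aline's actual backend API):

import httpx

MAX_SYNC_RETRIES = 3

def push_with_retry(client: httpx.Client, url: str, payload: dict, version: int) -> int:
    # Push a payload with an expected version; on a 409 conflict, rebase on the
    # server-reported version and retry up to MAX_SYNC_RETRIES times.
    for attempt in range(MAX_SYNC_RETRIES):
        try:
            resp = client.put(url, json={**payload, "expected_version": version})
            resp.raise_for_status()
            # Hypothetical success shape: {"version": N}
            return resp.json().get("version", version + 1)
        except httpx.HTTPStatusError as exc:
            if exc.response.status_code != 409 or attempt == MAX_SYNC_RETRIES - 1:
                raise
            # Hypothetical conflict shape: {"current_version": N}; fall back to a blind bump.
            try:
                version = int(exc.response.json().get("current_version", version + 1))
            except Exception:
                version += 1
    raise RuntimeError("retries exhausted")  # unreachable: the loop either returns or raises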
realign/commands/upgrade.py
CHANGED
@@ -1,6 +1,7 @@
 """ReAlign upgrade command - Upgrade database schema to latest version."""
 
 from pathlib import Path
+import os
 import subprocess
 import typer
 from rich.console import Console
@@ -13,6 +14,97 @@ from ..db.schema import SCHEMA_VERSION, get_migration_scripts
 console = Console()
 
 
+def _env_truthy(name: str) -> bool:
+    value = os.environ.get(name, "").strip().lower()
+    return value in {"1", "true", "yes", "y", "on"}
+
+
+def _stop_daemons_best_effort_for_package_upgrade() -> None:
+    """Stop watcher/worker daemons (best-effort) before upgrading the package.
+
+    Upgrading the installed package while old daemons are running can leave the system
+    in a mixed-version state (old processes continue running old code).
+    """
+    try:
+        from . import watcher as watcher_cmd
+
+        try:
+            watcher_running = bool(watcher_cmd._is_watcher_running())
+        except Exception:
+            watcher_running = False
+        if watcher_running:
+            console.print("[dim]Stopping watcher for upgrade...[/dim]")
+            try:
+                watcher_cmd.watcher_stop_command()
+            except Exception:
+                pass
+    except Exception:
+        pass
+
+    try:
+        from . import worker as worker_cmd
+
+        try:
+            worker_running, _pid, _mode = worker_cmd.detect_worker_process()
+        except Exception:
+            worker_running = False
+        if worker_running:
+            console.print("[dim]Stopping worker for upgrade...[/dim]")
+            try:
+                worker_cmd.worker_stop_command()
+            except Exception:
+                pass
+    except Exception:
+        pass
+
+
+def _detect_running_tmux_dashboard() -> tuple[bool, str, str]:
+    """Detect an Aline-managed tmux dashboard session (best-effort)."""
+    try:
+        from ..dashboard.tmux_manager import OUTER_SESSION, OUTER_SOCKET  # type: ignore
+
+        outer_session = str(OUTER_SESSION)
+        outer_socket = str(OUTER_SOCKET)
+    except Exception:
+        # Keep defaults stable for older versions / import failures.
+        outer_session = "aline"
+        outer_socket = "aline_dash"
+
+    try:
+        proc = subprocess.run(
+            ["tmux", "-L", outer_socket, "has-session", "-t", outer_session],
+            capture_output=True,
+            text=True,
+            timeout=1.5,
+            check=False,
+        )
+        return proc.returncode == 0, outer_socket, outer_session
+    except Exception:
+        return False, outer_socket, outer_session
+
+
+def _run_doctor_after_upgrade() -> None:
+    if _env_truthy("ALINE_SKIP_DOCTOR_AFTER_UPGRADE"):
+        console.print("[dim]Skipping 'aline doctor' (ALINE_SKIP_DOCTOR_AFTER_UPGRADE=1).[/dim]\n")
+        return
+
+    console.print("\n[dim]Running 'aline doctor' to finalize the update...[/dim]\n")
+    try:
+        result = subprocess.run(["aline", "doctor"], capture_output=False)
+        if result.returncode == 0:
+            console.print("\n[green]✓ 'aline doctor' completed.[/green]\n")
+        else:
+            console.print(
+                f"\n[yellow]⚠ 'aline doctor' exited with code {result.returncode}.[/yellow]\n"
+            )
+    except FileNotFoundError:
+        console.print(
+            "\n[yellow]⚠ Could not find 'aline' on PATH to run doctor. Please run: aline doctor[/yellow]\n"
+        )
+    except Exception as e:
+        console.print(f"\n[yellow]⚠ Failed to run 'aline doctor': {e}[/yellow]\n")
+
+
 def get_latest_pypi_version() -> Optional[str]:
     """Fetch the latest version of aline-ai from PyPI.
 
@@ -110,6 +202,30 @@ def check_and_prompt_update() -> bool:
        console.print("[dim]Update skipped.[/dim]\n")
        return False
 
+    # Best practice: stop daemons and close dashboards before upgrading.
+    # Otherwise users can end up with old processes still running old code.
+    dash_running, dash_socket, dash_session = _detect_running_tmux_dashboard()
+    if dash_running:
+        console.print(
+            "\n[bold yellow]⚠ Detected a running Aline dashboard tmux session.[/bold yellow]"
+        )
+        console.print(
+            f"[dim]Close it before upgrading to avoid mixed versions. "
+            f"(Command: tmux -L {dash_socket} kill-session -t {dash_session})[/dim]\n"
+        )
+        try:
+            proceed = console.input(
+                "[dim]Continue upgrade anyway? ([/dim][green]y[/green][dim]/[/dim][yellow]n[/yellow][dim]):[/dim] "
+            ).strip().lower()
+        except (EOFError, KeyboardInterrupt):
+            console.print("\n[dim]Update skipped.[/dim]\n")
+            return False
+        if proceed not in ("y", "yes"):
+            console.print("[dim]Update skipped.[/dim]\n")
+            return False
+
+    _stop_daemons_best_effort_for_package_upgrade()
+
    # Perform update using pipx
    console.print("\n[bold]Updating aline-ai via pipx...[/bold]\n")
 
@@ -123,6 +239,7 @@ def check_and_prompt_update() -> bool:
        console.print(
            f"\n[bold green]✓ Successfully updated to {latest_version}![/bold green]"
        )
+        _run_doctor_after_upgrade()
        console.print("[dim]Please restart 'aline' to use the new version.[/dim]\n")
        return True
    else: