aline-ai 0.7.2__py3-none-any.whl → 0.7.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/METADATA +1 -1
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/RECORD +12 -12
- realign/__init__.py +1 -1
- realign/commands/export_shares.py +188 -65
- realign/commands/sync_agent.py +52 -0
- realign/dashboard/app.py +2 -3
- realign/dashboard/tmux_manager.py +15 -14
- realign/dashboard/widgets/agents_panel.py +235 -217
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/WHEEL +0 -0
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/entry_points.txt +0 -0
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/licenses/LICENSE +0 -0
- {aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/top_level.txt +0 -0
{aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
-aline_ai-0.7.
-realign/__init__.py,sha256=
+aline_ai-0.7.3.dist-info/licenses/LICENSE,sha256=H8wTqV5IF1oHw_HbBtS1PSDU8G_q81yblEIL_JfV8Vo,1077
+realign/__init__.py,sha256=ReJTANkYJVgZYwHExBSBOBiJZxdi2HY4VluOTHSxRns,1623
 realign/agent_names.py,sha256=H4oVJMkqg1ZYCk58vD_Jh9apaAHSFJRswa-C9SPdJxc,1171
 realign/auth.py,sha256=d_1yvCwluN5iIrdgjtuSKpOYAksDzrzNgntKacLVJrw,16583
 realign/claude_detector.py,sha256=ZLSJacMo6zzQclXByABKA70UNpstxqIv3fPGqdpA934,2792
@@ -41,23 +41,23 @@ realign/commands/auth.py,sha256=wcs1lUcSXxv75WcGruzyZ3kgi0xXA8W4lNnUwM4a3CI,1173
 realign/commands/config.py,sha256=nYnu_h2pk7GODcrzrV04K51D-s7v06FlRXHJ0HJ-gvU,6732
 realign/commands/context.py,sha256=pM2KfZHVkB-ou4nBhFvKSwnYliLBzwN3zerLyBAbhfE,7095
 realign/commands/doctor.py,sha256=0c1TZuA_cw1CSU0yKMVRU-18uTxdqjXKJ8lP2CTTNSQ,20656
-realign/commands/export_shares.py,sha256=
+realign/commands/export_shares.py,sha256=O2yRZT4S2ANoswLwDDmA1mau1nEvBVbmSXD4ST6Id_o,153150
 realign/commands/import_shares.py,sha256=Jx_7HVSg7SrGGKLDxsf_UqoStDimw8B26uKkqNFF6t8,33071
 realign/commands/init.py,sha256=6rBr1LVIrQLbUH_UvoDhkF1qXmMh2xkjNWCYAUz5Tho,35274
 realign/commands/restore.py,sha256=s2BxQZHxQw9r12NzRVsK20KlGafy5AIoSjWMo5PcnHY,11173
 realign/commands/search.py,sha256=QlUDzRDD6ebq21LTtLe5-OZM62iwDrDqfbnXbuxfklU,27516
-realign/commands/sync_agent.py,sha256=
+realign/commands/sync_agent.py,sha256=sopzUQ6kiRgiBlcEReGAWCRoqrHpk3nAx75qXSgnNi4,17082
 realign/commands/upgrade.py,sha256=L3PLOUIN5qAQTbkfoVtSsIbbzEezA_xjjk9F1GMVfjw,12781
 realign/commands/watcher.py,sha256=4WTThIgr-Z5guKh_JqGDcPmerr97XiHrVaaijmckHsA,134350
 realign/commands/worker.py,sha256=jTu7Pj60nTnn7SsH3oNCNnO6zl4TIFCJVNSC1OoQ_0o,23363
 realign/dashboard/__init__.py,sha256=QZkHTsGityH8UkF8rmvA3xW7dMXNe0swEWr443qfgCM,128
-realign/dashboard/app.py,sha256=
+realign/dashboard/app.py,sha256=XLPqvPwGuR5Tyu6uz9T88yQSc4wq8Afu0h7pWH5A8_k,8161
 realign/dashboard/clipboard.py,sha256=81frq83E_urqLkwuCvtl0hiTEjavtdQn8kCi72jJWcs,1207
 realign/dashboard/layout.py,sha256=sZxmFj6QTbkois9MHTvBEMMcnaRVehCDqugdbiFx10k,9072
 realign/dashboard/local_api.py,sha256=Roq74etTJR0uOiHE3uIe7sqVITjS5JGQEF4g0nmUm5Q,4332
 realign/dashboard/state.py,sha256=V7zBKvyDgqdXv68XHxV4T8xf3IhYbI5W33UmYW3_hyM,1139
 realign/dashboard/terminal_backend.py,sha256=MlDfwtqhftyQK6jDNizQGFjAWIo5Bx2TDpSnP3MCZVM,3375
-realign/dashboard/tmux_manager.py,sha256=
+realign/dashboard/tmux_manager.py,sha256=HJwB2Wpz-I4OrNT3Db8gKCLifmHdMCalA-UONBaLMG8,34564
 realign/dashboard/backends/__init__.py,sha256=POROX7YKtukYZcLB1pi_kO0sSEpuO3y-hwmF3WIN1Kk,163
 realign/dashboard/backends/iterm2.py,sha256=XYYJT5lrrp4pW_MyEqPZYkRI0qyKUwJlezwMidgnsHc,21390
 realign/dashboard/backends/kitty.py,sha256=5jdkR1f2PwB8a4SnS3EG6uOQ2XU-PB7-cpKBfIJq3hU,12066
@@ -72,7 +72,7 @@ realign/dashboard/screens/session_detail.py,sha256=TBkHqSHyMxsLB2QdZq9m1EoiH8oRV
 realign/dashboard/screens/share_import.py,sha256=hl2x0yGVycsoUI76AmdZTAV-br3Q6191g5xHHrZ8hOA,6318
 realign/dashboard/styles/dashboard.tcss,sha256=9W5Tx0lgyGb4HU-z-Kn7gBdexIK0aPe0bkVn2k_AseM,3288
 realign/dashboard/widgets/__init__.py,sha256=dXsOnbeu_8XhP-6Bu6-R_0LNGqsSM6x7dG7FCDumpa8,460
-realign/dashboard/widgets/agents_panel.py,sha256=
+realign/dashboard/widgets/agents_panel.py,sha256=pqXZhzSL84lzJPqGGGsfsGJGVlVo2iCyHByXM4_ITCM,47083
 realign/dashboard/widgets/config_panel.py,sha256=J6A_rxGVqNu5TMFcWELWgdX1nFCHAjKprFMMp7mBDKo,18203
 realign/dashboard/widgets/events_table.py,sha256=0cMvE0KdZFBZyvywv7vlt005qsR0aLQnQiMf3ZzK7RY,30218
 realign/dashboard/widgets/header.py,sha256=0HHCFXX7F3C6HII-WDwOJwWkJrajmKPWmdoMWyOkn9E,1587
@@ -106,8 +106,8 @@ realign/triggers/next_turn_trigger.py,sha256=-x80_I-WmIjXXzQHEPBykgx_GQW6oKaLDQx
 realign/triggers/registry.py,sha256=dkIjSd8Bg-hF0nxaO2Fi2K-0Zipqv6vVjc-HYSrA_fY,3656
 realign/triggers/turn_status.py,sha256=wAZEhXDAmDoX5F-ohWfSnZZ0eA6DAJ9svSPiSv_f6sg,6041
 realign/triggers/turn_summary.py,sha256=f3hEUshgv9skJ9AbfWpoYs417lsv_HK2A_vpPjgryO4,4467
-aline_ai-0.7.
-aline_ai-0.7.
-aline_ai-0.7.
-aline_ai-0.7.
-aline_ai-0.7.
+aline_ai-0.7.3.dist-info/METADATA,sha256=qbtgEyiKE5FSJk_zjGsOTKm8s89Ckqpnw8wGM8RFezA,1597
+aline_ai-0.7.3.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+aline_ai-0.7.3.dist-info/entry_points.txt,sha256=TvYELpMoWsUTcQdMV8tBHxCbEf_LbK4sESqK3r8PM6Y,78
+aline_ai-0.7.3.dist-info/top_level.txt,sha256=yIL3s2xv9nf1GwD5n71Aq_JEIV4AfzCIDNKBzewuRm4,8
+aline_ai-0.7.3.dist-info/RECORD,,
realign/__init__.py
CHANGED
realign/commands/export_shares.py
CHANGED
@@ -1703,6 +1703,21 @@ def _update_share_content(
     if not HTTPX_AVAILABLE:
         raise RuntimeError("httpx package not installed. Run: pip install httpx")
 
+    # Large payloads can exceed serverless request limits; fall back to chunked update.
+    update_payload = {"conversation_data": conversation_data}
+    payload_size = len(json.dumps(update_payload).encode("utf-8"))
+    if payload_size > CHUNKED_UPLOAD_THRESHOLD:
+        logger.info(
+            f"Update payload size ({payload_size / 1024 / 1024:.2f}MB) exceeds threshold, using chunked update"
+        )
+        return _chunked_update_share_content(
+            backend_url=backend_url,
+            share_id=share_id,
+            token=token,
+            conversation_data=conversation_data,
+            expected_version=expected_version,
+        )
+
     headers = {
         "X-Token": token,
         "X-Expected-Version": str(expected_version),
@@ -1712,7 +1727,7 @@ def _update_share_content(
     response = httpx.put(
         f"{backend_url}/api/share/{share_id}",
         headers=headers,
-        json=
+        json=update_payload,
         timeout=60.0,
     )
     response.raise_for_status()
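Read together, the two hunks above say: measure the JSON-encoded payload first, and only fall back to the chunked path when it is too large for a single PUT. A minimal standalone sketch of that check (the threshold value below is a placeholder for illustration; the real CHUNKED_UPLOAD_THRESHOLD is defined elsewhere in export_shares.py and may differ):

import json

# Placeholder value for illustration only.
CHUNKED_UPLOAD_THRESHOLD = 4 * 1024 * 1024


def needs_chunked_update(conversation_data: dict) -> bool:
    """Return True when the serialized update payload is too large for a single PUT."""
    update_payload = {"conversation_data": conversation_data}
    payload_size = len(json.dumps(update_payload).encode("utf-8"))
    return payload_size > CHUNKED_UPLOAD_THRESHOLD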
@@ -1752,64 +1767,148 @@ def _upload_chunks_and_complete(
     upload_id: str,
     backend_url: str,
     progress_callback: Optional[Callable] = None,
-
-) ->
+    headers_provider: Optional[Callable[[], Dict[str, str]]] = None,
+) -> Optional[dict]:
     """
     Helper function to upload chunks and complete the upload.
     Can be run in background thread.
     """
+    import time
+
     total_chunks = len(chunks)
-
+
+    def _headers() -> Dict[str, str]:
+        try:
+            return dict(headers_provider()) if headers_provider else {}
+        except Exception:
+            return {}
+
+    def _post_with_retries(url: str, payload: dict, timeout: float) -> Optional[dict]:
+        max_attempts = 3
+        for attempt in range(max_attempts):
+            try:
+                response = httpx.post(url, json=payload, headers=_headers(), timeout=timeout)
+                response.raise_for_status()
+                return response.json()
+            except httpx.HTTPStatusError as e:
+                status = getattr(e.response, "status_code", None)
+                retriable = status in (401, 403, 409, 429, 500, 502, 503, 504)
+                if retriable and attempt < max_attempts - 1:
+                    time.sleep(0.5 * (2**attempt))
+                    continue
+                logger.error(f"POST failed: {e}")
+                return None
+            except httpx.HTTPError as e:
+                if attempt < max_attempts - 1:
+                    time.sleep(0.5 * (2**attempt))
+                    continue
+                logger.error(f"POST failed: {e}")
+                return None
 
     # Upload each chunk
     for i, chunk in enumerate(chunks):
         if progress_callback:
             progress_callback(i + 1, total_chunks + 2, f"Uploading chunk {i + 1}/{total_chunks}...")
 
-
-
-
-
-            }
+        chunk_payload = {
+            "upload_id": upload_id,
+            "chunk_index": i,
+            "data": chunk,
+        }
 
-
-
-
-
-
-
-
-        logger.debug(
-            f"Chunk {i + 1}/{total_chunks} uploaded, received: {result.get('received_chunks')}"
-        )
+        result = _post_with_retries(
+            f"{backend_url}/api/share/chunk/upload",
+            chunk_payload,
+            timeout=60.0,  # Longer timeout for chunk uploads
+        )
+        if not result:
+            logger.error(f"Failed to upload chunk {i}")
+            return None
 
-
-
-
-            return
+        logger.debug(
+            f"Chunk {i + 1}/{total_chunks} uploaded, received: {result.get('received_chunks')}"
+        )
 
     # Complete upload
     if progress_callback:
         progress_callback(total_chunks + 1, total_chunks + 2, "Finalizing upload...")
 
-
-
-
-
-
-
-    )
-
-    result = response.json()
-    logger.info(f"Chunked upload completed: {result.get('share_url')}")
+    result = _post_with_retries(
+        f"{backend_url}/api/share/chunk/complete",
+        {"upload_id": upload_id},
+        timeout=60.0,
+    )
+    if not result:
+        logger.error("Failed to complete chunked upload")
+        return None
 
-
-        progress_callback(total_chunks + 2, total_chunks + 2, "Upload complete!")
+    logger.info(f"Chunked upload completed: {result.get('share_url')}")
 
-
-
+    if progress_callback:
+        progress_callback(total_chunks + 2, total_chunks + 2, "Upload complete!")
+
+    return result
+
+
+def _chunked_update_share_content(
+    backend_url: str,
+    share_id: str,
+    token: str,
+    conversation_data: dict,
+    expected_version: int,
+) -> dict:
+    data_str = json.dumps(conversation_data)
+    data_bytes = data_str.encode("utf-8")
+    total_size = len(data_bytes)
+
+    chunks: List[str] = []
+    for i in range(0, total_size, CHUNK_SIZE):
+        chunk_data = data_bytes[i : i + CHUNK_SIZE]
+        chunks.append(base64.b64encode(chunk_data).decode("ascii"))
+
+    total_chunks = len(chunks)
+    logger.info(f"Splitting update into {total_chunks} chunks")
+
+    init_payload = {
+        "total_chunks": total_chunks,
+        "total_size": total_size,
+        "metadata": {},
+        "encrypted_info": None,
+        "ui_metadata": conversation_data.get("ui_metadata"),
+        "share_id": share_id,
+        "operation": "update",
+    }
+
+    def headers_provider() -> Dict[str, str]:
+        # Keep bearer auth when available; required for create, optional for update.
+        headers = get_auth_headers()
+        headers["X-Token"] = token
+        headers["X-Expected-Version"] = str(expected_version)
+        return headers
+
+    init_headers = headers_provider()
+    init_headers["Content-Type"] = "application/json"
+
+    response = httpx.post(
+        f"{backend_url}/api/share/chunk/init",
+        json=init_payload,
+        headers=init_headers,
+        timeout=30.0,
+    )
+    response.raise_for_status()
+    init_result = response.json()
+    upload_id = init_result["upload_id"]
+
+    result = _upload_chunks_and_complete(
+        chunks,
+        upload_id,
+        backend_url,
+        progress_callback=None,
+        headers_provider=headers_provider,
+    )
+    if not result:
+        raise RuntimeError("Failed to complete chunked update")
+    return result
 
 
 def _chunked_upload(
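The _post_with_retries helper added above retries transient HTTP failures with exponential backoff (0.5 s, 1 s, 2 s between attempts). The same pattern in isolation, not tied to aline's endpoints or header plumbing, looks roughly like this:

import time
import httpx


def post_json_with_retries(url: str, payload: dict, max_attempts: int = 3) -> dict | None:
    """POST JSON and retry failures with exponential backoff (0.5s, 1s, 2s)."""
    for attempt in range(max_attempts):
        try:
            response = httpx.post(url, json=payload, timeout=60.0)
            response.raise_for_status()
            return response.json()
        except httpx.HTTPError:
            if attempt < max_attempts - 1:
                time.sleep(0.5 * (2 ** attempt))
                continue
            return None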
@@ -1857,8 +1956,11 @@ def _chunked_upload(
     if progress_callback:
         progress_callback(0, total_chunks + 2, "Initializing chunked upload...")
 
-
-
+    def headers_provider() -> Dict[str, str]:
+        # Refresh token if needed between chunks.
+        headers = get_auth_headers()
+        headers["Content-Type"] = "application/json"
+        return headers
 
     # Step 1: Initialize upload session (now returns share_url immediately)
     try:
@@ -1877,7 +1979,7 @@ def _chunked_upload(
         response = httpx.post(
             f"{backend_url}/api/share/chunk/init",
             json=init_payload,
-            headers=
+            headers=headers_provider(),
             timeout=30.0,
         )
         response.raise_for_status()
@@ -1902,7 +2004,7 @@ def _chunked_upload(
         # but user already has the share URL displayed
         thread = threading.Thread(
             target=_upload_chunks_and_complete,
-            args=(chunks, upload_id, backend_url, None,
+            args=(chunks, upload_id, backend_url, None, headers_provider),  # No callback in background
             daemon=False,  # Important: let thread complete before process exits
         )
         thread.start()
@@ -1917,14 +2019,17 @@ def _chunked_upload(
         }
 
     # Foreground mode: upload chunks synchronously
-    _upload_chunks_and_complete(
-
-
-
-        "
-
-
-
+    result = _upload_chunks_and_complete(
+        chunks, upload_id, backend_url, progress_callback, headers_provider
+    )
+    if not result:
+        raise RuntimeError("Failed to complete chunked upload")
+    # Preserve init fields if server didn't echo them
+    result.setdefault("share_id", share_id)
+    result.setdefault("share_url", share_url)
+    result.setdefault("admin_token", admin_token)
+    result.setdefault("expiry_at", expiry_at)
+    return result
 
 
 def upload_to_backend_unencrypted(
@@ -2032,8 +2137,10 @@ def _chunked_upload_unencrypted(
     if progress_callback:
         progress_callback(0, total_chunks + 2, "Initializing chunked upload...")
 
-
-
+    def headers_provider() -> Dict[str, str]:
+        headers = get_auth_headers()
+        headers["Content-Type"] = "application/json"
+        return headers
 
     # Step 1: Initialize upload session (now returns share_url immediately)
     try:
@@ -2048,7 +2155,7 @@ def _chunked_upload_unencrypted(
         response = httpx.post(
             f"{backend_url}/api/share/chunk/init",
             json=init_payload,
-            headers=
+            headers=headers_provider(),
             timeout=30.0,
         )
         response.raise_for_status()
@@ -2073,7 +2180,7 @@ def _chunked_upload_unencrypted(
         # but user already has the share URL displayed
         thread = threading.Thread(
             target=_upload_chunks_and_complete,
-            args=(chunks, upload_id, backend_url, None,
+            args=(chunks, upload_id, backend_url, None, headers_provider),  # No callback in background
            daemon=False,  # Important: let thread complete before process exits
         )
         thread.start()
@@ -2088,14 +2195,16 @@ def _chunked_upload_unencrypted(
         }
 
     # Foreground mode: upload chunks synchronously
-    _upload_chunks_and_complete(
-
-
-
-        "
-
-
-
+    result = _upload_chunks_and_complete(
+        chunks, upload_id, backend_url, progress_callback, headers_provider
+    )
+    if not result:
+        raise RuntimeError("Failed to complete chunked upload")
+    result.setdefault("share_id", share_id)
+    result.setdefault("share_url", share_url)
+    result.setdefault("admin_token", admin_token)
+    result.setdefault("expiry_at", expiry_at)
+    return result
 
 
 def clean_text_for_prompt(text: str) -> str:
@@ -3093,6 +3202,10 @@ def export_shares_interactive_command(
         if not json_output:
             print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
         return 1
+    if not get_auth_headers():
+        if not json_output:
+            print("Error: Login expired. Please run 'aline login' again.", file=sys.stderr)
+        return 1
 
     # Get backend URL
     if backend_url is None:
@@ -3784,6 +3897,12 @@ def export_agent_shares_command(
         if not json_output:
             print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
         return 1
+    # is_logged_in() can be true with an expired token + refresh_token; ensure we actually have an access token
+    # before attempting uploads (otherwise the server returns 401/403 and the UX is confusing).
+    if not get_auth_headers():
+        if not json_output:
+            print("Error: Login expired. Please run 'aline login' again.", file=sys.stderr)
+        return 1
 
     _progress("Fetching agent info...")
 
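Both commands now distinguish "never logged in" from "logged in, but the access token has expired". A rough, self-contained sketch of that two-step guard; is_logged_in and get_auth_headers are passed in as callables here because their real implementations live in realign/auth.py and are not shown in this diff:

import sys
from typing import Callable, Optional


def check_auth(
    is_logged_in: Callable[[], bool],
    get_auth_headers: Callable[[], dict],
    json_output: bool,
) -> Optional[int]:
    """Return an exit code when authentication is unusable, else None to continue."""
    if not is_logged_in():
        if not json_output:
            print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
        return 1
    # A logged-in state with only a refresh token yields empty auth headers,
    # which means there is no usable access token for the upload endpoints.
    if not get_auth_headers():
        if not json_output:
            print("Error: Login expired. Please run 'aline login' again.", file=sys.stderr)
        return 1
    return None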
@@ -3990,14 +4109,18 @@ def export_agent_shares_command(
                 metadata=metadata,
                 backend_url=backend_url,
                 ui_metadata=conversation_data.get("ui_metadata"),
-                background=
+                background=False,
             )
         else:
+            def upload_progress(current: int, total: int, message: str) -> None:
+                _progress(f"{message} ({current}/{total})")
+
             result = upload_to_backend_unencrypted(
                 conversation_data=conversation_data,
                 metadata=metadata,
                 backend_url=backend_url,
-
+                progress_callback=upload_progress,
+                background=False,
             )
     except Exception as e:
         if not json_output:
realign/commands/sync_agent.py
CHANGED
@@ -240,6 +240,58 @@ def sync_agent_command(
         if new_local_turns:
             sessions_pushed += 1
 
+    # Skip push if there's nothing new to send.
+    # This avoids re-uploading large, unchanged payloads (which can hit serverless limits and show up as 403/413).
+    needs_push_metadata = False
+    try:
+        remote_title = remote_event.get("title")
+        remote_desc = remote_event.get("description")
+
+        local_title = agent_info.name
+        local_desc = agent_info.description
+
+        has_metadata_diff = (remote_title != local_title) or (remote_desc != local_desc)
+        if has_metadata_diff and not description_updated:
+            remote_updated_at = remote_event.get("updated_at")
+            remote_dt = None
+            if isinstance(remote_updated_at, str) and remote_updated_at:
+                try:
+                    remote_dt = datetime.fromisoformat(remote_updated_at.replace("Z", "+00:00"))
+                except Exception:
+                    remote_dt = None
+
+            local_dt = getattr(agent_info, "updated_at", None)
+            if hasattr(local_dt, "tzinfo") and local_dt and local_dt.tzinfo is None:
+                local_dt = local_dt.replace(tzinfo=timezone.utc)
+
+            # If remote has no timestamp, assume local should win. Otherwise, push only if local is newer.
+            if remote_dt is None or (local_dt and remote_dt and local_dt > remote_dt):
+                needs_push_metadata = True
+    except Exception as e:
+        logger.warning(f"Failed to compute metadata push necessity (non-fatal): {e}")
+
+    if sessions_pushed == 0 and not needs_push_metadata:
+        now_iso = datetime.now(timezone.utc).isoformat()
+        try:
+            db.update_agent_sync_metadata(
+                agent_id,
+                last_synced_at=now_iso,
+                sync_version=remote_sync_version,
+            )
+        except Exception as e:
+            logger.warning(f"Failed to update local sync metadata after no-op sync: {e}")
+
+        _progress("No changes to push.")
+        _progress("Sync complete!")
+
+        return {
+            "success": True,
+            "sessions_pulled": sessions_pulled,
+            "sessions_pushed": 0,
+            "description_updated": description_updated,
+            "new_sync_version": remote_sync_version,
+        }
+
     # Build full conversation data for push
     merged_conversation = _build_merged_conversation_data(
         agent_info=agent_info,
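The newer-wins comparison above only works when both timestamps are timezone-aware. A small illustrative helper in the same spirit (sync_agent.py does this inline rather than through a function like this):

from datetime import datetime, timezone
from typing import Optional


def to_aware_utc(value) -> Optional[datetime]:
    """Parse an ISO-8601 string (accepting a trailing 'Z') or datetime into an aware UTC datetime."""
    if isinstance(value, str) and value:
        try:
            value = datetime.fromisoformat(value.replace("Z", "+00:00"))
        except ValueError:
            return None
    if isinstance(value, datetime):
        return value if value.tzinfo else value.replace(tzinfo=timezone.utc)
    return None


# Usage: a remote ISO string and a naive local datetime normalize to comparable values.
remote_dt = to_aware_utc("2024-01-01T00:00:00Z")
local_dt = to_aware_utc(datetime(2024, 6, 1, 12, 0))
push_metadata = remote_dt is None or (local_dt is not None and local_dt > remote_dt)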
realign/dashboard/app.py
CHANGED
@@ -58,9 +58,7 @@ class AlineDashboard(App):
         self._native_terminal_mode = self._detect_native_mode()
         self._local_api_server = None
         self._apply_saved_theme()
-        logger.info(
-            f"AlineDashboard initialized (native_terminal={self._native_terminal_mode})"
-        )
+        logger.info(f"AlineDashboard initialized (native_terminal={self._native_terminal_mode})")
 
     def _detect_native_mode(self) -> bool:
         """Detect if native terminal mode should be used."""
@@ -204,6 +202,7 @@ class AlineDashboard(App):
             self._quit_confirm_deadline = now + self._quit_confirm_window_s
             self.notify("Press Ctrl+C again to quit", title="Quit", timeout=2)
 
+
 def run_dashboard(use_native_terminal: bool | None = None) -> None:
     """Run the Aline Dashboard.
 
realign/dashboard/tmux_manager.py
CHANGED
@@ -196,16 +196,17 @@ def _session_id_from_transcript_path(transcript_path: str | None) -> str | None:
 def _load_terminal_state_from_db() -> dict[str, dict[str, str]]:
     """Load terminal state from database (best-effort)."""
     import time as _time
+
     t0 = _time.time()
     try:
         from ..db import get_database
 
         t1 = _time.time()
         db = get_database(read_only=True)
-        logger.
+        logger.debug(f"[PERF] _load_terminal_state_from_db get_database: {_time.time() - t1:.3f}s")
         t2 = _time.time()
         agents = db.list_agents(status="active", limit=100)
-        logger.
+        logger.debug(f"[PERF] _load_terminal_state_from_db list_agents: {_time.time() - t2:.3f}s")
 
         out: dict[str, dict[str, str]] = {}
         for agent in agents:
@@ -711,13 +712,14 @@ def ensure_right_pane(width_percent: int = 50) -> bool:
 
 
 def list_inner_windows() -> list[InnerWindow]:
     import time as _time
+
     t0 = _time.time()
     if not ensure_inner_session():
         return []
-    logger.
+    logger.debug(f"[PERF] list_inner_windows ensure_inner_session: {_time.time() - t0:.3f}s")
     t1 = _time.time()
     state = _load_terminal_state()
-    logger.
+    logger.debug(f"[PERF] list_inner_windows _load_terminal_state: {_time.time() - t1:.3f}s")
     out = (
         _run_inner_tmux(
             [
@@ -840,6 +842,7 @@ def list_inner_windows() -> list[InnerWindow]:
 
 def set_inner_window_options(window_id: str, options: dict[str, str]) -> bool:
     import time as _time
+
     if not ensure_inner_session():
         return False
     ok = True
@@ -848,7 +851,7 @@ def set_inner_window_options(window_id: str, options: dict[str, str]) -> bool:
         # Important: these are per-window (not session-wide) to avoid cross-tab clobbering.
         if _run_inner_tmux(["set-option", "-w", "-t", window_id, key, value]).returncode != 0:
             ok = False
-    logger.
+    logger.debug(f"[PERF] set_inner_window_options {key}: {_time.time() - t0:.3f}s")
     return ok
 
 
@@ -868,15 +871,16 @@ def create_inner_window(
     no_track: bool = False,
 ) -> InnerWindow | None:
     import time as _time
+
     t0 = _time.time()
-    logger.
+    logger.debug("[PERF] create_inner_window START")
     if not ensure_right_pane():
         return None
-    logger.
+    logger.debug(f"[PERF] create_inner_window ensure_right_pane: {_time.time() - t0:.3f}s")
 
     t1 = _time.time()
     existing = list_inner_windows()
-    logger.
+    logger.debug(f"[PERF] create_inner_window list_inner_windows: {_time.time() - t1:.3f}s")
     name = _unique_name((w.window_name for w in existing), base_name)
 
     # Record creation time before creating the window
@@ -897,7 +901,7 @@ def create_inner_window(
         ],
         capture=True,
     )
-    logger.
+    logger.debug(f"[PERF] create_inner_window new-window: {_time.time() - t2:.3f}s")
     if proc.returncode != 0:
         return None
 
@@ -923,7 +927,7 @@ def create_inner_window(
     opts.setdefault(OPT_NO_TRACK, "")
     t3 = _time.time()
     set_inner_window_options(window_id, opts)
-    logger.
+    logger.debug(f"[PERF] create_inner_window set_options: {_time.time() - t3:.3f}s")
 
     _run_inner_tmux(["select-window", "-t", window_id])
 
@@ -947,10 +951,7 @@ def select_inner_window(window_id: str) -> bool:
 def focus_right_pane() -> bool:
     """Focus the right pane (terminal area) in the outer tmux layout."""
     return (
-        _run_outer_tmux(
-            ["select-pane", "-t", f"{OUTER_SESSION}:{OUTER_WINDOW}.1"]
-        ).returncode
-        == 0
+        _run_outer_tmux(["select-pane", "-t", f"{OUTER_SESSION}:{OUTER_WINDOW}.1"]).returncode == 0
     )
 
 
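All of these tmux_manager changes follow one pattern: time each expensive call and log the delta at DEBUG so the [PERF] lines stay out of normal output. A standalone sketch of that pattern as a decorator (the decorator itself is illustrative and not part of tmux_manager.py, which times calls inline):

import functools
import logging
import time

logger = logging.getLogger(__name__)


def log_perf(label: str):
    """Decorator that logs how long the wrapped call took, at DEBUG level."""
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            t0 = time.time()
            try:
                return fn(*args, **kwargs)
            finally:
                logger.debug(f"[PERF] {label}: {time.time() - t0:.3f}s")
        return wrapper
    return decorator


@log_perf("list_inner_windows")
def list_inner_windows_stub() -> list:
    time.sleep(0.01)  # stand-in for the real tmux call
    return []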
realign/dashboard/widgets/agents_panel.py
CHANGED
@@ -6,6 +6,7 @@ import asyncio
 import json as _json
 import re
 import shlex
+import time
 from pathlib import Path
 from typing import Optional
 
@@ -171,6 +172,7 @@ class AgentsPanel(Container, can_focus=True):
         self._share_agent_id: Optional[str] = None
         self._sync_agent_id: Optional[str] = None
         self._refresh_timer = None
+        self._last_refresh_error_at: float | None = None
 
     def compose(self) -> ComposeResult:
         with Horizontal(classes="summary"):
@@ -180,7 +182,8 @@ class AgentsPanel(Container, can_focus=True):
 
     def on_show(self) -> None:
         if self._refresh_timer is None:
-
+            # Refresh frequently, but avoid hammering SQLite/tmux (can cause transient empty UI).
+            self._refresh_timer = self.set_interval(2.0, self._on_refresh_timer)
         else:
             try:
                 self._refresh_timer.resume()
@@ -212,100 +215,127 @@ class AgentsPanel(Container, can_focus=True):
 
     def _collect_agents(self) -> list[dict]:
         """Collect agent info with their terminals."""
-        agents = []
-        try:
-            from ...db import get_database
+        agents: list[dict] = []
 
-
-
+        from ...db import get_database
+
+        # Dashboard should prefer correctness/stability over ultra-low lock timeouts.
+        db = get_database(read_only=True, connect_timeout_seconds=2.0)
+
+        # Critical: if this fails, let it surface as a worker ERROR so we can keep
+        # the last rendered UI instead of flashing an empty agent list.
+        agent_infos = db.list_agent_info()
+
+        # Best-effort: missing pieces should degrade gracefully (names still render).
+        try:
             active_terminals = db.list_agents(status="active", limit=1000)
+        except Exception as e:
+            logger.debug(f"Failed to list active terminals: {e}")
+            active_terminals = []
 
-
+        try:
             latest_links = db.list_latest_window_links(limit=2000)
-
+        except Exception:
+            latest_links = []
+        link_by_terminal = {
+            l.terminal_id: l for l in latest_links if getattr(l, "terminal_id", None)
+        }
 
-
+        try:
             tmux_windows = tmux_manager.list_inner_windows()
-
-
-
+        except Exception as e:
+            logger.debug(f"Failed to list tmux windows: {e}")
+            tmux_windows = []
+        terminal_to_window = {
+            w.terminal_id: w for w in tmux_windows if getattr(w, "terminal_id", None)
+        }
 
-
-
-
-
-
-
-
+        # Collect all session_ids for title lookup
+        session_ids: list[str] = []
+        for t in active_terminals:
+            link = link_by_terminal.get(t.id)
+            if link and getattr(link, "session_id", None):
+                session_ids.append(link.session_id)
+                continue
+            window = terminal_to_window.get(t.id)
+            if window and getattr(window, "session_id", None):
+                session_ids.append(window.session_id)
+
+        titles = self._fetch_session_titles(session_ids)
+
+        # Map agent_info.id -> list of terminals
+        agent_to_terminals: dict[str, list[dict]] = {}
+        for t in active_terminals:
+            # Find which agent_info this terminal belongs to
+            agent_info_id = None
+
+            link = link_by_terminal.get(t.id)
+
+            # Method 1: Check source field for "agent:{agent_info_id}" format
+            source = t.source or ""
+            if source.startswith("agent:"):
+                agent_info_id = source[6:]
+
+            # Method 2: WindowLink agent_id
+            if not agent_info_id and link and getattr(link, "agent_id", None):
+                agent_info_id = link.agent_id
+
+            # Method 3: Fallback - check tmux window's session.agent_id
+            if not agent_info_id:
                 window = terminal_to_window.get(t.id)
-                        if window and window
-
-
-                titles = self._fetch_session_titles(session_ids)
-
-                # Map agent_info.id -> list of terminals
-                agent_to_terminals: dict[str, list[dict]] = {}
-                for t in active_terminals:
-                    # Find which agent_info this terminal belongs to
-                    agent_info_id = None
-
-                    link = link_by_terminal.get(t.id)
-
-                    # Method 1: Check source field for "agent:{agent_info_id}" format
-                    source = t.source or ""
-                    if source.startswith("agent:"):
-                        agent_info_id = source[6:]
-
-                    # Method 2: WindowLink agent_id
-                    if not agent_info_id and link and link.agent_id:
-                        agent_info_id = link.agent_id
-
-                    # Method 3: Fallback - check tmux window's session.agent_id
-                    if not agent_info_id:
-                        window = terminal_to_window.get(t.id)
-                        if window and window.session_id:
+                if window and getattr(window, "session_id", None):
+                    try:
                         session = db.get_session_by_id(window.session_id)
-
-
+                    except Exception:
+                        session = None
+                    if session:
+                        agent_info_id = session.agent_id
 
-
-
-            agent_to_terminals[agent_info_id] = []
+            if agent_info_id:
+                agent_to_terminals.setdefault(agent_info_id, [])
 
-
-
-
-
-            )
-
-
-
-
-                        "terminal_id": t.id,
-                        "session_id": session_id,
-                        "provider": link.provider if link and link.provider else (t.provider or ""),
-                        "session_type": t.session_type or "",
-                        "title": title,
-                        "cwd": t.cwd or "",
-                    }
+                # Get session_id from windowlink (preferred) or tmux window
+                window = terminal_to_window.get(t.id)
+                session_id = (
+                    link.session_id
+                    if link and getattr(link, "session_id", None)
+                    else (
+                        window.session_id
+                        if window and getattr(window, "session_id", None)
+                        else None
                     )
+                )
+                title = titles.get(session_id, "") if session_id else ""
 
-
-            terminals = agent_to_terminals.get(info.id, [])
-            agents.append(
+                agent_to_terminals[agent_info_id].append(
                     {
-                        "
-                        "
-                        "
-
-
-
+                        "terminal_id": t.id,
+                        "session_id": session_id,
+                        "provider": (
+                            link.provider
+                            if link and getattr(link, "provider", None)
+                            else (t.provider or "")
+                        ),
+                        "session_type": t.session_type or "",
+                        "title": title,
+                        "cwd": t.cwd or "",
                     }
                 )
-        except Exception as e:
-            logger.debug(f"Failed to collect agents: {e}")
-            return agents
 
+        for info in agent_infos:
+            terminals = agent_to_terminals.get(info.id, [])
+            agents.append(
+                {
+                    "id": info.id,
+                    "name": info.name,
+                    "description": info.description or "",
+                    "terminals": terminals,
+                    "share_url": getattr(info, "share_url", None),
+                    "last_synced_at": getattr(info, "last_synced_at", None),
+                }
+            )
+
+        return agents
 
     @staticmethod
     def _fingerprint(agents: list[dict]) -> str:
@@ -319,9 +349,21 @@ class AgentsPanel(Container, can_focus=True):
         # Handle refresh worker
         if self._refresh_worker is not None and event.worker is self._refresh_worker:
             if event.state == WorkerState.ERROR:
-
+                # Keep the last successfully-rendered list on refresh errors to avoid
+                # flashing an empty Agents tab during transient tmux/SQLite hiccups.
+                self._last_refresh_error_at = time.monotonic()
+                err = self._refresh_worker.error
+                if isinstance(err, BaseException):
+                    logger.warning(
+                        "Agents refresh failed",
+                        exc_info=(type(err), err, err.__traceback__),
+                    )
+                else:
+                    logger.warning(f"Agents refresh failed: {err}")
+                return
             elif event.state == WorkerState.SUCCESS:
                 self._agents = self._refresh_worker.result or []
+                self._last_refresh_error_at = None
             else:
                 return
             fp = self._fingerprint(self._agents)
@@ -329,7 +371,10 @@ class AgentsPanel(Container, can_focus=True):
                 return  # nothing changed – skip re-render to avoid flicker
             self._rendered_fingerprint = fp
             self.run_worker(
-                self._render_agents(),
+                self._render_agents(),
+                group="agents-render",
+                exclusive=True,
+                exit_on_error=False,
             )
             return
 
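The intent of the ERROR branch is "keep showing the last good list" rather than "blank the panel" when a refresh fails. A framework-neutral sketch of that idea (the class and method names here are made up for illustration; the real widget expresses this through Textual workers):

import logging
import time

logger = logging.getLogger(__name__)


class AgentListState:
    """Keep the last good snapshot; a failed refresh never blanks the UI."""

    def __init__(self) -> None:
        self.agents: list[dict] = []
        self.last_refresh_error_at: float | None = None

    def on_refresh_result(self, result: list[dict] | None, error: Exception | None) -> list[dict]:
        if error is not None:
            # Record the failure and keep rendering the previous snapshot.
            self.last_refresh_error_at = time.monotonic()
            logger.warning("Agents refresh failed: %s", error)
            return self.agents
        self.agents = result or []
        self.last_refresh_error_at = None
        return self.agents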
@@ -345,117 +390,123 @@ class AgentsPanel(Container, can_focus=True):
         async with self._refresh_lock:
             try:
                 container = self.query_one("#agents-list", Vertical)
-
-                    return
-
-                await container.remove_children()
+                await container.remove_children()
 
-
-
-
-                    )
-                    return
+                if not self._agents:
+                    await container.mount(Static("No agents yet. Click 'Create Agent' to add one."))
+                    return
 
-
-
+                for agent in self._agents:
+                    safe_id = self._safe_id(agent["id"])
 
-
-
-
+                    # Agent row with name, create button, and delete button
+                    row = Horizontal(classes="agent-row")
+                    await container.mount(row)
 
-
-
-
-
-
+                    # Agent name button
+                    name_label = Text(agent["name"], style="bold")
+                    terminal_count = len(agent["terminals"])
+                    if terminal_count > 0:
+                        name_label.append(f" ({terminal_count})", style="dim")
 
-
-                        AgentNameButton(
+                    agent_btn = AgentNameButton(
                         name_label,
                         id=f"agent-{safe_id}",
                         name=agent["id"],
                         classes="agent-name",
                     )
-
+                    if agent.get("description"):
+                        agent_btn.tooltip = agent["description"]
+                    await row.mount(agent_btn)
 
-
-
-
-
-
-
-
-
+                    # Share or Sync button (Sync if agent already has a share_url)
+                    if agent.get("share_url"):
+                        await row.mount(
+                            Button(
+                                "Sync",
+                                id=f"sync-{safe_id}",
+                                name=agent["id"],
+                                classes="agent-share",
+                            )
                         )
-
-
+                        await row.mount(
+                            Button(
+                                "Link",
+                                id=f"link-{safe_id}",
+                                name=agent["id"],
+                                classes="agent-share",
+                            )
+                        )
+                    else:
+                        await row.mount(
+                            Button(
+                                "Share",
+                                id=f"share-{safe_id}",
+                                name=agent["id"],
+                                classes="agent-share",
+                            )
+                        )
+
+                    # Create terminal button
                     await row.mount(
                         Button(
-                            "
-                            id=f"
+                            "+ Term",
+                            id=f"create-term-{safe_id}",
                             name=agent["id"],
-                            classes="agent-
+                            classes="agent-create",
                         )
                     )
-
+
+                    # Delete agent button
                     await row.mount(
                         Button(
-                            "
-                            id=f"
+                            "✕",
+                            id=f"delete-{safe_id}",
                             name=agent["id"],
-
+                            variant="error",
+                            classes="agent-delete",
                         )
                     )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            )
-            )
-
-            # Terminal list (indented under agent)
-            if agent["terminals"]:
-                term_list = Vertical(classes="terminal-list")
-                await container.mount(term_list)
-
-                for term in agent["terminals"]:
-                    term_safe_id = self._safe_id(term["terminal_id"])
-                    term_row = Horizontal(classes="terminal-row")
-                    await term_list.mount(term_row)
-
-                    label = self._make_terminal_label(term)
-                    await term_row.mount(
-                        Button(
-                            label,
-                            id=f"switch-{term_safe_id}",
-                            name=term["terminal_id"],
-                            classes="terminal-switch",
+                    # Terminal list (indented under agent)
+                    if agent["terminals"]:
+                        term_list = Vertical(classes="terminal-list")
+                        await container.mount(term_list)
+
+                        for term in agent["terminals"]:
+                            term_safe_id = self._safe_id(term["terminal_id"])
+                            term_row = Horizontal(classes="terminal-row")
+                            await term_list.mount(term_row)
+
+                            label = self._make_terminal_label(term)
+                            await term_row.mount(
+                                Button(
+                                    label,
+                                    id=f"switch-{term_safe_id}",
+                                    name=term["terminal_id"],
+                                    classes="terminal-switch",
+                                )
                             )
-
-
-
-
-
-
-
-
+                            await term_row.mount(
+                                Button(
+                                    "✕",
+                                    id=f"close-{term_safe_id}",
+                                    name=term["terminal_id"],
+                                    variant="error",
+                                    classes="terminal-close",
+                                )
                             )
-
+            except Exception:
+                logger.exception("Failed to render agents list")
+                try:
+                    container = self.query_one("#agents-list", Vertical)
+                    await container.remove_children()
+                    await container.mount(
+                        Static("Agents UI error (see ~/.aline/.logs/dashboard.log)")
+                    )
+                except Exception:
+                    pass
+                return
 
     def _make_terminal_label(self, term: dict) -> Text:
         """Generate label for a terminal."""
@@ -613,9 +664,7 @@ class AgentsPanel(Container, can_focus=True):
         if result:
             if result.get("imported"):
                 n = result.get("sessions_imported", 0)
-                self.app.notify(
-                    f"Imported: {result.get('name')} ({n} sessions)", title="Agent"
-                )
+                self.app.notify(f"Imported: {result.get('name')} ({n} sessions)", title="Agent")
             else:
                 self.app.notify(f"Created: {result.get('name')}", title="Agent")
             self.refresh_data()
@@ -653,11 +702,10 @@ class AgentsPanel(Container, can_focus=True):
 
         # Create the terminal with agent association
         self.run_worker(
-            self._do_create_terminal(
-                agent_type, workspace, skip_permissions, no_track, agent_id
-            ),
+            self._do_create_terminal(agent_type, workspace, skip_permissions, no_track, agent_id),
             group="terminal-create",
             exclusive=True,
+            exit_on_error=False,
         )
 
     async def _do_create_terminal(
@@ -670,9 +718,7 @@ class AgentsPanel(Container, can_focus=True):
     ) -> None:
         """Actually create the terminal with agent association."""
         if agent_type == "claude":
-            await self._create_claude_terminal(
-                workspace, skip_permissions, no_track, agent_id
-            )
+            await self._create_claude_terminal(workspace, skip_permissions, no_track, agent_id)
         elif agent_type == "codex":
             await self._create_codex_terminal(workspace, no_track, agent_id)
         elif agent_type == "opencode":
@@ -748,13 +794,9 @@ class AgentsPanel(Container, can_focus=True):
             except Exception:
                 pass
         else:
-            self.app.notify(
-                "Failed to create terminal", title="Agent", severity="error"
-            )
+            self.app.notify("Failed to create terminal", title="Agent", severity="error")
 
-    async def _create_codex_terminal(
-        self, workspace: str, no_track: bool, agent_id: str
-    ) -> None:
+    async def _create_codex_terminal(self, workspace: str, no_track: bool, agent_id: str) -> None:
         """Create a Codex terminal associated with an agent."""
         try:
             from ...db import get_database
@@ -817,9 +859,7 @@ class AgentsPanel(Container, can_focus=True):
         except Exception:
             pass
 
-        command = self._command_in_directory(
-            tmux_manager.zsh_run_and_keep_open("codex"), workspace
-        )
+        command = self._command_in_directory(tmux_manager.zsh_run_and_keep_open("codex"), workspace)
 
         created = tmux_manager.create_inner_window(
             "codex",
@@ -831,9 +871,7 @@ class AgentsPanel(Container, can_focus=True):
         )
 
         if not created:
-            self.app.notify(
-                "Failed to create terminal", title="Agent", severity="error"
-            )
+            self.app.notify("Failed to create terminal", title="Agent", severity="error")
 
     async def _create_opencode_terminal(self, workspace: str, agent_id: str) -> None:
         """Create an Opencode terminal associated with an agent."""
@@ -892,9 +930,7 @@ class AgentsPanel(Container, can_focus=True):
             except Exception:
                 pass
         else:
-            self.app.notify(
-                "Failed to create terminal", title="Agent", severity="error"
-            )
+            self.app.notify("Failed to create terminal", title="Agent", severity="error")
 
     async def _create_zsh_terminal(self, workspace: str, agent_id: str) -> None:
         """Create a zsh terminal associated with an agent."""
@@ -951,9 +987,7 @@ class AgentsPanel(Container, can_focus=True):
             except Exception:
                 pass
         else:
-            self.app.notify(
-                "Failed to create terminal", title="Agent", severity="error"
-            )
+            self.app.notify("Failed to create terminal", title="Agent", severity="error")
 
     def _install_claude_hooks(self, workspace: str) -> None:
         """Install Claude hooks for a workspace."""
@@ -980,12 +1014,8 @@ class AgentsPanel(Container, can_focus=True):
 
             project_root = Path(workspace)
             install_stop_hook(get_stop_settings_path(project_root), quiet=True)
-            install_user_prompt_submit_hook(
-
-            )
-            install_permission_request_hook(
-                get_permission_settings_path(project_root), quiet=True
-            )
+            install_user_prompt_submit_hook(get_submit_settings_path(project_root), quiet=True)
+            install_permission_request_hook(get_permission_settings_path(project_root), quiet=True)
         except Exception:
             pass
 
@@ -1064,14 +1094,10 @@ class AgentsPanel(Container, can_focus=True):
             db = get_database(read_only=True)
             sessions = db.get_sessions_by_agent_id(agent_id)
             if not sessions:
-                self.app.notify(
-                    "Agent has no sessions to share", title="Share", severity="warning"
-                )
+                self.app.notify("Agent has no sessions to share", title="Share", severity="warning")
                 return
         except Exception as e:
-            self.app.notify(
-                f"Failed to check sessions: {e}", title="Share", severity="error"
-            )
+            self.app.notify(f"Failed to check sessions: {e}", title="Share", severity="error")
             return
 
         # Store agent_id for the worker callback
@@ -1128,9 +1154,7 @@ class AgentsPanel(Container, can_focus=True):
         try:
             match = re.search(r"\{.*\}", output, re.DOTALL)
             if match:
-                result["json"] = json_module.loads(
-                    match.group(0), strict=False
-                )
+                result["json"] = json_module.loads(match.group(0), strict=False)
         except Exception:
             result["json"] = None
 
@@ -1165,9 +1189,7 @@ class AgentsPanel(Container, can_focus=True):
         share_link = payload.get("share_link") or payload.get("share_url")
         if not share_link:
            share_link = result.get("share_link_guess")
-        slack_message = (
-            payload.get("slack_message") if isinstance(payload, dict) else None
-        )
+        slack_message = payload.get("slack_message") if isinstance(payload, dict) else None
         if not slack_message:
             try:
                 from ...db import get_database
@@ -1190,17 +1212,13 @@ class AgentsPanel(Container, can_focus=True):
             copied = copy_text(self.app, text_to_copy)
 
             suffix = " (copied)" if copied else ""
-            self.app.notify(
-                f"Share link: {share_link}{suffix}", title="Share", timeout=6
-            )
+            self.app.notify(f"Share link: {share_link}{suffix}", title="Share", timeout=6)
         elif exit_code == 0:
             self.app.notify("Share completed", title="Share", timeout=3)
         else:
             extra = result.get("stderr") or ""
             suffix = f": {extra}" if extra else ""
-            self.app.notify(
-                f"Share failed (exit {exit_code}){suffix}", title="Share", timeout=6
-            )
+            self.app.notify(f"Share failed (exit {exit_code}){suffix}", title="Share", timeout=6)
 
     async def _sync_agent(self, agent_id: str) -> None:
         """Sync all sessions for an agent with remote share."""
File without changes: {aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/WHEEL
File without changes: {aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/entry_points.txt
File without changes: {aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/licenses/LICENSE
File without changes: {aline_ai-0.7.2.dist-info → aline_ai-0.7.3.dist-info}/top_level.txt