aline-ai 0.7.1__py3-none-any.whl → 0.7.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aline_ai-0.7.1.dist-info → aline_ai-0.7.3.dist-info}/METADATA +1 -1
- {aline_ai-0.7.1.dist-info → aline_ai-0.7.3.dist-info}/RECORD +17 -15
- realign/__init__.py +1 -1
- realign/commands/export_shares.py +191 -65
- realign/commands/sync_agent.py +55 -1
- realign/config.py +6 -1
- realign/dashboard/app.py +28 -36
- realign/dashboard/local_api.py +122 -0
- realign/dashboard/screens/create_agent.py +2 -11
- realign/dashboard/state.py +41 -0
- realign/dashboard/tmux_manager.py +15 -14
- realign/dashboard/widgets/agents_panel.py +264 -209
- realign/dashboard/widgets/config_panel.py +63 -1
- {aline_ai-0.7.1.dist-info → aline_ai-0.7.3.dist-info}/WHEEL +0 -0
- {aline_ai-0.7.1.dist-info → aline_ai-0.7.3.dist-info}/entry_points.txt +0 -0
- {aline_ai-0.7.1.dist-info → aline_ai-0.7.3.dist-info}/licenses/LICENSE +0 -0
- {aline_ai-0.7.1.dist-info → aline_ai-0.7.3.dist-info}/top_level.txt +0 -0
{aline_ai-0.7.1.dist-info → aline_ai-0.7.3.dist-info}/RECORD
CHANGED

@@ -1,5 +1,5 @@
-aline_ai-0.7.
-realign/__init__.py,sha256=
+aline_ai-0.7.3.dist-info/licenses/LICENSE,sha256=H8wTqV5IF1oHw_HbBtS1PSDU8G_q81yblEIL_JfV8Vo,1077
+realign/__init__.py,sha256=ReJTANkYJVgZYwHExBSBOBiJZxdi2HY4VluOTHSxRns,1623
 realign/agent_names.py,sha256=H4oVJMkqg1ZYCk58vD_Jh9apaAHSFJRswa-C9SPdJxc,1171
 realign/auth.py,sha256=d_1yvCwluN5iIrdgjtuSKpOYAksDzrzNgntKacLVJrw,16583
 realign/claude_detector.py,sha256=ZLSJacMo6zzQclXByABKA70UNpstxqIv3fPGqdpA934,2792
@@ -7,7 +7,7 @@ realign/cli.py,sha256=PiMUA_sFQ-K7zlIr1Ahs7St8NwcXDG3JKT_8yIqLwZI,40569
 realign/codex_detector.py,sha256=WGIClvlrFVCqJ5vR9DrKVsp1eJhOShvcaXibTHb0Nfc,6304
 realign/codex_home.py,sha256=ljkW8uCfQD4cisEJtPNQmIgaR0yEfWSyHwoVQFY-6p4,4374
 realign/codex_terminal_linker.py,sha256=L2Ha4drlZ7Sbq2jzXyxczOdUY3S5fu1gJqoI5WN9CKk,6211
-realign/config.py,sha256=
+realign/config.py,sha256=_loJkoTKszMONgo6Qq3N8VRm_iqvD-7WvXeCsKUgGUE,9478
 realign/context.py,sha256=8hzgNOg-7_eMW22wt7OM5H9IsmMveKXCv0epG7E0G7w,13917
 realign/file_lock.py,sha256=kLNm1Rra4TCrTMyPM5fwjVascq-CUz2Bzh9HHKtCKOE,3444
 realign/hooks.py,sha256=wSSIjS5x9w7fm9LUcL63Lf7bglEfb75dHFja_znKDDQ,65134
@@ -41,27 +41,29 @@ realign/commands/auth.py,sha256=wcs1lUcSXxv75WcGruzyZ3kgi0xXA8W4lNnUwM4a3CI,1173
 realign/commands/config.py,sha256=nYnu_h2pk7GODcrzrV04K51D-s7v06FlRXHJ0HJ-gvU,6732
 realign/commands/context.py,sha256=pM2KfZHVkB-ou4nBhFvKSwnYliLBzwN3zerLyBAbhfE,7095
 realign/commands/doctor.py,sha256=0c1TZuA_cw1CSU0yKMVRU-18uTxdqjXKJ8lP2CTTNSQ,20656
-realign/commands/export_shares.py,sha256=
+realign/commands/export_shares.py,sha256=O2yRZT4S2ANoswLwDDmA1mau1nEvBVbmSXD4ST6Id_o,153150
 realign/commands/import_shares.py,sha256=Jx_7HVSg7SrGGKLDxsf_UqoStDimw8B26uKkqNFF6t8,33071
 realign/commands/init.py,sha256=6rBr1LVIrQLbUH_UvoDhkF1qXmMh2xkjNWCYAUz5Tho,35274
 realign/commands/restore.py,sha256=s2BxQZHxQw9r12NzRVsK20KlGafy5AIoSjWMo5PcnHY,11173
 realign/commands/search.py,sha256=QlUDzRDD6ebq21LTtLe5-OZM62iwDrDqfbnXbuxfklU,27516
-realign/commands/sync_agent.py,sha256=
+realign/commands/sync_agent.py,sha256=sopzUQ6kiRgiBlcEReGAWCRoqrHpk3nAx75qXSgnNi4,17082
 realign/commands/upgrade.py,sha256=L3PLOUIN5qAQTbkfoVtSsIbbzEezA_xjjk9F1GMVfjw,12781
 realign/commands/watcher.py,sha256=4WTThIgr-Z5guKh_JqGDcPmerr97XiHrVaaijmckHsA,134350
 realign/commands/worker.py,sha256=jTu7Pj60nTnn7SsH3oNCNnO6zl4TIFCJVNSC1OoQ_0o,23363
 realign/dashboard/__init__.py,sha256=QZkHTsGityH8UkF8rmvA3xW7dMXNe0swEWr443qfgCM,128
-realign/dashboard/app.py,sha256=
+realign/dashboard/app.py,sha256=XLPqvPwGuR5Tyu6uz9T88yQSc4wq8Afu0h7pWH5A8_k,8161
 realign/dashboard/clipboard.py,sha256=81frq83E_urqLkwuCvtl0hiTEjavtdQn8kCi72jJWcs,1207
 realign/dashboard/layout.py,sha256=sZxmFj6QTbkois9MHTvBEMMcnaRVehCDqugdbiFx10k,9072
+realign/dashboard/local_api.py,sha256=Roq74etTJR0uOiHE3uIe7sqVITjS5JGQEF4g0nmUm5Q,4332
+realign/dashboard/state.py,sha256=V7zBKvyDgqdXv68XHxV4T8xf3IhYbI5W33UmYW3_hyM,1139
 realign/dashboard/terminal_backend.py,sha256=MlDfwtqhftyQK6jDNizQGFjAWIo5Bx2TDpSnP3MCZVM,3375
-realign/dashboard/tmux_manager.py,sha256=
+realign/dashboard/tmux_manager.py,sha256=HJwB2Wpz-I4OrNT3Db8gKCLifmHdMCalA-UONBaLMG8,34564
 realign/dashboard/backends/__init__.py,sha256=POROX7YKtukYZcLB1pi_kO0sSEpuO3y-hwmF3WIN1Kk,163
 realign/dashboard/backends/iterm2.py,sha256=XYYJT5lrrp4pW_MyEqPZYkRI0qyKUwJlezwMidgnsHc,21390
 realign/dashboard/backends/kitty.py,sha256=5jdkR1f2PwB8a4SnS3EG6uOQ2XU-PB7-cpKBfIJq3hU,12066
 realign/dashboard/screens/__init__.py,sha256=MiefFamCYRrzTwQXiCUdybaJaFxlK5XKtLHaSQmqDv0,597
 realign/dashboard/screens/agent_detail.py,sha256=N-iUC4434C91OcDu4dkQaxS_NXQ5Yl5sqNBb2mTmoBw,10490
-realign/dashboard/screens/create_agent.py,sha256=
+realign/dashboard/screens/create_agent.py,sha256=Dy9liP_4fj_zgNafRRJGX2iQJiarHvtVLdghrqMGiLQ,11323
 realign/dashboard/screens/create_agent_info.py,sha256=K2Rbp4zHVdanPT3Fp82We4qlSAM-0IBZXPLuQuevuME,7838
 realign/dashboard/screens/create_event.py,sha256=oiQY1zKpUYnQU-5fQLeuZH9BV5NClE5B5XZIVBYG5A8,5506
 realign/dashboard/screens/event_detail.py,sha256=-pqt3NBoeTXGJKtbndZy-msklwXTeNWMS4H12oMG5ks,20175
@@ -70,8 +72,8 @@ realign/dashboard/screens/session_detail.py,sha256=TBkHqSHyMxsLB2QdZq9m1EoiH8oRV
 realign/dashboard/screens/share_import.py,sha256=hl2x0yGVycsoUI76AmdZTAV-br3Q6191g5xHHrZ8hOA,6318
 realign/dashboard/styles/dashboard.tcss,sha256=9W5Tx0lgyGb4HU-z-Kn7gBdexIK0aPe0bkVn2k_AseM,3288
 realign/dashboard/widgets/__init__.py,sha256=dXsOnbeu_8XhP-6Bu6-R_0LNGqsSM6x7dG7FCDumpa8,460
-realign/dashboard/widgets/agents_panel.py,sha256=
-realign/dashboard/widgets/config_panel.py,sha256=
+realign/dashboard/widgets/agents_panel.py,sha256=pqXZhzSL84lzJPqGGGsfsGJGVlVo2iCyHByXM4_ITCM,47083
+realign/dashboard/widgets/config_panel.py,sha256=J6A_rxGVqNu5TMFcWELWgdX1nFCHAjKprFMMp7mBDKo,18203
 realign/dashboard/widgets/events_table.py,sha256=0cMvE0KdZFBZyvywv7vlt005qsR0aLQnQiMf3ZzK7RY,30218
 realign/dashboard/widgets/header.py,sha256=0HHCFXX7F3C6HII-WDwOJwWkJrajmKPWmdoMWyOkn9E,1587
 realign/dashboard/widgets/openable_table.py,sha256=GeJPDEYp0kRHShqvmPMzAePpYXRZHUNqcWNnxqsqxjA,1963
@@ -104,8 +106,8 @@ realign/triggers/next_turn_trigger.py,sha256=-x80_I-WmIjXXzQHEPBykgx_GQW6oKaLDQx
 realign/triggers/registry.py,sha256=dkIjSd8Bg-hF0nxaO2Fi2K-0Zipqv6vVjc-HYSrA_fY,3656
 realign/triggers/turn_status.py,sha256=wAZEhXDAmDoX5F-ohWfSnZZ0eA6DAJ9svSPiSv_f6sg,6041
 realign/triggers/turn_summary.py,sha256=f3hEUshgv9skJ9AbfWpoYs417lsv_HK2A_vpPjgryO4,4467
-aline_ai-0.7.
-aline_ai-0.7.
-aline_ai-0.7.
-aline_ai-0.7.
-aline_ai-0.7.
+aline_ai-0.7.3.dist-info/METADATA,sha256=qbtgEyiKE5FSJk_zjGsOTKm8s89Ckqpnw8wGM8RFezA,1597
+aline_ai-0.7.3.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+aline_ai-0.7.3.dist-info/entry_points.txt,sha256=TvYELpMoWsUTcQdMV8tBHxCbEf_LbK4sESqK3r8PM6Y,78
+aline_ai-0.7.3.dist-info/top_level.txt,sha256=yIL3s2xv9nf1GwD5n71Aq_JEIV4AfzCIDNKBzewuRm4,8
+aline_ai-0.7.3.dist-info/RECORD,,
realign/__init__.py
CHANGED
realign/commands/export_shares.py
CHANGED

@@ -1703,6 +1703,21 @@ def _update_share_content(
     if not HTTPX_AVAILABLE:
         raise RuntimeError("httpx package not installed. Run: pip install httpx")

+    # Large payloads can exceed serverless request limits; fall back to chunked update.
+    update_payload = {"conversation_data": conversation_data}
+    payload_size = len(json.dumps(update_payload).encode("utf-8"))
+    if payload_size > CHUNKED_UPLOAD_THRESHOLD:
+        logger.info(
+            f"Update payload size ({payload_size / 1024 / 1024:.2f}MB) exceeds threshold, using chunked update"
+        )
+        return _chunked_update_share_content(
+            backend_url=backend_url,
+            share_id=share_id,
+            token=token,
+            conversation_data=conversation_data,
+            expected_version=expected_version,
+        )
+
     headers = {
         "X-Token": token,
         "X-Expected-Version": str(expected_version),
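
The new guard in `_update_share_content` routes oversized payloads through the chunked path instead of a single PUT. A minimal sketch of that decision, assuming an illustrative threshold (the package defines its own `CHUNKED_UPLOAD_THRESHOLD`):

```python
import json

# Illustrative threshold only; the real CHUNKED_UPLOAD_THRESHOLD lives in export_shares.py.
CHUNKED_UPLOAD_THRESHOLD = 4 * 1024 * 1024  # 4 MB, an assumption for this sketch


def choose_update_path(conversation_data: dict) -> str:
    """Return which path an update of this size would take (sketch, not the package's code)."""
    payload = {"conversation_data": conversation_data}
    payload_size = len(json.dumps(payload).encode("utf-8"))
    if payload_size > CHUNKED_UPLOAD_THRESHOLD:
        return "chunked"  # init/upload/complete against /api/share/chunk/*
    return "direct"       # single PUT to /api/share/{share_id}


# A large synthetic payload selects the chunked path.
print(choose_update_path({"turns": ["x" * 1024] * 8192}))  # -> "chunked"
```
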
@@ -1712,7 +1727,7 @@ def _update_share_content(
         response = httpx.put(
             f"{backend_url}/api/share/{share_id}",
             headers=headers,
-            json=
+            json=update_payload,
             timeout=60.0,
         )
         response.raise_for_status()
@@ -1752,64 +1767,148 @@ def _upload_chunks_and_complete(
     upload_id: str,
     backend_url: str,
     progress_callback: Optional[Callable] = None,
-
-) ->
+    headers_provider: Optional[Callable[[], Dict[str, str]]] = None,
+) -> Optional[dict]:
     """
     Helper function to upload chunks and complete the upload.
     Can be run in background thread.
     """
+    import time
+
     total_chunks = len(chunks)
-
+
+    def _headers() -> Dict[str, str]:
+        try:
+            return dict(headers_provider()) if headers_provider else {}
+        except Exception:
+            return {}
+
+    def _post_with_retries(url: str, payload: dict, timeout: float) -> Optional[dict]:
+        max_attempts = 3
+        for attempt in range(max_attempts):
+            try:
+                response = httpx.post(url, json=payload, headers=_headers(), timeout=timeout)
+                response.raise_for_status()
+                return response.json()
+            except httpx.HTTPStatusError as e:
+                status = getattr(e.response, "status_code", None)
+                retriable = status in (401, 403, 409, 429, 500, 502, 503, 504)
+                if retriable and attempt < max_attempts - 1:
+                    time.sleep(0.5 * (2**attempt))
+                    continue
+                logger.error(f"POST failed: {e}")
+                return None
+            except httpx.HTTPError as e:
+                if attempt < max_attempts - 1:
+                    time.sleep(0.5 * (2**attempt))
+                    continue
+                logger.error(f"POST failed: {e}")
+                return None

     # Upload each chunk
     for i, chunk in enumerate(chunks):
         if progress_callback:
             progress_callback(i + 1, total_chunks + 2, f"Uploading chunk {i + 1}/{total_chunks}...")

-
-
-
-
-
-        }
+        chunk_payload = {
+            "upload_id": upload_id,
+            "chunk_index": i,
+            "data": chunk,
+        }

-
-
-
-
-
-
-
-
-        logger.debug(
-            f"Chunk {i + 1}/{total_chunks} uploaded, received: {result.get('received_chunks')}"
-        )
+        result = _post_with_retries(
+            f"{backend_url}/api/share/chunk/upload",
+            chunk_payload,
+            timeout=60.0,  # Longer timeout for chunk uploads
+        )
+        if not result:
+            logger.error(f"Failed to upload chunk {i}")
+            return None

-
-
-
-        return
+        logger.debug(
+            f"Chunk {i + 1}/{total_chunks} uploaded, received: {result.get('received_chunks')}"
+        )

     # Complete upload
     if progress_callback:
         progress_callback(total_chunks + 1, total_chunks + 2, "Finalizing upload...")

-
-
-
-
-
-
-    )
-
-    result = response.json()
-    logger.info(f"Chunked upload completed: {result.get('share_url')}")
+    result = _post_with_retries(
+        f"{backend_url}/api/share/chunk/complete",
+        {"upload_id": upload_id},
+        timeout=60.0,
+    )
+    if not result:
+        logger.error("Failed to complete chunked upload")
+        return None

-
-        progress_callback(total_chunks + 2, total_chunks + 2, "Upload complete!")
+    logger.info(f"Chunked upload completed: {result.get('share_url')}")

-
-
+    if progress_callback:
+        progress_callback(total_chunks + 2, total_chunks + 2, "Upload complete!")
+
+    return result
+
+
+def _chunked_update_share_content(
+    backend_url: str,
+    share_id: str,
+    token: str,
+    conversation_data: dict,
+    expected_version: int,
+) -> dict:
+    data_str = json.dumps(conversation_data)
+    data_bytes = data_str.encode("utf-8")
+    total_size = len(data_bytes)
+
+    chunks: List[str] = []
+    for i in range(0, total_size, CHUNK_SIZE):
+        chunk_data = data_bytes[i : i + CHUNK_SIZE]
+        chunks.append(base64.b64encode(chunk_data).decode("ascii"))
+
+    total_chunks = len(chunks)
+    logger.info(f"Splitting update into {total_chunks} chunks")
+
+    init_payload = {
+        "total_chunks": total_chunks,
+        "total_size": total_size,
+        "metadata": {},
+        "encrypted_info": None,
+        "ui_metadata": conversation_data.get("ui_metadata"),
+        "share_id": share_id,
+        "operation": "update",
+    }
+
+    def headers_provider() -> Dict[str, str]:
+        # Keep bearer auth when available; required for create, optional for update.
+        headers = get_auth_headers()
+        headers["X-Token"] = token
+        headers["X-Expected-Version"] = str(expected_version)
+        return headers
+
+    init_headers = headers_provider()
+    init_headers["Content-Type"] = "application/json"
+
+    response = httpx.post(
+        f"{backend_url}/api/share/chunk/init",
+        json=init_payload,
+        headers=init_headers,
+        timeout=30.0,
+    )
+    response.raise_for_status()
+    init_result = response.json()
+    upload_id = init_result["upload_id"]
+
+    result = _upload_chunks_and_complete(
+        chunks,
+        upload_id,
+        backend_url,
+        progress_callback=None,
+        headers_provider=headers_provider,
+    )
+    if not result:
+        raise RuntimeError("Failed to complete chunked update")
+    return result


 def _chunked_upload(
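
`_chunked_update_share_content` reuses the existing chunk endpoints (`/api/share/chunk/init`, `/upload`, `/complete`) by serializing the payload once and slicing it into base64 chunks. A standalone sketch of that chunking step, with an illustrative `CHUNK_SIZE` (the real constant is defined elsewhere in export_shares.py):

```python
import base64
import json
from typing import List

CHUNK_SIZE = 512 * 1024  # illustrative; the package defines its own CHUNK_SIZE


def split_into_chunks(conversation_data: dict) -> List[str]:
    """Serialize once, then slice into base64 strings suitable for JSON chunk uploads."""
    data_bytes = json.dumps(conversation_data).encode("utf-8")
    chunks: List[str] = []
    for offset in range(0, len(data_bytes), CHUNK_SIZE):
        piece = data_bytes[offset : offset + CHUNK_SIZE]
        chunks.append(base64.b64encode(piece).decode("ascii"))
    return chunks


# Each chunk is later POSTed with its index to /api/share/chunk/upload,
# and /api/share/chunk/complete reassembles the payload server-side.
print(len(split_into_chunks({"turns": ["hello world"] * 100000})), "chunks")
```
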
@@ -1857,8 +1956,11 @@ def _chunked_upload(
     if progress_callback:
         progress_callback(0, total_chunks + 2, "Initializing chunked upload...")

-
-
+    def headers_provider() -> Dict[str, str]:
+        # Refresh token if needed between chunks.
+        headers = get_auth_headers()
+        headers["Content-Type"] = "application/json"
+        return headers

     # Step 1: Initialize upload session (now returns share_url immediately)
     try:
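
The `headers_provider` closure is rebuilt on every request, so a token refreshed mid-upload is picked up by later chunks, and the new `_post_with_retries` helper backs off on transient or auth-related failures. A hedged sketch of the combined pattern; the helper names and defaults here are illustrative, not the package's exact code:

```python
import time
from typing import Callable, Dict, Optional

import httpx


def post_with_retries(
    url: str,
    payload: dict,
    headers_provider: Optional[Callable[[], Dict[str, str]]] = None,
    max_attempts: int = 3,
    timeout: float = 60.0,
) -> Optional[dict]:
    """POST with exponential backoff; headers are rebuilt on every attempt."""
    for attempt in range(max_attempts):
        try:
            headers = headers_provider() if headers_provider else {}
            response = httpx.post(url, json=payload, headers=headers, timeout=timeout)
            response.raise_for_status()
            return response.json()
        except httpx.HTTPError:
            if attempt < max_attempts - 1:
                time.sleep(0.5 * (2 ** attempt))  # 0.5s, 1s, 2s, ...
                continue
            return None
    return None
```
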
@@ -1877,7 +1979,7 @@ def _chunked_upload(
         response = httpx.post(
             f"{backend_url}/api/share/chunk/init",
             json=init_payload,
-            headers=
+            headers=headers_provider(),
             timeout=30.0,
         )
         response.raise_for_status()
@@ -1902,7 +2004,7 @@ def _chunked_upload(
         # but user already has the share URL displayed
         thread = threading.Thread(
             target=_upload_chunks_and_complete,
-            args=(chunks, upload_id, backend_url, None,
+            args=(chunks, upload_id, backend_url, None, headers_provider),  # No callback in background
             daemon=False,  # Important: let thread complete before process exits
         )
         thread.start()
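
In background mode the remaining chunk uploads run on a non-daemon thread, so the CLI can show the share URL immediately while the process still waits for the upload to finish before exiting. A small sketch of that pattern (the stand-in worker is illustrative; the real target is `_upload_chunks_and_complete`):

```python
import threading
import time


def start_background_upload(work, *args) -> threading.Thread:
    # daemon=False is the important part: a daemon thread would be killed when the
    # main thread exits, which could cut off the chunk/complete requests mid-flight.
    thread = threading.Thread(target=work, args=args, daemon=False)
    thread.start()
    return thread


# Stand-in worker; the package passes _upload_chunks_and_complete and its args here.
t = start_background_upload(lambda n: time.sleep(0.1 * n), 3)
t.join()  # optional: the process waits for non-daemon threads even without join()
```
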
@@ -1917,14 +2019,17 @@ def _chunked_upload(
         }

     # Foreground mode: upload chunks synchronously
-    _upload_chunks_and_complete(
-
-
-
-        "
-
-
-
+    result = _upload_chunks_and_complete(
+        chunks, upload_id, backend_url, progress_callback, headers_provider
+    )
+    if not result:
+        raise RuntimeError("Failed to complete chunked upload")
+    # Preserve init fields if server didn't echo them
+    result.setdefault("share_id", share_id)
+    result.setdefault("share_url", share_url)
+    result.setdefault("admin_token", admin_token)
+    result.setdefault("expiry_at", expiry_at)
+    return result


 def upload_to_backend_unencrypted(
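
Foreground mode now raises if the chunked upload fails and back-fills fields that `/api/share/chunk/init` returned but `/complete` may not echo. A small illustration of the `setdefault` merge (values are made up):

```python
# Values returned by the init call (illustrative).
share_id = "abc123"
share_url = "https://example.invalid/share/abc123"

# Suppose the complete call only echoes some fields.
result = {"status": "ok", "share_url": "https://example.invalid/share/abc123?v=2"}

# setdefault fills only missing keys, so the server's values win whenever present.
result.setdefault("share_id", share_id)
result.setdefault("share_url", share_url)
print(result["share_id"])   # "abc123" (back-filled from init)
print(result["share_url"])  # ".../abc123?v=2" (kept from the complete response)
```
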
@@ -2032,8 +2137,10 @@ def _chunked_upload_unencrypted(
     if progress_callback:
         progress_callback(0, total_chunks + 2, "Initializing chunked upload...")

-
-
+    def headers_provider() -> Dict[str, str]:
+        headers = get_auth_headers()
+        headers["Content-Type"] = "application/json"
+        return headers

     # Step 1: Initialize upload session (now returns share_url immediately)
     try:
@@ -2048,7 +2155,7 @@ def _chunked_upload_unencrypted(
         response = httpx.post(
             f"{backend_url}/api/share/chunk/init",
             json=init_payload,
-            headers=
+            headers=headers_provider(),
             timeout=30.0,
         )
         response.raise_for_status()
@@ -2073,7 +2180,7 @@ def _chunked_upload_unencrypted(
         # but user already has the share URL displayed
         thread = threading.Thread(
             target=_upload_chunks_and_complete,
-            args=(chunks, upload_id, backend_url, None,
+            args=(chunks, upload_id, backend_url, None, headers_provider),  # No callback in background
             daemon=False,  # Important: let thread complete before process exits
         )
         thread.start()
@@ -2088,14 +2195,16 @@ def _chunked_upload_unencrypted(
         }

     # Foreground mode: upload chunks synchronously
-    _upload_chunks_and_complete(
-
-
-
-        "
-
-
-
+    result = _upload_chunks_and_complete(
+        chunks, upload_id, backend_url, progress_callback, headers_provider
+    )
+    if not result:
+        raise RuntimeError("Failed to complete chunked upload")
+    result.setdefault("share_id", share_id)
+    result.setdefault("share_url", share_url)
+    result.setdefault("admin_token", admin_token)
+    result.setdefault("expiry_at", expiry_at)
+    return result


 def clean_text_for_prompt(text: str) -> str:
@@ -3093,6 +3202,10 @@ def export_shares_interactive_command(
         if not json_output:
             print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
         return 1
+    if not get_auth_headers():
+        if not json_output:
+            print("Error: Login expired. Please run 'aline login' again.", file=sys.stderr)
+        return 1

     # Get backend URL
     if backend_url is None:
@@ -3784,6 +3897,12 @@ def export_agent_shares_command(
         if not json_output:
             print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
         return 1
+    # is_logged_in() can be true with an expired token + refresh_token; ensure we actually have an access token
+    # before attempting uploads (otherwise the server returns 401/403 and the UX is confusing).
+    if not get_auth_headers():
+        if not json_output:
+            print("Error: Login expired. Please run 'aline login' again.", file=sys.stderr)
+        return 1

     _progress("Fetching agent info...")

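
Both export commands now distinguish "never logged in" from "logged in but the access token is no longer usable". A hedged sketch of that guard, with the auth helpers passed in as stubs (the package's own `is_logged_in`/`get_auth_headers` are used in the real code):

```python
import sys
from typing import Callable, Dict


def require_fresh_login(
    is_logged_in: Callable[[], bool],
    get_auth_headers: Callable[[], Dict[str, str]],
    json_output: bool = False,
) -> int:
    """Return 0 when uploads can proceed, 1 when the user must (re)login."""
    if not is_logged_in():
        if not json_output:
            print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
        return 1
    # A stored refresh_token can make is_logged_in() true even when no usable
    # access token is available, so check the headers separately.
    if not get_auth_headers():
        if not json_output:
            print("Error: Login expired. Please run 'aline login' again.", file=sys.stderr)
        return 1
    return 0


# Logged in, but the token no longer yields headers: the expired branch fires.
print(require_fresh_login(lambda: True, lambda: {}))  # prints the error, then 1
```
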
@@ -3956,6 +4075,9 @@ def export_agent_shares_command(
         "description": event_description,
     }

+    # Add agent name to ui_metadata for chat display
+    conversation_data["ui_metadata"]["agent_name"] = agent_info.name
+
     # Add MCP instructions if enabled
     if enable_mcp:
         conversation_data["ui_metadata"]["mcp_instructions"] = {
@@ -3987,14 +4109,18 @@ def export_agent_shares_command(
                 metadata=metadata,
                 backend_url=backend_url,
                 ui_metadata=conversation_data.get("ui_metadata"),
-                background=
+                background=False,
             )
         else:
+            def upload_progress(current: int, total: int, message: str) -> None:
+                _progress(f"{message} ({current}/{total})")
+
             result = upload_to_backend_unencrypted(
                 conversation_data=conversation_data,
                 metadata=metadata,
                 backend_url=backend_url,
-
+                progress_callback=upload_progress,
+                background=False,
             )
     except Exception as e:
         if not json_output:
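
The unencrypted export path now reports upload progress through the same `(current, total, message)` callback signature the chunked uploader calls. A small sketch of wiring such a callback, with `_progress` replaced by a plain print:

```python
from typing import Callable, List

ProgressCallback = Callable[[int, int, str], None]


def upload_progress(current: int, total: int, message: str) -> None:
    # Mirrors the diff's callback body: _progress(f"{message} ({current}/{total})")
    print(f"{message} ({current}/{total})")


def fake_upload(chunks: List[str], progress_callback: ProgressCallback) -> None:
    total = len(chunks) + 2  # chunks + finalize + done, matching the uploader's convention
    for i, _ in enumerate(chunks):
        progress_callback(i + 1, total, f"Uploading chunk {i + 1}/{len(chunks)}...")
    progress_callback(len(chunks) + 1, total, "Finalizing upload...")
    progress_callback(len(chunks) + 2, total, "Upload complete!")


fake_upload(["a", "b", "c"], upload_progress)
```
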
realign/commands/sync_agent.py
CHANGED

@@ -240,6 +240,58 @@ def sync_agent_command(
         if new_local_turns:
             sessions_pushed += 1

+    # Skip push if there's nothing new to send.
+    # This avoids re-uploading large, unchanged payloads (which can hit serverless limits and show up as 403/413).
+    needs_push_metadata = False
+    try:
+        remote_title = remote_event.get("title")
+        remote_desc = remote_event.get("description")
+
+        local_title = agent_info.name
+        local_desc = agent_info.description
+
+        has_metadata_diff = (remote_title != local_title) or (remote_desc != local_desc)
+        if has_metadata_diff and not description_updated:
+            remote_updated_at = remote_event.get("updated_at")
+            remote_dt = None
+            if isinstance(remote_updated_at, str) and remote_updated_at:
+                try:
+                    remote_dt = datetime.fromisoformat(remote_updated_at.replace("Z", "+00:00"))
+                except Exception:
+                    remote_dt = None
+
+            local_dt = getattr(agent_info, "updated_at", None)
+            if hasattr(local_dt, "tzinfo") and local_dt and local_dt.tzinfo is None:
+                local_dt = local_dt.replace(tzinfo=timezone.utc)
+
+            # If remote has no timestamp, assume local should win. Otherwise, push only if local is newer.
+            if remote_dt is None or (local_dt and remote_dt and local_dt > remote_dt):
+                needs_push_metadata = True
+    except Exception as e:
+        logger.warning(f"Failed to compute metadata push necessity (non-fatal): {e}")
+
+    if sessions_pushed == 0 and not needs_push_metadata:
+        now_iso = datetime.now(timezone.utc).isoformat()
+        try:
+            db.update_agent_sync_metadata(
+                agent_id,
+                last_synced_at=now_iso,
+                sync_version=remote_sync_version,
+            )
+        except Exception as e:
+            logger.warning(f"Failed to update local sync metadata after no-op sync: {e}")
+
+        _progress("No changes to push.")
+        _progress("Sync complete!")
+
+        return {
+            "success": True,
+            "sessions_pulled": sessions_pulled,
+            "sessions_pushed": 0,
+            "description_updated": description_updated,
+            "new_sync_version": remote_sync_version,
+        }
+
     # Build full conversation data for push
     merged_conversation = _build_merged_conversation_data(
         agent_info=agent_info,
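
The no-op-sync check only pushes metadata when the remote event lacks a timestamp or the local agent's `updated_at` is newer, after normalizing naive datetimes to UTC. A hedged, standalone sketch of that comparison:

```python
from datetime import datetime, timezone
from typing import Optional


def local_metadata_is_newer(remote_updated_at: Optional[str], local_dt: Optional[datetime]) -> bool:
    """True when local metadata should be pushed: remote has no timestamp, or local is newer."""
    remote_dt = None
    if isinstance(remote_updated_at, str) and remote_updated_at:
        try:
            # Older Pythons' fromisoformat() rejects a trailing "Z", so map it to an explicit offset.
            remote_dt = datetime.fromisoformat(remote_updated_at.replace("Z", "+00:00"))
        except ValueError:
            remote_dt = None
    if local_dt is not None and local_dt.tzinfo is None:
        local_dt = local_dt.replace(tzinfo=timezone.utc)
    if remote_dt is None:
        return True
    return bool(local_dt and local_dt > remote_dt)


print(local_metadata_is_newer("2024-01-01T00:00:00Z", datetime(2024, 6, 1)))  # True
print(local_metadata_is_newer("2024-12-01T00:00:00Z", datetime(2024, 6, 1)))  # False
```
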
@@ -383,7 +435,9 @@ def _build_merged_conversation_data(
         "time": datetime.now(timezone.utc).isoformat(),
         "event": event_data,
         "sessions": sessions_data,
-        "ui_metadata": {
+        "ui_metadata": {
+            "agent_name": agent_info.name,
+        },
     }

     if contributor_token:
realign/config.py
CHANGED

@@ -33,6 +33,9 @@ class ReAlignConfig:
     # Session catch-up settings
     max_catchup_sessions: int = 3  # Max sessions to auto-import on watcher startup

+    # Local API server port (for one-click browser import)
+    local_api_port: int = 17280
+
     # Terminal auto-close settings
     auto_close_stale_terminals: bool = False  # Auto-close terminals inactive for 24+ hours
     stale_terminal_hours: int = 24  # Hours of inactivity before auto-closing
@@ -85,13 +88,14 @@ class ReAlignConfig:
             "user_name": os.getenv("REALIGN_USER_NAME"),
             "uid": os.getenv("REALIGN_UID"),
             "max_catchup_sessions": os.getenv("REALIGN_MAX_CATCHUP_SESSIONS"),
+            "local_api_port": os.getenv("ALINE_LOCAL_API_PORT"),
             "auto_close_stale_terminals": os.getenv("REALIGN_AUTO_CLOSE_STALE_TERMINALS"),
             "stale_terminal_hours": os.getenv("REALIGN_STALE_TERMINAL_HOURS"),
         }

         for key, value in env_overrides.items():
             if value is not None:
-                if key in ["summary_max_chars", "max_catchup_sessions", "stale_terminal_hours"]:
+                if key in ["summary_max_chars", "max_catchup_sessions", "stale_terminal_hours", "local_api_port"]:
                     config_dict[key] = int(value)
                 elif key in [
                     "redact_on_match",
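
`ALINE_LOCAL_API_PORT` joins the numeric settings that are coerced from environment strings to int. A minimal sketch of that override pass, with a trimmed-down settings dict (names mirror the diff, defaults are illustrative):

```python
import os

defaults = {"local_api_port": 17280, "max_catchup_sessions": 3}

env_overrides = {
    "local_api_port": os.getenv("ALINE_LOCAL_API_PORT"),
    "max_catchup_sessions": os.getenv("REALIGN_MAX_CATCHUP_SESSIONS"),
}

config = dict(defaults)
for key, value in env_overrides.items():
    if value is not None:
        # Environment variables arrive as strings; numeric settings are cast to int.
        config[key] = int(value)

# e.g. ALINE_LOCAL_API_PORT=18000 would yield config["local_api_port"] == 18000
print(config)
```
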
@@ -139,6 +143,7 @@ class ReAlignConfig:
             "user_name": self.user_name,
             "uid": self.uid,
             "max_catchup_sessions": self.max_catchup_sessions,
+            "local_api_port": self.local_api_port,
             "auto_close_stale_terminals": self.auto_close_stale_terminals,
             "stale_terminal_hours": self.stale_terminal_hours,
         }