aline-ai 0.7.2__py3-none-any.whl → 0.7.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1703,6 +1703,21 @@ def _update_share_content(
     if not HTTPX_AVAILABLE:
         raise RuntimeError("httpx package not installed. Run: pip install httpx")
 
+    # Large payloads can exceed serverless request limits; fall back to chunked update.
+    update_payload = {"conversation_data": conversation_data}
+    payload_size = len(json.dumps(update_payload).encode("utf-8"))
+    if payload_size > CHUNKED_UPLOAD_THRESHOLD:
+        logger.info(
+            f"Update payload size ({payload_size / 1024 / 1024:.2f}MB) exceeds threshold, using chunked update"
+        )
+        return _chunked_update_share_content(
+            backend_url=backend_url,
+            share_id=share_id,
+            token=token,
+            conversation_data=conversation_data,
+            expected_version=expected_version,
+        )
+
     headers = {
         "X-Token": token,
         "X-Expected-Version": str(expected_version),
@@ -1712,7 +1727,7 @@ def _update_share_content(
     response = httpx.put(
         f"{backend_url}/api/share/{share_id}",
         headers=headers,
-        json={"conversation_data": conversation_data},
+        json=update_payload,
         timeout=60.0,
     )
     response.raise_for_status()
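
Note: the two hunks above add a size gate to _update_share_content: the request body is measured once, and anything over CHUNKED_UPLOAD_THRESHOLD is routed to the new chunked path instead of a single PUT that a serverless gateway might reject. A minimal, self-contained sketch of the same decision (the threshold value here is an assumption; the package defines its own constant):

    import json

    CHUNKED_UPLOAD_THRESHOLD = 4 * 1024 * 1024  # assumed 4MB, for illustration only

    def needs_chunked_update(conversation_data: dict) -> bool:
        # Measure the serialized request body, since that is what the
        # gateway's request-size limit actually applies to.
        payload = {"conversation_data": conversation_data}
        return len(json.dumps(payload).encode("utf-8")) > CHUNKED_UPLOAD_THRESHOLD
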
@@ -1752,64 +1767,148 @@ def _upload_chunks_and_complete(
     upload_id: str,
     backend_url: str,
     progress_callback: Optional[Callable] = None,
-    auth_headers: Optional[Dict[str, str]] = None,
-) -> None:
+    headers_provider: Optional[Callable[[], Dict[str, str]]] = None,
+) -> Optional[dict]:
     """
     Helper function to upload chunks and complete the upload.
     Can be run in background thread.
     """
+    import time
+
     total_chunks = len(chunks)
-    headers = auth_headers or {}
+
+    def _headers() -> Dict[str, str]:
+        try:
+            return dict(headers_provider()) if headers_provider else {}
+        except Exception:
+            return {}
+
+    def _post_with_retries(url: str, payload: dict, timeout: float) -> Optional[dict]:
+        max_attempts = 3
+        for attempt in range(max_attempts):
+            try:
+                response = httpx.post(url, json=payload, headers=_headers(), timeout=timeout)
+                response.raise_for_status()
+                return response.json()
+            except httpx.HTTPStatusError as e:
+                status = getattr(e.response, "status_code", None)
+                retriable = status in (401, 403, 409, 429, 500, 502, 503, 504)
+                if retriable and attempt < max_attempts - 1:
+                    time.sleep(0.5 * (2**attempt))
+                    continue
+                logger.error(f"POST failed: {e}")
+                return None
+            except httpx.HTTPError as e:
+                if attempt < max_attempts - 1:
+                    time.sleep(0.5 * (2**attempt))
+                    continue
+                logger.error(f"POST failed: {e}")
+                return None
 
     # Upload each chunk
     for i, chunk in enumerate(chunks):
         if progress_callback:
             progress_callback(i + 1, total_chunks + 2, f"Uploading chunk {i + 1}/{total_chunks}...")
 
-        try:
-            chunk_payload = {
-                "upload_id": upload_id,
-                "chunk_index": i,
-                "data": chunk,
-            }
+        chunk_payload = {
+            "upload_id": upload_id,
+            "chunk_index": i,
+            "data": chunk,
+        }
 
-            response = httpx.post(
-                f"{backend_url}/api/share/chunk/upload",
-                json=chunk_payload,
-                headers=headers,
-                timeout=60.0,  # Longer timeout for chunk uploads
-            )
-            response.raise_for_status()
-            result = response.json()
-            logger.debug(
-                f"Chunk {i + 1}/{total_chunks} uploaded, received: {result.get('received_chunks')}"
-            )
+        result = _post_with_retries(
+            f"{backend_url}/api/share/chunk/upload",
+            chunk_payload,
+            timeout=60.0,  # Longer timeout for chunk uploads
+        )
+        if not result:
+            logger.error(f"Failed to upload chunk {i}")
+            return None
 
-        except httpx.HTTPError as e:
-            logger.error(f"Failed to upload chunk {i}: {e}")
-            # In background mode, we just log the error
-            return
+        logger.debug(
+            f"Chunk {i + 1}/{total_chunks} uploaded, received: {result.get('received_chunks')}"
+        )
 
     # Complete upload
     if progress_callback:
         progress_callback(total_chunks + 1, total_chunks + 2, "Finalizing upload...")
 
-    try:
-        response = httpx.post(
-            f"{backend_url}/api/share/chunk/complete",
-            json={"upload_id": upload_id},
-            headers=headers,
-            timeout=60.0,
-        )
-        response.raise_for_status()
-        result = response.json()
-        logger.info(f"Chunked upload completed: {result.get('share_url')}")
+    result = _post_with_retries(
+        f"{backend_url}/api/share/chunk/complete",
+        {"upload_id": upload_id},
+        timeout=60.0,
+    )
+    if not result:
+        logger.error("Failed to complete chunked upload")
+        return None
 
-        if progress_callback:
-            progress_callback(total_chunks + 2, total_chunks + 2, "Upload complete!")
+    logger.info(f"Chunked upload completed: {result.get('share_url')}")
 
-    except httpx.HTTPError as e:
-        logger.error(f"Failed to complete chunked upload: {e}")
+    if progress_callback:
+        progress_callback(total_chunks + 2, total_chunks + 2, "Upload complete!")
+
+    return result
+
+
+def _chunked_update_share_content(
+    backend_url: str,
+    share_id: str,
+    token: str,
+    conversation_data: dict,
+    expected_version: int,
+) -> dict:
+    data_str = json.dumps(conversation_data)
+    data_bytes = data_str.encode("utf-8")
+    total_size = len(data_bytes)
+
+    chunks: List[str] = []
+    for i in range(0, total_size, CHUNK_SIZE):
+        chunk_data = data_bytes[i : i + CHUNK_SIZE]
+        chunks.append(base64.b64encode(chunk_data).decode("ascii"))
+
+    total_chunks = len(chunks)
+    logger.info(f"Splitting update into {total_chunks} chunks")
+
+    init_payload = {
+        "total_chunks": total_chunks,
+        "total_size": total_size,
+        "metadata": {},
+        "encrypted_info": None,
+        "ui_metadata": conversation_data.get("ui_metadata"),
+        "share_id": share_id,
+        "operation": "update",
+    }
+
+    def headers_provider() -> Dict[str, str]:
+        # Keep bearer auth when available; required for create, optional for update.
+        headers = get_auth_headers()
+        headers["X-Token"] = token
+        headers["X-Expected-Version"] = str(expected_version)
+        return headers
+
+    init_headers = headers_provider()
+    init_headers["Content-Type"] = "application/json"
+
+    response = httpx.post(
+        f"{backend_url}/api/share/chunk/init",
+        json=init_payload,
+        headers=init_headers,
+        timeout=30.0,
+    )
+    response.raise_for_status()
+    init_result = response.json()
+    upload_id = init_result["upload_id"]
+
+    result = _upload_chunks_and_complete(
+        chunks,
+        upload_id,
+        backend_url,
+        progress_callback=None,
+        headers_provider=headers_provider,
+    )
+    if not result:
+        raise RuntimeError("Failed to complete chunked update")
+    return result
 
 
 def _chunked_upload(
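
Note: _upload_chunks_and_complete now returns the final server response (or None on failure) instead of nothing, and wraps every POST in _post_with_retries, which retries transient failures up to three attempts with exponential backoff (0.5s, then 1s, between attempts). The new _chunked_update_share_content splits the serialized conversation into base64 chunks before the init/upload/complete sequence. A sketch of that split and of the reassembly the backend presumably performs on /api/share/chunk/complete (CHUNK_SIZE here is an assumption):

    import base64
    import json

    CHUNK_SIZE = 1024 * 1024  # assumed 1MB; the module defines its own constant

    def split_chunks(conversation_data: dict) -> list:
        # Serialize once, then base64-encode fixed-size byte slices so each
        # chunk travels as a plain JSON string field.
        data = json.dumps(conversation_data).encode("utf-8")
        return [
            base64.b64encode(data[i : i + CHUNK_SIZE]).decode("ascii")
            for i in range(0, len(data), CHUNK_SIZE)
        ]

    def join_chunks(chunks: list) -> dict:
        # Mirror image of split_chunks; order matters, hence chunk_index.
        return json.loads(b"".join(base64.b64decode(c) for c in chunks))

    doc = {"messages": ["x" * 3_000_000]}
    assert join_chunks(split_chunks(doc)) == doc
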
@@ -1857,8 +1956,11 @@ def _chunked_upload(
     if progress_callback:
         progress_callback(0, total_chunks + 2, "Initializing chunked upload...")
 
-    # Get auth headers for Bearer token authentication
-    auth_headers = get_auth_headers()
+    def headers_provider() -> Dict[str, str]:
+        # Refresh token if needed between chunks.
+        headers = get_auth_headers()
+        headers["Content-Type"] = "application/json"
+        return headers
 
     # Step 1: Initialize upload session (now returns share_url immediately)
     try:
@@ -1877,7 +1979,7 @@ def _chunked_upload(
         response = httpx.post(
             f"{backend_url}/api/share/chunk/init",
             json=init_payload,
-            headers=auth_headers,
+            headers=headers_provider(),
             timeout=30.0,
         )
         response.raise_for_status()
@@ -1902,7 +2004,7 @@ def _chunked_upload(
         # but user already has the share URL displayed
         thread = threading.Thread(
             target=_upload_chunks_and_complete,
-            args=(chunks, upload_id, backend_url, None, auth_headers),  # No callback in background
+            args=(chunks, upload_id, backend_url, None, headers_provider),  # No callback in background
             daemon=False,  # Important: let thread complete before process exits
         )
         thread.start()
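
Note: the background thread now receives headers_provider, a callable, rather than a pre-built auth_headers dict. A background upload can outlive the access token that was valid at init time, so each request re-invokes the provider and picks up whatever get_auth_headers() returns at that moment. The pattern in isolation (get_fresh_token stands in for the package's real auth helper):

    from typing import Callable, Dict

    def make_headers_provider(get_fresh_token: Callable[[], str]) -> Callable[[], Dict[str, str]]:
        def provider() -> Dict[str, str]:
            # Evaluated per request, not once per upload.
            return {
                "Authorization": f"Bearer {get_fresh_token()}",
                "Content-Type": "application/json",
            }
        return provider

    provider = make_headers_provider(lambda: "fresh-token")
    assert provider()["Authorization"] == "Bearer fresh-token"
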
@@ -1917,14 +2019,17 @@ def _chunked_upload(
         }
 
     # Foreground mode: upload chunks synchronously
-    _upload_chunks_and_complete(chunks, upload_id, backend_url, progress_callback, auth_headers)
-
-    return {
-        "share_id": share_id,
-        "share_url": share_url,
-        "admin_token": admin_token,
-        "expiry_at": expiry_at,
-    }
+    result = _upload_chunks_and_complete(
+        chunks, upload_id, backend_url, progress_callback, headers_provider
+    )
+    if not result:
+        raise RuntimeError("Failed to complete chunked upload")
+    # Preserve init fields if server didn't echo them
+    result.setdefault("share_id", share_id)
+    result.setdefault("share_url", share_url)
+    result.setdefault("admin_token", admin_token)
+    result.setdefault("expiry_at", expiry_at)
+    return result
 
 
 def upload_to_backend_unencrypted(
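
Note: in foreground mode the function now propagates the /chunk/complete response and backfills it with the values captured at init time. setdefault only fills keys the server omitted, so a server-echoed field always wins:

    result = {"share_url": "https://example.invalid/s/abc"}  # hypothetical server response
    result.setdefault("share_url", "fallback")  # kept: server already set it
    result.setdefault("share_id", "abc")        # filled: server omitted it
    assert result == {"share_url": "https://example.invalid/s/abc", "share_id": "abc"}
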
@@ -2032,8 +2137,10 @@ def _chunked_upload_unencrypted(
     if progress_callback:
         progress_callback(0, total_chunks + 2, "Initializing chunked upload...")
 
-    # Get auth headers for Bearer token authentication
-    auth_headers = get_auth_headers()
+    def headers_provider() -> Dict[str, str]:
+        headers = get_auth_headers()
+        headers["Content-Type"] = "application/json"
+        return headers
 
     # Step 1: Initialize upload session (now returns share_url immediately)
     try:
@@ -2048,7 +2155,7 @@ def _chunked_upload_unencrypted(
         response = httpx.post(
             f"{backend_url}/api/share/chunk/init",
             json=init_payload,
-            headers=auth_headers,
+            headers=headers_provider(),
             timeout=30.0,
         )
         response.raise_for_status()
@@ -2073,7 +2180,7 @@ def _chunked_upload_unencrypted(
         # but user already has the share URL displayed
         thread = threading.Thread(
             target=_upload_chunks_and_complete,
-            args=(chunks, upload_id, backend_url, None, auth_headers),  # No callback in background
+            args=(chunks, upload_id, backend_url, None, headers_provider),  # No callback in background
             daemon=False,  # Important: let thread complete before process exits
         )
         thread.start()
@@ -2088,14 +2195,16 @@ def _chunked_upload_unencrypted(
         }
 
     # Foreground mode: upload chunks synchronously
-    _upload_chunks_and_complete(chunks, upload_id, backend_url, progress_callback, auth_headers)
-
-    return {
-        "share_id": share_id,
-        "share_url": share_url,
-        "admin_token": admin_token,
-        "expiry_at": expiry_at,
-    }
+    result = _upload_chunks_and_complete(
+        chunks, upload_id, backend_url, progress_callback, headers_provider
+    )
+    if not result:
+        raise RuntimeError("Failed to complete chunked upload")
+    result.setdefault("share_id", share_id)
+    result.setdefault("share_url", share_url)
+    result.setdefault("admin_token", admin_token)
+    result.setdefault("expiry_at", expiry_at)
+    return result
 
 
 def clean_text_for_prompt(text: str) -> str:
@@ -3093,6 +3202,10 @@ def export_shares_interactive_command(
         if not json_output:
             print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
         return 1
+    if not get_auth_headers():
+        if not json_output:
+            print("Error: Login expired. Please run 'aline login' again.", file=sys.stderr)
+        return 1
 
     # Get backend URL
     if backend_url is None:
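
Note: this guard (repeated in export_agent_shares_command below) distinguishes "never logged in" from "logged in, but the access token expired and could not be refreshed": is_logged_in() can still be True in the second case, while get_auth_headers() comes back empty. A runnable sketch of the two-step check (the auth helpers are stand-ins returning assumed values):

    import sys

    def is_logged_in() -> bool:
        return True  # stand-in: a stale session still counts as "logged in"

    def get_auth_headers() -> dict:
        return {}  # stand-in: empty when the token expired and refresh failed

    def require_auth(json_output: bool = False):
        if not is_logged_in():
            if not json_output:
                print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
            return 1
        if not get_auth_headers():
            if not json_output:
                print("Error: Login expired. Please run 'aline login' again.", file=sys.stderr)
            return 1
        return None

    assert require_auth() == 1  # stale session: logged in, but no usable token
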
@@ -3784,6 +3897,12 @@ def export_agent_shares_command(
         if not json_output:
             print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
         return 1
+    # is_logged_in() can be true with an expired token + refresh_token; ensure we actually have an access token
+    # before attempting uploads (otherwise the server returns 401/403 and the UX is confusing).
+    if not get_auth_headers():
+        if not json_output:
+            print("Error: Login expired. Please run 'aline login' again.", file=sys.stderr)
+        return 1
 
     _progress("Fetching agent info...")
 
@@ -3990,14 +4109,18 @@ def export_agent_shares_command(
                 metadata=metadata,
                 backend_url=backend_url,
                 ui_metadata=conversation_data.get("ui_metadata"),
-                background=True,
+                background=False,
             )
         else:
+            def upload_progress(current: int, total: int, message: str) -> None:
+                _progress(f"{message} ({current}/{total})")
+
             result = upload_to_backend_unencrypted(
                 conversation_data=conversation_data,
                 metadata=metadata,
                 backend_url=backend_url,
-                background=True,
+                progress_callback=upload_progress,
+                background=False,
             )
     except Exception as e:
         if not json_output:
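
Note: both upload calls switch from background=True to background=False, so the agent-share export now blocks until the chunks are confirmed instead of handing off to a background thread; the unencrypted path additionally surfaces progress through upload_progress, whose (current, total, message) contract matches the progress_callback calls in _upload_chunks_and_complete. The wiring, in isolation:

    def _progress(message: str) -> None:
        print(message)  # stand-in for the command's real progress printer

    def upload_progress(current: int, total: int, message: str) -> None:
        _progress(f"{message} ({current}/{total})")

    upload_progress(1, 5, "Uploading chunk 1/3...")  # prints: Uploading chunk 1/3... (1/5)
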
@@ -47,6 +47,7 @@ else:
 def download_share_data(
     share_url: str,
     password: Optional[str] = None,
+    cache_buster: Optional[str] = None,
 ) -> Dict[str, Any]:
     """
     Download share data from a share URL.
@@ -62,7 +63,10 @@ def download_share_data(
         {"success": False, "error": str} on failure
     """
     if not HTTPX_AVAILABLE:
-        return {"success": False, "error": "httpx package not installed. Install with: pip install httpx"}
+        return {
+            "success": False,
+            "error": "httpx package not installed. Install with: pip install httpx",
+        }
 
     share_id = extract_share_id(share_url)
     if not share_id:
@@ -101,14 +105,16 @@ def download_share_data(
 
     # Download export data
     try:
-        export_response = httpx.get(
-            f"{backend_url}/api/share/{share_id}/export", headers=headers, timeout=30.0
-        )
+        export_url = f"{backend_url}/api/share/{share_id}/export"
+        if cache_buster:
+            export_url = f"{export_url}?cache_bust={cache_buster}"
+
+        export_response = httpx.get(export_url, headers=headers, timeout=30.0)
         export_data = export_response.json()
 
         if export_response.status_code == 413 or export_data.get("needs_chunked_download"):
             total_chunks = export_data.get("total_chunks", 1)
-            raw_data = _download_chunks(backend_url, share_id, headers, total_chunks)
+            raw_data = _download_chunks(backend_url, share_id, headers, total_chunks, cache_buster)
             conversation_data = json.loads(raw_data)
             export_data = {
                 "success": True,
@@ -494,7 +500,9 @@ def import_v2_data(
         share_url=share_url,
         commit_hashes=[],
         # V18: user identity (with backward compatibility for old format)
-        created_by=event_data.get("created_by") or event_data.get("uid") or event_data.get("creator_id"),
+        created_by=event_data.get("created_by")
+        or event_data.get("uid")
+        or event_data.get("creator_id"),
         shared_by=config.uid,  # Current user is the importer
     )
 
@@ -603,7 +611,9 @@ def import_session_with_turns(
         summary_locked_until=None,
         summary_error=None,
         # V18: user identity (with backward compatibility for old format)
-        created_by=session_data.get("created_by") or session_data.get("uid") or session_data.get("creator_id"),
+        created_by=session_data.get("created_by")
+        or session_data.get("uid")
+        or session_data.get("creator_id"),
         shared_by=config.uid,  # Current user is the importer
     )
 
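
Note: both import paths reflow the created_by fallback chain onto one operand per line; behavior is unchanged. The chain prefers the current key and falls back to the legacy ones, e.g.:

    session_data = {"creator_id": "u_123"}  # old export: only the legacy key present
    created_by = (
        session_data.get("created_by")
        or session_data.get("uid")
        or session_data.get("creator_id")
    )
    assert created_by == "u_123"
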
@@ -833,7 +843,11 @@ def generate_content_hash(messages: List[Dict]) -> str:
 
 
 def _download_chunks(
-    backend_url: str, share_id: str, headers: Dict[str, str], total_chunks: int
+    backend_url: str,
+    share_id: str,
+    headers: Dict[str, str],
+    total_chunks: int,
+    cache_buster: Optional[str] = None,
 ) -> str:
     """
     Download data in chunks and combine them.
@@ -854,8 +868,11 @@ def _download_chunks(
         task = progress.add_task("[cyan]Downloading chunks...", total=total_chunks)
 
         for i in range(total_chunks):
+            url = f"{backend_url}/api/share/{share_id}/export?chunk={i}"
+            if cache_buster:
+                url = f"{url}&cache_bust={cache_buster}"
             chunk_response = httpx.get(
-                f"{backend_url}/api/share/{share_id}/export?chunk={i}",
+                url,
                 headers=headers,
                 timeout=60.0,
             )
@@ -876,8 +893,11 @@ def _download_chunks(
     for i in range(total_chunks):
         print(f"Downloading chunk {i + 1}/{total_chunks}...")
 
+        url = f"{backend_url}/api/share/{share_id}/export?chunk={i}"
+        if cache_buster:
+            url = f"{url}&cache_bust={cache_buster}"
         chunk_response = httpx.get(
-            f"{backend_url}/api/share/{share_id}/export?chunk={i}",
+            url,
             headers=headers,
             timeout=60.0,
         )
realign/commands/init.py CHANGED
@@ -836,6 +836,22 @@ def init_command(
     console.print(f" Tmux: [cyan]{result.get('tmux_conf', 'N/A')}[/cyan]")
     console.print(f" Skills: [cyan]{result.get('skills_path', 'N/A')}[/cyan]")
 
+    # Codex compatibility note (best-effort).
+    # We rely on the Rust Codex CLI notify hook to avoid expensive polling. If the installed
+    # Codex binary is legacy/unsupported, warn and suggest upgrading.
+    try:
+        from ..codex_hooks.notify_hook_installer import codex_cli_supports_notify_hook
+
+        supported = codex_cli_supports_notify_hook()
+        if supported is False:
+            console.print("\n[yellow]![/yellow] Codex CLI detected but does not support notify hook.")
+            console.print(
+                "[dim]Tip: update to the Rust Codex CLI to enable reliable, event-driven Codex imports (no polling).[/dim]"
+            )
+        # If Codex isn't installed (None), stay silent.
+    except Exception:
+        pass
+
     hooks_installed = result.get("hooks_installed") or []
     if hooks_installed:
         console.print(f" Hooks: [cyan]{', '.join(hooks_installed)}[/cyan]")