aline-ai 0.6.5__py3-none-any.whl → 0.6.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/METADATA +1 -1
  2. {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/RECORD +41 -34
  3. realign/__init__.py +1 -1
  4. realign/agent_names.py +79 -0
  5. realign/claude_hooks/stop_hook.py +3 -0
  6. realign/claude_hooks/terminal_state.py +43 -1
  7. realign/claude_hooks/user_prompt_submit_hook.py +3 -0
  8. realign/cli.py +62 -0
  9. realign/codex_detector.py +18 -3
  10. realign/codex_home.py +65 -16
  11. realign/codex_terminal_linker.py +18 -7
  12. realign/commands/agent.py +109 -0
  13. realign/commands/doctor.py +74 -1
  14. realign/commands/export_shares.py +448 -0
  15. realign/commands/import_shares.py +203 -1
  16. realign/commands/search.py +58 -29
  17. realign/commands/sync_agent.py +347 -0
  18. realign/dashboard/app.py +9 -9
  19. realign/dashboard/clipboard.py +54 -0
  20. realign/dashboard/screens/__init__.py +4 -0
  21. realign/dashboard/screens/agent_detail.py +333 -0
  22. realign/dashboard/screens/create_agent_info.py +244 -0
  23. realign/dashboard/screens/event_detail.py +6 -27
  24. realign/dashboard/styles/dashboard.tcss +22 -28
  25. realign/dashboard/tmux_manager.py +36 -10
  26. realign/dashboard/widgets/__init__.py +2 -2
  27. realign/dashboard/widgets/agents_panel.py +1248 -0
  28. realign/dashboard/widgets/events_table.py +4 -27
  29. realign/dashboard/widgets/sessions_table.py +4 -27
  30. realign/db/base.py +69 -0
  31. realign/db/locks.py +4 -0
  32. realign/db/schema.py +111 -2
  33. realign/db/sqlite_db.py +360 -2
  34. realign/events/agent_summarizer.py +157 -0
  35. realign/events/session_summarizer.py +25 -0
  36. realign/watcher_core.py +193 -5
  37. realign/worker_core.py +59 -1
  38. realign/dashboard/widgets/terminal_panel.py +0 -1653
  39. {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/WHEEL +0 -0
  40. {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/entry_points.txt +0 -0
  41. {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/licenses/LICENSE +0 -0
  42. {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/top_level.txt +0 -0
@@ -15,6 +15,7 @@ import secrets
15
15
  import hashlib
16
16
  import base64
17
17
  import threading
18
+ import shutil
18
19
  from urllib.parse import urlparse
19
20
  from collections import defaultdict
20
21
  from dataclasses import dataclass
@@ -1675,6 +1676,49 @@ def _extend_share_expiry(
1675
1676
  return None
1676
1677
 
1677
1678
 
1679
def _update_share_content(
    backend_url: str,
    share_id: str,
    token: str,
    conversation_data: dict,
    expected_version: int = 0,
) -> dict:
    """
    Push updated content to an existing share via PUT /api/share/{id}.

    Args:
        backend_url: Backend server URL
        share_id: Share ID on the server
        token: Admin or contributor token for auth
        conversation_data: Full conversation data to replace current content
        expected_version: Optimistic locking version (409 on mismatch)

    Returns:
        Response dict with success, version fields

    Raises:
        RuntimeError on upload failure
        httpx.HTTPStatusError with 409 status on version conflict
    """
    if not HTTPX_AVAILABLE:
        raise RuntimeError("httpx package not installed. Run: pip install httpx")

    # Token + optimistic-locking version travel as headers; the body is the
    # full replacement payload.
    request_headers = {
        "X-Token": token,
        "X-Expected-Version": str(expected_version),
        "Content-Type": "application/json",
    }

    reply = httpx.put(
        f"{backend_url}/api/share/{share_id}",
        headers=request_headers,
        json={"conversation_data": conversation_data},
        timeout=60.0,
    )
    # Surfaces 409 (version conflict) and other HTTP errors to the caller.
    reply.raise_for_status()
    return reply.json()
1678
1722
  def _standard_upload(
1679
1723
  encrypted_payload: dict,
1680
1724
  metadata: dict,
@@ -2523,6 +2567,59 @@ def display_share_result(
2523
2567
  console.print(f"[bold]👁️ Max Views:[/bold] {max_views}\n")
2524
2568
 
2525
2569
 
2570
+ def _run_clipboard_command(command: list[str], text: str) -> bool:
2571
+ try:
2572
+ return (
2573
+ subprocess.run(
2574
+ command,
2575
+ input=text,
2576
+ text=True,
2577
+ capture_output=False,
2578
+ check=False,
2579
+ ).returncode
2580
+ == 0
2581
+ )
2582
+ except Exception:
2583
+ return False
2584
+
2585
+
2586
+ def _copy_text_to_clipboard(text: str) -> bool:
2587
+ if not text:
2588
+ return False
2589
+
2590
+ if shutil.which("pbcopy"):
2591
+ if _run_clipboard_command(["pbcopy"], text):
2592
+ return True
2593
+
2594
+ if os.name == "nt" and shutil.which("clip"):
2595
+ if _run_clipboard_command(["clip"], text):
2596
+ return True
2597
+
2598
+ if shutil.which("wl-copy"):
2599
+ if _run_clipboard_command(["wl-copy"], text):
2600
+ return True
2601
+
2602
+ if shutil.which("xclip"):
2603
+ if _run_clipboard_command(["xclip", "-selection", "clipboard"], text):
2604
+ return True
2605
+
2606
+ if shutil.which("xsel"):
2607
+ if _run_clipboard_command(["xsel", "--clipboard", "--input"], text):
2608
+ return True
2609
+
2610
+ return False
2611
+
2612
+
2613
+ def _copy_share_to_clipboard(share_url: Optional[str], slack_message: Optional[str]) -> bool:
2614
+ if not share_url:
2615
+ return False
2616
+ if slack_message:
2617
+ text_to_copy = f"{slack_message}\n\n{share_url}"
2618
+ else:
2619
+ text_to_copy = share_url
2620
+ return _copy_text_to_clipboard(text_to_copy)
2621
+
2622
+
2526
2623
  def _export_by_events_interactive(
2527
2624
  all_commits: List,
2528
2625
  shadow_git: Path,
@@ -3616,7 +3713,358 @@ def export_shares_interactive_command(
3616
3713
  max_views=max_views,
3617
3714
  admin_token=result.get("admin_token"),
3618
3715
  )
3716
+ copied = _copy_share_to_clipboard(
3717
+ result.get("share_url"),
3718
+ ui_metadata.get("slack_message") if ui_metadata else None,
3719
+ )
3720
+ if copied:
3721
+ print("📋 Copied Slack message and share link to clipboard.")
3619
3722
 
3620
3723
  if not json_output:
3621
3724
  logger.info(f"======== Interactive export completed: {result['share_url']} ========")
3622
3725
  return 0
3726
+
3727
+
3728
def export_agent_shares_command(
    agent_id: str,
    password: Optional[str] = None,
    expiry_days: int = 7,
    max_views: int = 100,
    backend_url: Optional[str] = None,
    enable_mcp: bool = True,
    json_output: bool = False,
    compact: bool = True,
    max_tool_result_chars: int = 8_000,
    max_tool_command_chars: int = 2_000,
    progress_callback: Optional[Callable[[str], None]] = None,
) -> int:
    """
    Export all sessions associated with an agent and generate a share link.

    This function creates a synthetic event structure from agent sessions,
    generates UI metadata (Slack message), uploads to backend, and returns
    the share link.

    If the agent was already shared without a password, the function first
    tries to sync the existing share instead of creating a new link.

    Args:
        agent_id: The agent_info ID to export sessions for
        password: Encryption password (if None, no encryption)
        expiry_days: Share expiry in days
        max_views: Maximum number of views
        backend_url: Backend server URL (uses config default if None)
        enable_mcp: Whether to include MCP instructions
        json_output: If True, output JSON format
        compact: Whether to compact the export data
        max_tool_result_chars: Max chars for tool results (with compact)
        max_tool_command_chars: Max chars for tool commands (with compact)
        progress_callback: Optional callback for progress updates (message: str) -> None

    Returns:
        0 on success, 1 on error
    """
    # Tiny shim so call sites don't need to null-check the callback.
    def _progress(msg: str) -> None:
        if progress_callback:
            progress_callback(msg)

    if not json_output:
        logger.info(f"======== Export agent shares command started for agent {agent_id} ========")

    # Check dependencies
    if not CRYPTO_AVAILABLE:
        if not json_output:
            print("Error: cryptography package not installed", file=sys.stderr)
        return 1

    if not HTTPX_AVAILABLE:
        if not json_output:
            print("Error: httpx package not installed", file=sys.stderr)
        return 1

    # Check authentication
    if not is_logged_in():
        if not json_output:
            print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
        return 1

    _progress("Fetching agent info...")

    # Get backend URL
    if backend_url is None:
        from ..config import ReAlignConfig

        config = ReAlignConfig.load()
        backend_url = config.share_backend_url

    # Get database
    from ..db import get_database

    db = get_database()

    # Get agent info
    agent_info = db.get_agent_info(agent_id)
    if not agent_info:
        if not json_output:
            print(f"Error: Agent not found: {agent_id}", file=sys.stderr)
        return 1

    # Check for existing share (re-share → sync instead of new link).
    # Only for unencrypted shares: a password request always creates a new link.
    if (
        not password
        and agent_info.share_url
        and agent_info.share_id
        and (agent_info.share_admin_token or agent_info.share_contributor_token)
    ):
        _progress("Agent already shared, syncing...")
        try:
            from .sync_agent import sync_agent_command

            sync_result = sync_agent_command(
                agent_id=agent_id,
                backend_url=backend_url,
                progress_callback=progress_callback,
            )
            if sync_result.get("success"):
                # Extend expiry if we have admin token; failure here is
                # non-fatal — the sync itself already succeeded.
                if agent_info.share_admin_token:
                    try:
                        _extend_share_expiry(
                            backend_url=backend_url,
                            share_id=agent_info.share_id,
                            admin_token=agent_info.share_admin_token,
                            expiry_days=expiry_days,
                        )
                    except Exception as ext_err:
                        logger.warning(f"Failed to extend share expiry: {ext_err}")

                if json_output:
                    output_data = {
                        "agent_id": agent_id,
                        "agent_name": agent_info.name,
                        "share_link": agent_info.share_url,
                        "synced": True,
                        "sessions_pulled": sync_result.get("sessions_pulled", 0),
                        "sessions_pushed": sync_result.get("sessions_pushed", 0),
                    }
                    print(json.dumps(output_data, ensure_ascii=False, indent=2))
                else:
                    pulled = sync_result.get("sessions_pulled", 0)
                    pushed = sync_result.get("sessions_pushed", 0)
                    print(f"\n🔄 Synced agent: {agent_info.name}")
                    print(f" Pulled {pulled} session(s), pushed {pushed} session(s)")
                    print(f"🔗 Share link: {agent_info.share_url}")
                    copied = _copy_share_to_clipboard(agent_info.share_url, None)
                    if copied:
                        print("📋 Copied share link to clipboard.")
                return 0
            else:
                # Sync failed: log and fall through to creating a fresh share.
                err = sync_result.get("error", "Unknown sync error")
                logger.warning(f"Sync failed, falling through to new share: {err}")
                if not json_output:
                    print(f"⚠️ Sync failed ({err}), creating new share link...", file=sys.stderr)
        except ImportError:
            logger.warning("sync_agent module not available, creating new share")
        except Exception as e:
            logger.warning(f"Sync failed, falling through to new share: {e}")
            if not json_output:
                print(f"⚠️ Sync failed ({e}), creating new share link...", file=sys.stderr)

    # Get sessions for this agent
    session_records = db.get_sessions_by_agent_id(agent_id)
    if not session_records:
        if not json_output:
            print(f"Error: Agent has no sessions to share", file=sys.stderr)
        return 1

    _progress(f"Found {len(session_records)} session(s)")

    # Build exportable sessions
    selected_sessions = build_exportable_sessions_from_records(session_records)
    if not selected_sessions:
        if not json_output:
            print("Error: No sessions found for agent", file=sys.stderr)
        return 1

    # Create a synthetic event structure for the agent
    # We use the agent name/description as event title/description
    event_title = agent_info.name or "Agent Sessions"
    event_description = agent_info.description or f"Sessions from agent: {agent_info.name}"

    # Create a synthetic ExportableEvent; the "agent-" event_id prefix is
    # what the import side uses to recover the agent id.
    synthetic_event = ExportableEvent(
        index=1,
        event_id=f"agent-{agent_id}",
        title=event_title,
        description=event_description,
        event_type="agent",
        status="active",
        updated_at=datetime.now(timezone.utc),
        sessions=session_records,
    )

    # Build conversation data
    _progress("Building conversation data...")

    username = os.environ.get("USER") or os.environ.get("USERNAME") or "anonymous"

    compaction = None
    if compact:
        compaction = ExportCompactionConfig(
            enabled=True,
            max_tool_result_chars=max_tool_result_chars,
            max_tool_command_chars=max_tool_command_chars,
        )

    try:
        conversation_data = build_enhanced_conversation_data(
            selected_event=synthetic_event,
            selected_sessions=selected_sessions,
            username=username,
            db=db,
            compaction=compaction,
        )
    except Exception as e:
        if not json_output:
            print(f"Error: Failed to build conversation data: {e}", file=sys.stderr)
        logger.error(f"Failed to build conversation data: {e}", exc_info=True)
        return 1

    if not conversation_data.get("sessions"):
        if not json_output:
            print("Error: No sessions found in conversation data", file=sys.stderr)
        return 1

    # Generate UI metadata with LLM
    _progress("Generating share message...")

    from ..config import ReAlignConfig

    config = ReAlignConfig.load()
    ui_metadata, _ = generate_ui_metadata_with_llm(
        conversation_data,
        [],  # No commits
        event_title=event_title,
        event_description=event_description,
        provider=config.llm_provider,
        preset_id="default",
        silent=json_output,
    )

    if ui_metadata:
        conversation_data["ui_metadata"] = ui_metadata
    else:
        # LLM generation failed or was unavailable — fall back to the
        # agent's own name/description.
        conversation_data["ui_metadata"] = {
            "title": event_title,
            "description": event_description,
        }

    # Add MCP instructions if enabled
    if enable_mcp:
        conversation_data["ui_metadata"]["mcp_instructions"] = {
            "tool_name": "ask_shared_conversation",
            "usage": "Local AI agents can install the aline MCP server and use the 'ask_shared_conversation' tool to query this conversation programmatically.",
        }

    # Include sync_metadata placeholder (contributor_token will be added after upload for unencrypted shares)
    if not password:
        conversation_data["sync_metadata"] = {
            "contributor_token": None,  # Will be populated after upload
            "sync_version": 0,
        }

    # Upload to backend (no encryption for agent shares by default)
    _progress("Uploading to cloud...")

    metadata = {
        "username": username,
        "expiry_days": expiry_days,
        "max_views": max_views,
    }

    try:
        if password:
            encrypted_payload = encrypt_conversation_data(conversation_data, password)
            result = upload_to_backend(
                encrypted_payload=encrypted_payload,
                metadata=metadata,
                backend_url=backend_url,
                ui_metadata=conversation_data.get("ui_metadata"),
                background=True,
            )
        else:
            result = upload_to_backend_unencrypted(
                conversation_data=conversation_data,
                metadata=metadata,
                backend_url=backend_url,
                background=True,
            )
    except Exception as e:
        if not json_output:
            print(f"Error: Upload failed: {e}", file=sys.stderr)
        logger.error(f"Upload failed: {e}", exc_info=True)
        return 1

    share_url = result.get("share_url")
    slack_message = ui_metadata.get("slack_message") if ui_metadata else None

    # Store sync metadata for unencrypted shares
    if not password and share_url:
        share_id_result = result.get("share_id") or _extract_share_id_from_url(share_url)
        admin_token = result.get("admin_token")
        contributor_token = result.get("contributor_token")
        expiry_at = result.get("expiry_at")

        if share_id_result:
            try:
                db.update_agent_sync_metadata(
                    agent_id,
                    share_id=share_id_result,
                    share_url=share_url,
                    share_admin_token=admin_token,
                    share_contributor_token=contributor_token,
                    share_expiry_at=expiry_at,
                    last_synced_at=datetime.now(timezone.utc).isoformat(),
                    sync_version=0,
                )
            except Exception as e:
                logger.warning(f"Failed to store sync metadata: {e}")

            # Re-upload with contributor_token embedded in sync_metadata
            # so importers can get it
            if contributor_token:
                try:
                    conversation_data["sync_metadata"] = {
                        "contributor_token": contributor_token,
                        "sync_version": 0,
                    }
                    _update_share_content(
                        backend_url=backend_url,
                        share_id=share_id_result,
                        token=admin_token or contributor_token,
                        conversation_data=conversation_data,
                        expected_version=0,
                    )
                except Exception as e:
                    logger.warning(f"Failed to re-upload with sync metadata: {e}")

    # Output results
    if json_output:
        # NOTE(review): this echoes the plaintext password back in the JSON
        # output — confirm callers expect that.
        output_data = {
            "agent_id": agent_id,
            "agent_name": agent_info.name,
            "share_link": share_url,
            "slack_message": slack_message,
            "session_count": len(selected_sessions),
            "password": password,
        }
        print(json.dumps(output_data, ensure_ascii=False, indent=2))
    else:
        print(f"\n✅ Shared {len(selected_sessions)} session(s) from agent: {agent_info.name}")
        print(f"🔗 Share link: {share_url}")
        if slack_message:
            print(f"\n📝 Slack message:\n{slack_message}")
        copied = _copy_share_to_clipboard(share_url, slack_message)
        if copied:
            print("📋 Copied Slack message and share link to clipboard.")

    if not json_output:
        logger.info(f"======== Agent export completed: {share_url} ========")
    return 0
@@ -44,6 +44,206 @@ else:
44
44
  console = None
45
45
 
46
46
 
47
def download_share_data(
    share_url: str,
    password: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Download share data from a share URL.

    Extracts download logic (URL parse, auth, fetch) into a reusable function.

    Args:
        share_url: Full share URL (e.g., https://realign-server.vercel.app/share/abc123)
        password: Password for encrypted shares

    Returns:
        {"success": True, "data": conversation_data} on success
        {"success": False, "error": str} on failure
    """
    if not HTTPX_AVAILABLE:
        return {"success": False, "error": "httpx package not installed. Install with: pip install httpx"}

    share_id = extract_share_id(share_url)
    if not share_id:
        return {"success": False, "error": f"Invalid share URL format: {share_url}"}

    logger.info(f"download_share_data: share_id={share_id}")

    config = ReAlignConfig.load()
    backend_url = config.share_backend_url or "https://realign-server.vercel.app"

    # Get share info
    try:
        info_response = httpx.get(f"{backend_url}/api/share/{share_id}/info", timeout=10.0)
        info_response.raise_for_status()
        info = info_response.json()
    except Exception as e:
        return {"success": False, "error": f"Failed to fetch share info: {e}"}

    # Authenticate: password-protected shares send a SHA-256 hash of the
    # password; open shares request a bearer session token instead.
    if info.get("requires_password"):
        if not password:
            return {"success": False, "error": "This share requires a password"}
        password_hash = hashlib.sha256(password.encode()).hexdigest()
        headers = {"X-Password-Hash": password_hash}
    else:
        try:
            session_response = httpx.post(
                f"{backend_url}/api/share/{share_id}/session", timeout=10.0
            )
            session_response.raise_for_status()
            session_data = session_response.json()
            session_token = session_data.get("session_token")
            headers = {"Authorization": f"Bearer {session_token}"}
        except Exception as e:
            return {"success": False, "error": f"Failed to create session: {e}"}

    # Download export data
    try:
        export_response = httpx.get(
            f"{backend_url}/api/share/{share_id}/export", headers=headers, timeout=30.0
        )
        # NOTE: .json() is called before raise_for_status so a 413 body
        # can be inspected; a non-JSON error body raises here and is
        # reported through the except below.
        export_data = export_response.json()

        # Large shares are delivered in chunks; reassemble and parse them.
        if export_response.status_code == 413 or export_data.get("needs_chunked_download"):
            total_chunks = export_data.get("total_chunks", 1)
            raw_data = _download_chunks(backend_url, share_id, headers, total_chunks)
            conversation_data = json.loads(raw_data)
            export_data = {
                "success": True,
                "data": conversation_data,
                "metadata": export_data.get("metadata", {}),
            }
        else:
            export_response.raise_for_status()
    except Exception as e:
        return {"success": False, "error": f"Failed to download data: {e}"}

    if not export_data.get("success"):
        return {"success": False, "error": export_data.get("error", "Unknown error")}

    return {"success": True, "data": export_data["data"]}
128
+
129
def import_agent_from_share(
    share_url: str,
    password: Optional[str] = None,
    db: Optional[DatabaseInterface] = None,
) -> Dict[str, Any]:
    """
    Import an agent from a share link.

    Downloads share data, creates agent_info record, imports sessions with
    created_by/shared_by tracking, and links them to the agent.

    Args:
        share_url: Full share URL
        password: Password for encrypted shares
        db: Database instance (auto-created if None)

    Returns:
        {"success": True, "agent_id", "agent_name", "agent_description",
         "sessions_imported", "turns_imported"} on success
        {"success": False, "error": str} on failure
    """
    # NOTE(review): this env var is set process-wide and never restored —
    # confirm that suppressing auto-summaries for the rest of the process
    # is intentional.
    os.environ["REALIGN_DISABLE_AUTO_SUMMARIES"] = "1"

    result = download_share_data(share_url, password)
    if not result["success"]:
        return result

    conversation_data = result["data"]

    # Extract agent identity from share data
    event_data = conversation_data.get("event", {})
    event_id = event_data.get("event_id", "")

    # Agent ID: strip "agent-" prefix if present, otherwise generate new UUID
    if event_id.startswith("agent-"):
        agent_id = event_id[6:]
    else:
        agent_id = str(uuid_lib.uuid4())

    agent_name = event_data.get("title") or "Imported Agent"
    agent_description = event_data.get("description") or ""

    # Set up database
    if db is None:
        from ..db.sqlite_db import SQLiteDatabase

        config = ReAlignConfig.load()
        db_path = Path(config.sqlite_db_path).expanduser()
        db = SQLiteDatabase(db_path=db_path)

    # Create agent_info record
    try:
        db.get_or_create_agent_info(agent_id, name=agent_name)
        if agent_description:
            db.update_agent_info(agent_id, description=agent_description)
    except Exception as e:
        return {"success": False, "error": f"Failed to create agent info: {e}"}

    # Import sessions and link to agent
    sessions_data = conversation_data.get("sessions", [])
    total_sessions = 0
    total_turns = 0

    for session_data in sessions_data:
        # NOTE(review): when session_data has no "session_id", a fresh UUID
        # is generated here and later used for linking — confirm it matches
        # the id that import_session_with_turns actually stores.
        session_id = session_data.get("session_id") or generate_uuid()

        # Use existing import logic (handles created_by/shared_by)
        try:
            # Use a dummy event_id — we don't need event linkage for agent imports
            import_result = import_session_with_turns(
                session_data, event_id or agent_id, share_url, db, force=False
            )
            total_sessions += import_result["sessions"]
            total_turns += import_result["turns"]
        except Exception as e:
            # A failed session import skips the agent link below and moves on.
            logger.error(f"Failed to import session {session_id}: {e}")
            continue

        # Link session to agent
        try:
            db.update_session_agent_id(session_id, agent_id)
        except Exception as e:
            logger.error(f"Failed to link session {session_id} to agent: {e}")

    # Extract and store sync metadata (for unencrypted shares)
    sync_meta = conversation_data.get("sync_metadata", {})
    contributor_token = sync_meta.get("contributor_token")
    sync_enabled = False

    if contributor_token and not password:
        sync_enabled = True
        share_id = extract_share_id(share_url)
        try:
            db.update_agent_sync_metadata(
                agent_id,
                share_id=share_id,
                share_url=share_url,
                share_contributor_token=contributor_token,
                # No admin_token for importers
                # NOTE(review): naive local time here, while the export path
                # stores datetime.now(timezone.utc).isoformat() — confirm
                # which timestamp convention is intended.
                last_synced_at=datetime.now().isoformat(),
                sync_version=sync_meta.get("sync_version", 0),
            )
        except Exception as e:
            logger.warning(f"Failed to store sync metadata: {e}")
            sync_enabled = False

    return {
        "success": True,
        "agent_id": agent_id,
        "agent_name": agent_name,
        "agent_description": agent_description,
        "sessions_imported": total_sessions,
        "turns_imported": total_turns,
        "sync_enabled": sync_enabled,
        "share_url": share_url if sync_enabled else None,
    }
246
+
47
247
  def import_share_command(
48
248
  share_url: str,
49
249
  password: Optional[str] = None,
@@ -84,11 +284,13 @@ def import_share_command(
84
284
 
85
285
  logger.info(f"Extracted share_id: {share_id}")
86
286
 
87
- # 2. Get share info
287
+ # Use download_share_data helper for non-interactive password case
288
+ # For interactive mode, we still need the prompt flow
88
289
  config = ReAlignConfig.load()
89
290
  backend_url = config.share_backend_url or "https://realign-server.vercel.app"
90
291
  logger.info(f"Backend URL: {backend_url}")
91
292
 
293
+ # 2. Get share info and handle password prompt interactively
92
294
  try:
93
295
  if console:
94
296
  console.print(f"[cyan]Fetching share info from {backend_url}...[/cyan]")