aline-ai 0.6.6__py3-none-any.whl → 0.6.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -44,6 +44,206 @@ else:
     console = None
 
 
+def download_share_data(
+    share_url: str,
+    password: Optional[str] = None,
+) -> Dict[str, Any]:
+    """
+    Download share data from a share URL.
+
+    Extracts download logic (URL parse, auth, fetch) into a reusable function.
+
+    Args:
+        share_url: Full share URL (e.g., https://realign-server.vercel.app/share/abc123)
+        password: Password for encrypted shares
+
+    Returns:
+        {"success": True, "data": conversation_data} on success
+        {"success": False, "error": str} on failure
+    """
+    if not HTTPX_AVAILABLE:
+        return {"success": False, "error": "httpx package not installed. Install with: pip install httpx"}
+
+    share_id = extract_share_id(share_url)
+    if not share_id:
+        return {"success": False, "error": f"Invalid share URL format: {share_url}"}
+
+    logger.info(f"download_share_data: share_id={share_id}")
+
+    config = ReAlignConfig.load()
+    backend_url = config.share_backend_url or "https://realign-server.vercel.app"
+
+    # Get share info
+    try:
+        info_response = httpx.get(f"{backend_url}/api/share/{share_id}/info", timeout=10.0)
+        info_response.raise_for_status()
+        info = info_response.json()
+    except Exception as e:
+        return {"success": False, "error": f"Failed to fetch share info: {e}"}
+
+    # Authenticate
+    if info.get("requires_password"):
+        if not password:
+            return {"success": False, "error": "This share requires a password"}
+        password_hash = hashlib.sha256(password.encode()).hexdigest()
+        headers = {"X-Password-Hash": password_hash}
+    else:
+        try:
+            session_response = httpx.post(
+                f"{backend_url}/api/share/{share_id}/session", timeout=10.0
+            )
+            session_response.raise_for_status()
+            session_data = session_response.json()
+            session_token = session_data.get("session_token")
+            headers = {"Authorization": f"Bearer {session_token}"}
+        except Exception as e:
+            return {"success": False, "error": f"Failed to create session: {e}"}
+
+    # Download export data
+    try:
+        export_response = httpx.get(
+            f"{backend_url}/api/share/{share_id}/export", headers=headers, timeout=30.0
+        )
+        export_data = export_response.json()
+
+        if export_response.status_code == 413 or export_data.get("needs_chunked_download"):
+            total_chunks = export_data.get("total_chunks", 1)
+            raw_data = _download_chunks(backend_url, share_id, headers, total_chunks)
+            conversation_data = json.loads(raw_data)
+            export_data = {
+                "success": True,
+                "data": conversation_data,
+                "metadata": export_data.get("metadata", {}),
+            }
+        else:
+            export_response.raise_for_status()
+    except Exception as e:
+        return {"success": False, "error": f"Failed to download data: {e}"}
+
+    if not export_data.get("success"):
+        return {"success": False, "error": export_data.get("error", "Unknown error")}
+
+    return {"success": True, "data": export_data["data"]}
+
+
+def import_agent_from_share(
+    share_url: str,
+    password: Optional[str] = None,
+    db: Optional[DatabaseInterface] = None,
+) -> Dict[str, Any]:
+    """
+    Import an agent from a share link.
+
+    Downloads share data, creates agent_info record, imports sessions with
+    created_by/shared_by tracking, and links them to the agent.
+
+    Args:
+        share_url: Full share URL
+        password: Password for encrypted shares
+        db: Database instance (auto-created if None)
+
+    Returns:
+        {"success": True, "agent_id", "agent_name", "agent_description",
+         "sessions_imported", "turns_imported"} on success
+        {"success": False, "error": str} on failure
+    """
+    os.environ["REALIGN_DISABLE_AUTO_SUMMARIES"] = "1"
+
+    result = download_share_data(share_url, password)
+    if not result["success"]:
+        return result
+
+    conversation_data = result["data"]
+
+    # Extract agent identity from share data
+    event_data = conversation_data.get("event", {})
+    event_id = event_data.get("event_id", "")
+
+    # Agent ID: strip "agent-" prefix if present, otherwise generate new UUID
+    if event_id.startswith("agent-"):
+        agent_id = event_id[6:]
+    else:
+        agent_id = str(uuid_lib.uuid4())
+
+    agent_name = event_data.get("title") or "Imported Agent"
+    agent_description = event_data.get("description") or ""
+
+    # Set up database
+    if db is None:
+        from ..db.sqlite_db import SQLiteDatabase
+
+        config = ReAlignConfig.load()
+        db_path = Path(config.sqlite_db_path).expanduser()
+        db = SQLiteDatabase(db_path=db_path)
+
+    # Create agent_info record
+    try:
+        db.get_or_create_agent_info(agent_id, name=agent_name)
+        if agent_description:
+            db.update_agent_info(agent_id, description=agent_description)
+    except Exception as e:
+        return {"success": False, "error": f"Failed to create agent info: {e}"}
+
+    # Import sessions and link to agent
+    sessions_data = conversation_data.get("sessions", [])
+    total_sessions = 0
+    total_turns = 0
+
+    for session_data in sessions_data:
+        session_id = session_data.get("session_id") or generate_uuid()
+
+        # Use existing import logic (handles created_by/shared_by)
+        try:
+            # Use a dummy event_id — we don't need event linkage for agent imports
+            import_result = import_session_with_turns(
+                session_data, event_id or agent_id, share_url, db, force=False
+            )
+            total_sessions += import_result["sessions"]
+            total_turns += import_result["turns"]
+        except Exception as e:
+            logger.error(f"Failed to import session {session_id}: {e}")
+            continue
+
+        # Link session to agent
+        try:
+            db.update_session_agent_id(session_id, agent_id)
+        except Exception as e:
+            logger.error(f"Failed to link session {session_id} to agent: {e}")
+
+    # Extract and store sync metadata (for unencrypted shares)
+    sync_meta = conversation_data.get("sync_metadata", {})
+    contributor_token = sync_meta.get("contributor_token")
+    sync_enabled = False
+
+    if contributor_token and not password:
+        sync_enabled = True
+        share_id = extract_share_id(share_url)
+        try:
+            db.update_agent_sync_metadata(
+                agent_id,
+                share_id=share_id,
+                share_url=share_url,
+                share_contributor_token=contributor_token,
+                # No admin_token for importers
+                last_synced_at=datetime.now().isoformat(),
+                sync_version=sync_meta.get("sync_version", 0),
+            )
+        except Exception as e:
+            logger.warning(f"Failed to store sync metadata: {e}")
+            sync_enabled = False
+
+    return {
+        "success": True,
+        "agent_id": agent_id,
+        "agent_name": agent_name,
+        "agent_description": agent_description,
+        "sessions_imported": total_sessions,
+        "turns_imported": total_turns,
+        "sync_enabled": sync_enabled,
+        "share_url": share_url if sync_enabled else None,
+    }
+
+
 def import_share_command(
     share_url: str,
     password: Optional[str] = None,
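
Both helpers added above follow the module's convention of returning result dicts rather than raising, so callers branch on `"success"`. A minimal usage sketch (the share URL is illustrative; the key names follow the return shapes documented in the docstrings):

```python
# Illustrative sketch only -- exercising the two helpers added in this hunk.
share_url = "https://realign-server.vercel.app/share/abc123"  # example URL

result = download_share_data(share_url, password=None)  # pass a password for encrypted shares
if result["success"]:
    conversation_data = result["data"]

imported = import_agent_from_share(share_url)
if imported["success"]:
    print(
        f"Imported {imported['sessions_imported']} sessions "
        f"({imported['turns_imported']} turns) as agent {imported['agent_name']}"
    )
else:
    print(f"Import failed: {imported['error']}")
```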
@@ -84,11 +284,13 @@ def import_share_command(
 
     logger.info(f"Extracted share_id: {share_id}")
 
-    # 2. Get share info
+    # Use download_share_data helper for non-interactive password case
+    # For interactive mode, we still need the prompt flow
     config = ReAlignConfig.load()
     backend_url = config.share_backend_url or "https://realign-server.vercel.app"
     logger.info(f"Backend URL: {backend_url}")
 
+    # 2. Get share info and handle password prompt interactively
     try:
         if console:
             console.print(f"[cyan]Fetching share info from {backend_url}...[/cyan]")
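
The non-interactive authentication split in `download_share_data` mirrors the interactive flow here: password-protected shares send a SHA-256 digest header, while public shares first mint a bearer session token. A self-contained sketch of the header construction (the helper name is hypothetical; the header shapes are exactly those shown in the hunk above):

```python
import hashlib
from typing import Optional

def share_auth_headers(requires_password: bool, password: Optional[str],
                       session_token: Optional[str]) -> dict:
    """Hypothetical helper: build request headers the way download_share_data does."""
    if requires_password:
        if not password:
            raise ValueError("This share requires a password")
        # The server only ever sees the SHA-256 hex digest, never the plaintext.
        return {"X-Password-Hash": hashlib.sha256(password.encode()).hexdigest()}
    # Public shares: the token comes from POST {backend_url}/api/share/{share_id}/session.
    return {"Authorization": f"Bearer {session_token}"}
```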
@@ -0,0 +1,347 @@
+#!/usr/bin/env python3
+"""
+Sync agent command - Bidirectional sync for shared agents.
+
+Pull remote sessions, merge locally (union of sessions, dedup by content_hash),
+push merged result back. Uses optimistic locking via sync_version.
+
+Sync works with unencrypted shares only.
+"""
+
+import json
+import os
+import logging
+from datetime import datetime, timezone
+from typing import Optional, Dict, Any, Callable
+
+try:
+    import httpx
+
+    HTTPX_AVAILABLE = True
+except ImportError:
+    HTTPX_AVAILABLE = False
+
+from ..logging_config import setup_logger
+
+logger = setup_logger("realign.commands.sync_agent", "sync_agent.log")
+
+MAX_SYNC_RETRIES = 3
+
+
+def sync_agent_command(
+    agent_id: str,
+    backend_url: Optional[str] = None,
+    progress_callback: Optional[Callable[[str], None]] = None,
+) -> dict:
+    """
+    Sync an agent's sessions with the remote share.
+
+    Algorithm:
+        1. Load local state (agent_info, sessions, content hashes)
+        2. Pull remote state (full download via export endpoint)
+        3. Merge: union of sessions deduped by content_hash, last-write-wins for name/desc
+        4. Push merged state via PUT with optimistic locking
+        5. Update local sync metadata
+
+    Args:
+        agent_id: The agent_info ID to sync
+        backend_url: Backend server URL (uses config default if None)
+        progress_callback: Optional callback for progress updates
+
+    Returns:
+        {"success": True, "sessions_pulled": N, "sessions_pushed": N, ...} on success
+        {"success": False, "error": str} on failure
+    """
+    def _progress(msg: str) -> None:
+        if progress_callback:
+            progress_callback(msg)
+
+    if not HTTPX_AVAILABLE:
+        return {"success": False, "error": "httpx package not installed"}
+
+    # Get backend URL
+    if backend_url is None:
+        from ..config import ReAlignConfig
+
+        config = ReAlignConfig.load()
+        backend_url = config.share_backend_url or "https://realign-server.vercel.app"
+
+    # Get database
+    from ..db import get_database
+
+    db = get_database()
+
+    # 1. Load local state
+    _progress("Loading local agent data...")
+
+    agent_info = db.get_agent_info(agent_id)
+    if not agent_info:
+        return {"success": False, "error": f"Agent not found: {agent_id}"}
+
+    if not agent_info.share_id or not agent_info.share_url:
+        return {"success": False, "error": "Agent has no share metadata (not shared yet)"}
+
+    token = agent_info.share_admin_token or agent_info.share_contributor_token
+    if not token:
+        return {"success": False, "error": "No token available for sync (need admin or contributor token)"}
+
+    share_id = agent_info.share_id
+    local_sync_version = agent_info.sync_version or 0
+
+    local_sessions = db.get_sessions_by_agent_id(agent_id)
+    local_content_hashes = db.get_agent_content_hashes(agent_id)
+
+    logger.info(
+        f"Sync: agent={agent_id}, share={share_id}, "
+        f"local_sessions={len(local_sessions)}, local_hashes={len(local_content_hashes)}"
+    )
+
+    # 2. Pull remote state
+    _progress("Pulling remote data...")
+
+    remote_data = _pull_remote(backend_url, share_id)
+    if not remote_data.get("success"):
+        return {"success": False, "error": f"Failed to pull remote: {remote_data.get('error')}"}
+
+    conversation_data = remote_data["data"]
+    remote_sync_meta = conversation_data.get("sync_metadata", {})
+    remote_sync_version = remote_sync_meta.get("sync_version", 0)
+
+    remote_sessions_data = conversation_data.get("sessions", [])
+    remote_event = conversation_data.get("event", {})
+
+    # 3. Merge
+    _progress("Merging sessions...")
+
+    # Collect remote content hashes
+    remote_content_hashes = set()
+    for session_data in remote_sessions_data:
+        for turn_data in session_data.get("turns", []):
+            h = turn_data.get("content_hash")
+            if h:
+                remote_content_hashes.add(h)
+
+    # Import new remote sessions/turns locally
+    sessions_pulled = 0
+    from .import_shares import import_session_with_turns
+
+    for session_data in remote_sessions_data:
+        session_id = session_data.get("session_id", "")
+        session_turns = session_data.get("turns", [])
+
+        # Check if any turns in this session are new to us
+        new_turns = [
+            t for t in session_turns
+            if t.get("content_hash") and t["content_hash"] not in local_content_hashes
+        ]
+
+        if not new_turns:
+            continue
+
+        # Import the session (import_session_with_turns handles dedup by content_hash)
+        try:
+            # Suppress auto-summaries during sync
+            os.environ["REALIGN_DISABLE_AUTO_SUMMARIES"] = "1"
+            import_result = import_session_with_turns(
+                session_data, f"agent-{agent_id}", agent_info.share_url, db, force=False
+            )
+            if import_result.get("sessions", 0) > 0 or import_result.get("turns", 0) > 0:
+                sessions_pulled += 1
+
+            # Link session to agent
+            db.update_session_agent_id(session_id, agent_id)
+        except Exception as e:
+            logger.error(f"Failed to import remote session {session_id}: {e}")
+
+    # Merge name/description: last-write-wins by updated_at
+    description_updated = False
+    remote_updated_at = remote_event.get("updated_at")
+    if remote_updated_at:
+        try:
+            remote_dt = datetime.fromisoformat(remote_updated_at.replace("Z", "+00:00"))
+            local_dt = agent_info.updated_at
+            if hasattr(local_dt, "tzinfo") and local_dt.tzinfo is None:
+                local_dt = local_dt.replace(tzinfo=timezone.utc)
+            if remote_dt > local_dt:
+                remote_name = remote_event.get("title")
+                remote_desc = remote_event.get("description")
+                updates = {}
+                if remote_name and remote_name != agent_info.name:
+                    updates["name"] = remote_name
+                if remote_desc is not None and remote_desc != agent_info.description:
+                    updates["description"] = remote_desc
+                if updates:
+                    db.update_agent_info(agent_id, **updates)
+                    description_updated = True
+                    agent_info = db.get_agent_info(agent_id)
+        except Exception as e:
+            logger.warning(f"Failed to compare timestamps for name/desc merge: {e}")
+
+    # 4. Build merged data and push
+    _progress("Pushing merged data...")
+
+    # Reload local state after merge
+    local_sessions = db.get_sessions_by_agent_id(agent_id)
+    local_content_hashes = db.get_agent_content_hashes(agent_id)
+
+    # Count sessions pushed (local sessions with turns not in remote)
+    sessions_pushed = 0
+    for session in local_sessions:
+        turns = db.get_turns_for_session(session.id)
+        new_local_turns = [t for t in turns if t.content_hash not in remote_content_hashes]
+        if new_local_turns:
+            sessions_pushed += 1
+
+    # Build full conversation data for push
+    merged_conversation = _build_merged_conversation_data(
+        agent_info=agent_info,
+        agent_id=agent_id,
+        sessions=local_sessions,
+        db=db,
+        contributor_token=agent_info.share_contributor_token,
+    )
+
+    # Push with optimistic locking + retry
+    from .export_shares import _update_share_content
+
+    new_version = remote_sync_version
+    for attempt in range(MAX_SYNC_RETRIES):
+        try:
+            push_result = _update_share_content(
+                backend_url=backend_url,
+                share_id=share_id,
+                token=token,
+                conversation_data=merged_conversation,
+                expected_version=new_version,
+            )
+            new_version = push_result.get("version", new_version + 1)
+            break
+        except Exception as e:
+            error_str = str(e)
+            if "409" in error_str and attempt < MAX_SYNC_RETRIES - 1:
+                _progress(f"Version conflict, retrying ({attempt + 2}/{MAX_SYNC_RETRIES})...")
+                # Re-pull and retry
+                remote_data = _pull_remote(backend_url, share_id)
+                if remote_data.get("success"):
+                    conv = remote_data["data"]
+                    new_version = conv.get("sync_metadata", {}).get("sync_version", 0)
+                continue
+            else:
+                logger.error(f"Push failed after {attempt + 1} attempts: {e}")
+                return {"success": False, "error": f"Push failed: {e}"}
+
+    # 5. Update local sync metadata
+    now_iso = datetime.now(timezone.utc).isoformat()
+    db.update_agent_sync_metadata(
+        agent_id,
+        last_synced_at=now_iso,
+        sync_version=new_version,
+    )
+
+    _progress("Sync complete!")
+
+    return {
+        "success": True,
+        "sessions_pulled": sessions_pulled,
+        "sessions_pushed": sessions_pushed,
+        "description_updated": description_updated,
+        "new_sync_version": new_version,
+    }
+
+
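
The push step above encodes the optimistic-locking contract: a stale `expected_version` is expected to surface as an HTTP 409, and the client re-pulls to adopt the newer version before retrying. Stripped of the sync specifics, the pattern looks like this (a sketch; `push` and `pull` stand in for `_update_share_content` and `_pull_remote`):

```python
from typing import Any, Callable, Dict

def push_with_version_retry(
    push: Callable[[int], Dict[str, Any]],  # raises on HTTP error; returns {"version": N}
    pull: Callable[[], Dict[str, Any]],     # returns {"sync_metadata": {"sync_version": N}}
    expected_version: int,
    max_retries: int = 3,
) -> int:
    """Sketch: retry a versioned push, adopting the remote version on each conflict."""
    for attempt in range(max_retries):
        try:
            return push(expected_version).get("version", expected_version + 1)
        except Exception as e:
            if "409" in str(e) and attempt < max_retries - 1:
                # Lost the race: another writer pushed first. Adopt their version, retry.
                expected_version = pull().get("sync_metadata", {}).get("sync_version", 0)
                continue
            raise
    raise RuntimeError("unreachable")  # the loop always returns or raises
```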
+def _pull_remote(backend_url: str, share_id: str) -> dict:
+    """Pull remote share data via the download_share_data helper."""
+    try:
+        from .import_shares import download_share_data
+
+        share_url = f"{backend_url}/share/{share_id}"
+        return download_share_data(share_url, password=None)
+    except Exception as e:
+        return {"success": False, "error": str(e)}
+
+
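
Merging (step 3) is driven entirely by `content_hash`: a pulled session is imported only when it carries at least one turn whose hash is unseen locally, and `import_session_with_turns` dedups again at the turn level. A worked example of the filter, with illustrative hashes:

```python
# Worked example of the content_hash merge rule (hashes are illustrative).
local_content_hashes = {"h-aaa", "h-bbb"}

remote_session = {
    "session_id": "s1",
    "turns": [{"content_hash": "h-bbb"}, {"content_hash": "h-ccc"}],
}

new_turns = [
    t for t in remote_session["turns"]
    if t.get("content_hash") and t["content_hash"] not in local_content_hashes
]
assert [t["content_hash"] for t in new_turns] == ["h-ccc"]  # only the unseen turn counts
```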
+def _build_merged_conversation_data(
+    agent_info,
+    agent_id: str,
+    sessions,
+    db,
+    contributor_token: Optional[str] = None,
+) -> dict:
+    """
+    Build a full conversation data dict from local agent state.
+
+    Mirrors the structure of build_enhanced_conversation_data but works
+    directly from DB records without ExportableSession wrappers.
+    """
+    import json as json_module
+
+    event_data = {
+        "event_id": f"agent-{agent_id}",
+        "title": agent_info.name or "Agent Sessions",
+        "description": agent_info.description or "",
+        "event_type": "agent",
+        "status": "active",
+        "created_at": agent_info.created_at.isoformat() if agent_info.created_at else None,
+        "updated_at": agent_info.updated_at.isoformat() if agent_info.updated_at else None,
+    }
+
+    sessions_data = []
+    for session in sessions:
+        turns = db.get_turns_for_session(session.id)
+        turns_data = []
+        for turn in turns:
+            turn_content = db.get_turn_content(turn.id)
+            messages = []
+            if turn_content:
+                for line in turn_content.strip().split("\n"):
+                    if line.strip():
+                        try:
+                            messages.append(json_module.loads(line))
+                        except Exception:
+                            continue
+
+            turns_data.append({
+                "turn_id": turn.id,
+                "turn_number": turn.turn_number,
+                "content_hash": turn.content_hash,
+                "timestamp": turn.timestamp.isoformat() if turn.timestamp else None,
+                "llm_title": turn.llm_title or "",
+                "llm_description": turn.llm_description,
+                "user_message": turn.user_message,
+                "assistant_summary": turn.assistant_summary,
+                "model_name": turn.model_name,
+                "git_commit_hash": turn.git_commit_hash,
+                "messages": messages,
+            })
+
+        sessions_data.append({
+            "session_id": session.id,
+            "session_type": session.session_type or "unknown",
+            "workspace_path": session.workspace_path,
+            "session_title": session.session_title,
+            "session_summary": session.session_summary,
+            "started_at": session.started_at.isoformat() if session.started_at else None,
+            "last_activity_at": session.last_activity_at.isoformat() if session.last_activity_at else None,
+            "created_by": session.created_by,
+            "shared_by": session.shared_by,
+            "turns": turns_data,
+        })
+
+    username = os.environ.get("USER") or os.environ.get("USERNAME") or "anonymous"
+
+    result = {
+        "version": "2.1",
+        "username": username,
+        "time": datetime.now(timezone.utc).isoformat(),
+        "event": event_data,
+        "sessions": sessions_data,
+        "ui_metadata": {},
+    }
+
+    if contributor_token:
+        result["sync_metadata"] = {
+            "contributor_token": contributor_token,
+            "sync_version": agent_info.sync_version or 0,
+        }
+
+    return result
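
`_build_merged_conversation_data` treats stored turn content as JSONL, one message object per line, and silently skips lines that fail to parse. For reference, the same parsing applied to an illustrative payload:

```python
import json

# JSONL turn content: one message object per line (payload is illustrative).
turn_content = (
    '{"role": "user", "content": "hi"}\n'
    '{"role": "assistant", "content": "hello"}\n'
)

messages = []
for line in turn_content.strip().split("\n"):
    if line.strip():
        try:
            messages.append(json.loads(line))
        except Exception:
            continue  # tolerate a corrupt line rather than failing the export

assert [m["role"] for m in messages] == ["user", "assistant"]
```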