aline-ai 0.7.2__py3-none-any.whl → 0.7.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,6 +11,7 @@ Sync works with unencrypted shares only.
 import json
 import os
 import logging
+import time
 from datetime import datetime, timezone
 from typing import Optional, Dict, Any, Callable
 
@@ -28,6 +29,40 @@ logger = setup_logger("realign.commands.sync_agent", "sync_agent.log")
 MAX_SYNC_RETRIES = 3
 
 
+def _ensure_aware_utc(dt: datetime) -> datetime:
+    if dt.tzinfo is None:
+        return dt.replace(tzinfo=timezone.utc)
+    return dt
+
+
+def _parse_iso_datetime_to_utc(value: Any) -> Optional[datetime]:
+    if not isinstance(value, str) or not value:
+        return None
+    try:
+        dt = datetime.fromisoformat(value.replace("Z", "+00:00"))
+        return _ensure_aware_utc(dt)
+    except Exception:
+        return None
+
+
+def _extract_httpx_conflict_current_version(err: Exception) -> Optional[int]:
+    if not HTTPX_AVAILABLE:
+        return None
+    if not isinstance(err, httpx.HTTPStatusError):
+        return None
+    if err.response is None or err.response.status_code != 409:
+        return None
+    try:
+        payload = err.response.json()
+    except Exception:
+        return None
+    current = payload.get("current_version")
+    try:
+        return int(current)
+    except Exception:
+        return None
+
+
 def sync_agent_command(
     agent_id: str,
     backend_url: Optional[str] = None,
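
Note: the three new helpers normalize every timestamp to timezone-aware UTC before any comparison. A minimal standalone sketch of the behavior they appear to implement (illustrative, not the package source):

# Sketch of the expected helper behavior (names recreated for illustration).
from datetime import datetime, timezone

def ensure_aware_utc(dt: datetime) -> datetime:
    # Naive datetimes are treated as already-UTC and tagged accordingly.
    return dt.replace(tzinfo=timezone.utc) if dt.tzinfo is None else dt

# datetime.fromisoformat() rejects a trailing "Z" before Python 3.11,
# which is why the code rewrites it to "+00:00" first.
parsed = datetime.fromisoformat("2024-05-01T12:00:00Z".replace("Z", "+00:00"))
assert parsed == ensure_aware_utc(datetime(2024, 5, 1, 12, 0, 0))
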
@@ -52,6 +87,7 @@ def sync_agent_command(
         {"success": True, "sessions_pulled": N, "sessions_pushed": N, ...} on success
         {"success": False, "error": str} on failure
     """
+
     def _progress(msg: str) -> None:
         if progress_callback:
             progress_callback(msg)
@@ -84,7 +120,10 @@ def sync_agent_command(
         agent_info = matches[0]
         agent_id = agent_info.id
     elif len(matches) > 1:
-        return {"success": False, "error": f"Ambiguous agent_id prefix '{agent_id}' matches {len(matches)} agents"}
+        return {
+            "success": False,
+            "error": f"Ambiguous agent_id prefix '{agent_id}' matches {len(matches)} agents",
+        }
     else:
         return {"success": False, "error": f"Agent not found: {agent_id}"}
 
@@ -93,7 +132,10 @@ def sync_agent_command(
 
     token = agent_info.share_admin_token or agent_info.share_contributor_token
     if not token:
-        return {"success": False, "error": "No token available for sync (need admin or contributor token)"}
+        return {
+            "success": False,
+            "error": "No token available for sync (need admin or contributor token)",
+        }
 
     share_id = agent_info.share_id
     local_sync_version = agent_info.sync_version or 0
@@ -164,7 +206,8 @@ def sync_agent_command(
 
         # Check if any turns in this session are new to THIS AGENT (not globally)
         new_turns = [
-            t for t in session_turns
+            t
+            for t in session_turns
             if t.get("content_hash") and t["content_hash"] not in local_content_hashes
         ]
 
@@ -186,8 +229,11 @@ def sync_agent_command(
                session_data, f"agent-{agent_id}", agent_info.share_url, db, force=False
            )
            # Count as pulled if: created new session/turns, or session was new/needed linking
-           if (import_result.get("sessions", 0) > 0 or import_result.get("turns", 0) > 0
-                   or should_count):
+           if (
+               import_result.get("sessions", 0) > 0
+               or import_result.get("turns", 0) > 0
+               or should_count
+           ):
                sessions_pulled += 1
        except Exception as e:
            logger.error(f"Failed to import remote session {session_id}: {e}")
@@ -206,11 +252,13 @@ def sync_agent_command(
     remote_updated_at = remote_event.get("updated_at")
     if remote_updated_at:
         try:
-            remote_dt = datetime.fromisoformat(remote_updated_at.replace("Z", "+00:00"))
-            local_dt = agent_info.updated_at
-            if hasattr(local_dt, "tzinfo") and local_dt.tzinfo is None:
-                local_dt = local_dt.replace(tzinfo=timezone.utc)
-            if remote_dt > local_dt:
+            remote_dt = _parse_iso_datetime_to_utc(remote_updated_at)
+            local_dt = (
+                _ensure_aware_utc(agent_info.updated_at)
+                if isinstance(agent_info.updated_at, datetime)
+                else None
+            )
+            if remote_dt and local_dt and remote_dt > local_dt:
                 remote_name = remote_event.get("title")
                 remote_desc = remote_event.get("description")
                 updates = {}
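
Note: the normalization on both sides matters because Python refuses to order naive and aware datetimes. Illustration (standalone, names are illustrative):

# Why both timestamps must be made timezone-aware before ordering:
from datetime import datetime, timezone

aware = datetime(2024, 5, 1, tzinfo=timezone.utc)
naive = datetime(2024, 5, 1)
try:
    aware > naive                       # comparing aware with naive raises
except TypeError:
    naive = naive.replace(tzinfo=timezone.utc)
assert not (aware > naive)              # equal instants once both are aware
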
@@ -240,6 +288,53 @@ def sync_agent_command(
         if new_local_turns:
             sessions_pushed += 1
 
+    # Skip push if there's nothing new to send.
+    # This avoids re-uploading large, unchanged payloads (which can hit serverless limits and show up as 403/413).
+    needs_push_metadata = False
+    try:
+        remote_title = remote_event.get("title")
+        remote_desc = remote_event.get("description")
+
+        local_title = agent_info.name
+        local_desc = agent_info.description
+
+        has_metadata_diff = (remote_title != local_title) or (remote_desc != local_desc)
+        if has_metadata_diff and not description_updated:
+            remote_updated_at = remote_event.get("updated_at")
+            remote_dt = _parse_iso_datetime_to_utc(remote_updated_at)
+
+            local_dt = getattr(agent_info, "updated_at", None)
+            if isinstance(local_dt, datetime):
+                local_dt = _ensure_aware_utc(local_dt)
+
+            # If remote has no timestamp, assume local should win. Otherwise, push only if local is newer.
+            if remote_dt is None or (local_dt and remote_dt and local_dt > remote_dt):
+                needs_push_metadata = True
+    except Exception as e:
+        logger.warning(f"Failed to compute metadata push necessity (non-fatal): {e}")
+
+    if sessions_pushed == 0 and not needs_push_metadata:
+        now_iso = datetime.now(timezone.utc).isoformat()
+        try:
+            db.update_agent_sync_metadata(
+                agent_id,
+                last_synced_at=now_iso,
+                sync_version=remote_sync_version,
+            )
+        except Exception as e:
+            logger.warning(f"Failed to update local sync metadata after no-op sync: {e}")
+
+        _progress("No changes to push.")
+        _progress("Sync complete!")
+
+        return {
+            "success": True,
+            "sessions_pulled": sessions_pulled,
+            "sessions_pushed": 0,
+            "description_updated": description_updated,
+            "new_sync_version": remote_sync_version,
+        }
+
     # Build full conversation data for push
     merged_conversation = _build_merged_conversation_data(
         agent_info=agent_info,
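
Note: distilled, the new early-return above implements this decision (a sketch with hypothetical names, not the package's API):

# Hypothetical distillation of the no-op push guard:
from datetime import datetime
from typing import Optional

def should_push(sessions_pushed: int, metadata_differs: bool,
                local_dt: Optional[datetime], remote_dt: Optional[datetime]) -> bool:
    if sessions_pushed > 0:
        return True                     # new turns always warrant a push
    if not metadata_differs:
        return False                    # nothing at all to send
    # Missing remote timestamp: assume local wins; otherwise push only if local is newer.
    return remote_dt is None or (local_dt is not None and local_dt > remote_dt)
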
@@ -265,14 +360,138 @@ def sync_agent_command(
             new_version = push_result.get("version", new_version + 1)
             break
         except Exception as e:
-            error_str = str(e)
-            if "409" in error_str and attempt < MAX_SYNC_RETRIES - 1:
-                _progress(f"Version conflict, retrying ({attempt + 2}/{MAX_SYNC_RETRIES})...")
-                # Re-pull and retry
+            conflict_current_version = _extract_httpx_conflict_current_version(e)
+            is_conflict = (
+                HTTPX_AVAILABLE
+                and isinstance(e, httpx.HTTPStatusError)
+                and e.response is not None
+                and e.response.status_code == 409
+            )
+            if is_conflict and attempt < MAX_SYNC_RETRIES - 1:
+                if conflict_current_version is not None:
+                    _progress(
+                        "Version conflict "
+                        f"(remote={conflict_current_version}, local_expected={new_version}), "
+                        f"retrying ({attempt + 2}/{MAX_SYNC_RETRIES})..."
+                    )
+                else:
+                    _progress(f"Version conflict, retrying ({attempt + 2}/{MAX_SYNC_RETRIES})...")
+
+                # Re-pull to re-merge any remote changes; also bypass potential CDN caching.
                 remote_data = _pull_remote(backend_url, share_id)
                 if remote_data.get("success"):
                     conv = remote_data["data"]
-                    new_version = conv.get("sync_metadata", {}).get("sync_version", 0)
+                    remote_version = conv.get("sync_metadata", {}).get("sync_version", 0)
+                    try:
+                        remote_version_int = int(remote_version)
+                    except Exception:
+                        remote_version_int = 0
+
+                    if conflict_current_version is not None:
+                        remote_version_int = max(remote_version_int, conflict_current_version)
+
+                    new_version = remote_version_int
+
+                    # Rebuild merge inputs from refreshed remote snapshot.
+                    remote_sessions_data = conv.get("sessions", [])
+                    remote_event = conv.get("event", {})
+
+                    remote_content_hashes = set()
+                    for session_data in remote_sessions_data:
+                        for turn_data in session_data.get("turns", []):
+                            h = turn_data.get("content_hash")
+                            if h:
+                                remote_content_hashes.add(h)
+
+                    # Re-import remote sessions (idempotent via content_hash dedup) and re-merge metadata.
+                    try:
+                        from .import_shares import import_session_with_turns
+
+                        local_content_hashes = db.get_agent_content_hashes(agent_id)
+                        for session_data in remote_sessions_data:
+                            session_id = session_data.get("session_id", "")
+                            session_turns = session_data.get("turns", [])
+
+                            new_turns = [
+                                t
+                                for t in session_turns
+                                if t.get("content_hash")
+                                and t["content_hash"] not in local_content_hashes
+                            ]
+
+                            existing_session = db.get_session_by_id(session_id)
+                            session_is_new = existing_session is None
+                            session_needs_linking = (
+                                existing_session and existing_session.agent_id != agent_id
+                            )
+
+                            if not new_turns and not session_is_new and not session_needs_linking:
+                                continue
+
+                            should_count = session_is_new or session_needs_linking
+                            try:
+                                os.environ["REALIGN_DISABLE_AUTO_SUMMARIES"] = "1"
+                                import_result = import_session_with_turns(
+                                    session_data,
+                                    f"agent-{agent_id}",
+                                    agent_info.share_url,
+                                    db,
+                                    force=False,
+                                )
+                                if (
+                                    import_result.get("sessions", 0) > 0
+                                    or import_result.get("turns", 0) > 0
+                                    or should_count
+                                ):
+                                    sessions_pulled += 1
+                            except Exception as ie:
+                                logger.error(f"Failed to import remote session {session_id}: {ie}")
+                                if should_count:
+                                    sessions_pulled += 1
+
+                            try:
+                                db.update_session_agent_id(session_id, agent_id)
+                            except Exception as le:
+                                logger.error(f"Failed to link session {session_id} to agent: {le}")
+
+                        # Re-merge name/description: last-write-wins by updated_at.
+                        refreshed_remote_updated_at = remote_event.get("updated_at")
+                        remote_dt = _parse_iso_datetime_to_utc(refreshed_remote_updated_at)
+                        local_dt = (
+                            _ensure_aware_utc(agent_info.updated_at)
+                            if isinstance(agent_info.updated_at, datetime)
+                            else None
+                        )
+                        if remote_dt and local_dt and remote_dt > local_dt:
+                            remote_name = remote_event.get("title")
+                            remote_desc = remote_event.get("description")
+                            updates = {}
+                            if remote_name and remote_name != agent_info.name:
+                                updates["name"] = remote_name
+                            if remote_desc is not None and remote_desc != agent_info.description:
+                                updates["description"] = remote_desc
+                            if updates:
+                                db.update_agent_info(agent_id, **updates)
+                                description_updated = True
+                                agent_info = db.get_agent_info(agent_id)
+                    except Exception as merge_e:
+                        logger.warning(
+                            f"Failed to refresh merge after conflict (non-fatal): {merge_e}"
+                        )
+
+                    # Rebuild payload from refreshed local state before retrying.
+                    local_sessions = db.get_sessions_by_agent_id(agent_id)
+                    merged_conversation = _build_merged_conversation_data(
+                        agent_info=agent_info,
+                        agent_id=agent_id,
+                        sessions=local_sessions,
+                        db=db,
+                        contributor_token=agent_info.share_contributor_token,
+                    )
+
+                elif conflict_current_version is not None:
+                    # If pull fails, fall back to server-provided current_version.
+                    new_version = max(new_version, conflict_current_version)
                 continue
             else:
                 logger.error(f"Push failed after {attempt + 1} attempts: {e}")
@@ -303,7 +522,8 @@ def _pull_remote(backend_url: str, share_id: str) -> dict:
         from .import_shares import download_share_data
 
         share_url = f"{backend_url}/share/{share_id}"
-        return download_share_data(share_url, password=None)
+        cache_buster = str(int(time.time() * 1000))
+        return download_share_data(share_url, password=None, cache_buster=cache_buster)
     except Exception as e:
         return {"success": False, "error": str(e)}
 
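Note: the millisecond timestamp presumably ends up as a query parameter so each pull bypasses CDN/edge caches. A sketch of that convention (how download_share_data actually applies the cache_buster, including the parameter name, is not visible in this diff):

# Presumed cache-busting convention (parameter name "cb" is an assumption):
import time

def with_cache_buster(url: str) -> str:
    buster = str(int(time.time() * 1000))   # unique per pull at millisecond resolution
    sep = "&" if "?" in url else "?"
    return f"{url}{sep}cb={buster}"

# e.g. with_cache_buster("https://backend/share/abc") -> "https://backend/share/abc?cb=1717..."
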
@@ -329,8 +549,12 @@ def _build_merged_conversation_data(
         "description": agent_info.description or "",
         "event_type": "agent",
         "status": "active",
-        "created_at": agent_info.created_at.isoformat() if agent_info.created_at else None,
-        "updated_at": agent_info.updated_at.isoformat() if agent_info.updated_at else None,
+        "created_at": (
+            _ensure_aware_utc(agent_info.created_at).isoformat() if agent_info.created_at else None
+        ),
+        "updated_at": (
+            _ensure_aware_utc(agent_info.updated_at).isoformat() if agent_info.updated_at else None
+        ),
     }
 
     sessions_data = []
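
Note: the practical effect is that naive timestamps (common from SQLite layers) no longer serialize without an offset; routing them through _ensure_aware_utc pins an explicit UTC offset on the wire. For example:

# Effect of the change on serialization (illustrative):
from datetime import datetime, timezone

naive = datetime(2024, 5, 1, 12, 0, 0)
print(naive.isoformat())                               # 2024-05-01T12:00:00 (no offset)
print(naive.replace(tzinfo=timezone.utc).isoformat())  # 2024-05-01T12:00:00+00:00
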
@@ -348,32 +572,38 @@ def _build_merged_conversation_data(
             except Exception:
                 continue
 
-            turns_data.append({
-                "turn_id": turn.id,
-                "turn_number": turn.turn_number,
-                "content_hash": turn.content_hash,
-                "timestamp": turn.timestamp.isoformat() if turn.timestamp else None,
-                "llm_title": turn.llm_title or "",
-                "llm_description": turn.llm_description,
-                "user_message": turn.user_message,
-                "assistant_summary": turn.assistant_summary,
-                "model_name": turn.model_name,
-                "git_commit_hash": turn.git_commit_hash,
-                "messages": messages,
-            })
-
-        sessions_data.append({
-            "session_id": session.id,
-            "session_type": session.session_type or "unknown",
-            "workspace_path": session.workspace_path,
-            "session_title": session.session_title,
-            "session_summary": session.session_summary,
-            "started_at": session.started_at.isoformat() if session.started_at else None,
-            "last_activity_at": session.last_activity_at.isoformat() if session.last_activity_at else None,
-            "created_by": session.created_by,
-            "shared_by": session.shared_by,
-            "turns": turns_data,
-        })
+            turns_data.append(
+                {
+                    "turn_id": turn.id,
+                    "turn_number": turn.turn_number,
+                    "content_hash": turn.content_hash,
+                    "timestamp": turn.timestamp.isoformat() if turn.timestamp else None,
+                    "llm_title": turn.llm_title or "",
+                    "llm_description": turn.llm_description,
+                    "user_message": turn.user_message,
+                    "assistant_summary": turn.assistant_summary,
+                    "model_name": turn.model_name,
+                    "git_commit_hash": turn.git_commit_hash,
+                    "messages": messages,
+                }
+            )
+
+        sessions_data.append(
+            {
+                "session_id": session.id,
+                "session_type": session.session_type or "unknown",
+                "workspace_path": session.workspace_path,
+                "session_title": session.session_title,
+                "session_summary": session.session_summary,
+                "started_at": session.started_at.isoformat() if session.started_at else None,
+                "last_activity_at": (
+                    session.last_activity_at.isoformat() if session.last_activity_at else None
+                ),
+                "created_by": session.created_by,
+                "shared_by": session.shared_by,
+                "turns": turns_data,
+            }
+        )
 
     username = os.environ.get("USER") or os.environ.get("USERNAME") or "anonymous"