aline-ai 0.5.12__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -25,6 +25,7 @@ from typing import Any, List, Dict, Optional, Tuple, Set, Callable
 from ..logging_config import setup_logger
 from ..db.base import SessionRecord, TurnRecord
 from ..llm_client import call_llm, extract_json
+from ..auth import get_auth_headers, is_logged_in
 
 logger = setup_logger("realign.commands.export_shares", "export_shares.log")
 
@@ -177,9 +178,9 @@ class ExportableSession:
     last_activity_at: Optional[datetime]  # Last activity time
     turn_count: int  # Number of turns in session
     turns: List[TurnRecord]  # List of turns
-    # V9: creator information
-    creator_name: Optional[str] = None  # Username who created the session
-    creator_id: Optional[str] = None  # User UUID
+    # V18: user identity
+    created_by: Optional[str] = None  # Creator UID
+    shared_by: Optional[str] = None  # Sharer UID
 
 
 def get_sessions_for_export(
@@ -215,8 +216,8 @@ def get_sessions_for_export(
                 last_activity_at=session.last_activity_at,
                 turn_count=len(turns),
                 turns=turns,
-                creator_name=session.creator_name,
-                creator_id=session.creator_id,
+                created_by=session.created_by,
+                shared_by=session.shared_by,
             )
         )
 
@@ -298,8 +299,8 @@ def build_exportable_sessions_from_records(
                 last_activity_at=session.last_activity_at,
                 turn_count=len(turns),
                 turns=turns,
-                creator_name=session.creator_name,
-                creator_id=session.creator_id,
+                created_by=session.created_by,
+                shared_by=session.shared_by,
             )
         )
     return exportable
@@ -803,8 +804,8 @@ def build_enhanced_conversation_data(
             full_event.created_at.isoformat() if full_event.created_at else None
         )
         event_data["metadata"] = full_event.metadata or {}
-        event_data["creator_name"] = full_event.creator_name
-        event_data["creator_id"] = full_event.creator_id
+        event_data["created_by"] = full_event.created_by
+        event_data["shared_by"] = full_event.shared_by
 
     # Build sessions data with turn structure
     sessions_data = []
@@ -867,13 +868,11 @@ def build_enhanced_conversation_data(
                 ),
                 "model_name": turn.model_name,
                 "git_commit_hash": turn.git_commit_hash,
-                "creator_name": turn.creator_name,
-                "creator_id": turn.creator_id,
                 "messages": messages,  # Structured messages for this turn
             }
             turns_data.append(turn_data)
 
-        # Build session data (V9: includes creator fields)
+        # Build session data (V18: created_by/shared_by)
         session_data = {
             "session_id": session.session_id,
             "session_type": session.session_type,
@@ -884,8 +883,8 @@ def build_enhanced_conversation_data(
             "last_activity_at": (
                 session.last_activity_at.isoformat() if session.last_activity_at else None
             ),
-            "creator_name": session.creator_name,
-            "creator_id": session.creator_id,
+            "created_by": session.created_by,
+            "shared_by": session.shared_by,
             "turns": turns_data,
         }
         sessions_data.append(session_data)
@@ -1688,9 +1687,13 @@ def _standard_upload(
     if ui_metadata:
         payload["ui_metadata"] = ui_metadata
 
+    # Include auth headers for Bearer token authentication
+    headers = get_auth_headers()
+
     response = httpx.post(
         f"{backend_url}/api/share/create",
         json=payload,
+        headers=headers,
         timeout=30.0,
     )
     response.raise_for_status()
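The get_auth_headers and is_logged_in helpers used throughout this diff come from the new realign/auth module, which is not included here. Purely as an illustration of the contract the calls above appear to assume (a boolean login check, plus Bearer token headers passed to httpx), a hypothetical sketch:

    # Hypothetical sketch only; the shipped realign/auth implementation is not shown in this diff.
    from typing import Dict, Optional

    def _load_stored_token() -> Optional[str]:
        # Placeholder for however 'aline login' persists the Supabase access token locally.
        return None

    def is_logged_in() -> bool:
        # True once a token has been stored by 'aline login'.
        return _load_stored_token() is not None

    def get_auth_headers() -> Dict[str, str]:
        # HTTP headers for Bearer token authentication; empty when logged out.
        token = _load_stored_token()
        return {"Authorization": f"Bearer {token}"} if token else {}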
@@ -1705,12 +1708,14 @@ def _upload_chunks_and_complete(
     upload_id: str,
     backend_url: str,
     progress_callback: Optional[Callable] = None,
+    auth_headers: Optional[Dict[str, str]] = None,
 ) -> None:
     """
     Helper function to upload chunks and complete the upload.
     Can be run in background thread.
     """
     total_chunks = len(chunks)
+    headers = auth_headers or {}
 
     # Upload each chunk
     for i, chunk in enumerate(chunks):
@@ -1727,6 +1732,7 @@ def _upload_chunks_and_complete(
         response = httpx.post(
             f"{backend_url}/api/share/chunk/upload",
             json=chunk_payload,
+            headers=headers,
             timeout=60.0,  # Longer timeout for chunk uploads
         )
         response.raise_for_status()
@@ -1748,6 +1754,7 @@ def _upload_chunks_and_complete(
     response = httpx.post(
         f"{backend_url}/api/share/chunk/complete",
         json={"upload_id": upload_id},
+        headers=headers,
         timeout=60.0,
     )
     response.raise_for_status()
@@ -1806,6 +1813,9 @@ def _chunked_upload(
     if progress_callback:
         progress_callback(0, total_chunks + 2, "Initializing chunked upload...")
 
+    # Get auth headers for Bearer token authentication
+    auth_headers = get_auth_headers()
+
     # Step 1: Initialize upload session (now returns share_url immediately)
     try:
         init_payload = {
@@ -1823,6 +1833,7 @@ def _chunked_upload(
         response = httpx.post(
             f"{backend_url}/api/share/chunk/init",
             json=init_payload,
+            headers=auth_headers,
             timeout=30.0,
         )
         response.raise_for_status()
@@ -1847,7 +1858,7 @@ def _chunked_upload(
         # but user already has the share URL displayed
         thread = threading.Thread(
             target=_upload_chunks_and_complete,
-            args=(chunks, upload_id, backend_url, None),  # No callback in background
+            args=(chunks, upload_id, backend_url, None, auth_headers),  # No callback in background
             daemon=False,  # Important: let thread complete before process exits
         )
         thread.start()
@@ -1862,7 +1873,7 @@ def _chunked_upload(
         }
 
     # Foreground mode: upload chunks synchronously
-    _upload_chunks_and_complete(chunks, upload_id, backend_url, progress_callback)
+    _upload_chunks_and_complete(chunks, upload_id, backend_url, progress_callback, auth_headers)
 
     return {
         "share_id": share_id,
@@ -1917,11 +1928,13 @@ def upload_to_backend_unencrypted(
     else:
         logger.info(f"Payload size ({payload_size / 1024:.2f}KB), using standard upload")
         print(f"📤 Using standard upload...")
-        # Standard upload
+        # Standard upload with auth headers
        try:
+            headers = get_auth_headers()
             response = httpx.post(
                 f"{backend_url}/api/share/create",
                 json=full_payload,
+                headers=headers,
                 timeout=30.0,
             )
             response.raise_for_status()
@@ -1975,6 +1988,9 @@ def _chunked_upload_unencrypted(
     if progress_callback:
         progress_callback(0, total_chunks + 2, "Initializing chunked upload...")
 
+    # Get auth headers for Bearer token authentication
+    auth_headers = get_auth_headers()
+
     # Step 1: Initialize upload session (now returns share_url immediately)
     try:
         init_payload = {
@@ -1988,6 +2004,7 @@ def _chunked_upload_unencrypted(
         response = httpx.post(
             f"{backend_url}/api/share/chunk/init",
             json=init_payload,
+            headers=auth_headers,
             timeout=30.0,
         )
         response.raise_for_status()
@@ -2012,7 +2029,7 @@ def _chunked_upload_unencrypted(
         # but user already has the share URL displayed
         thread = threading.Thread(
             target=_upload_chunks_and_complete,
-            args=(chunks, upload_id, backend_url, None),  # No callback in background
+            args=(chunks, upload_id, backend_url, None, auth_headers),  # No callback in background
             daemon=False,  # Important: let thread complete before process exits
         )
         thread.start()
@@ -2027,7 +2044,7 @@ def _chunked_upload_unencrypted(
         }
 
     # Foreground mode: upload chunks synchronously
-    _upload_chunks_and_complete(chunks, upload_id, backend_url, progress_callback)
+    _upload_chunks_and_complete(chunks, upload_id, backend_url, progress_callback, auth_headers)
 
     return {
         "share_id": share_id,
@@ -3036,16 +3053,22 @@ def export_shares_interactive_command(
     # Check dependencies
     if not CRYPTO_AVAILABLE:
         if not json_output:
-            print("Error: cryptography package not installed", file=sys.stderr)
+            print("Error: cryptography package not installed", file=sys.stderr)
             print("Install it with: pip install cryptography", file=sys.stderr)
         return 1
 
     if not HTTPX_AVAILABLE:
         if not json_output:
-            print("Error: httpx package not installed", file=sys.stderr)
+            print("Error: httpx package not installed", file=sys.stderr)
             print("Install it with: pip install httpx", file=sys.stderr)
         return 1
 
+    # Check authentication - require login to create shares
+    if not is_logged_in():
+        if not json_output:
+            print("Error: Not logged in. Please run 'aline login' first.", file=sys.stderr)
+        return 1
+
     # Get backend URL
     if backend_url is None:
         # Try to load from config
@@ -239,6 +239,9 @@ def import_v2_data(
 
     logger.info("Starting v2.0 data import")
 
+    # Load config to get current user's UID for shared_by
+    config = ReAlignConfig.load()
+
     # 1. Create Event
     event_data = data.get("event", {})
     event_id = event_data.get("event_id")
@@ -288,8 +291,9 @@ def import_v2_data(
         slack_message=None,
         share_url=share_url,
         commit_hashes=[],
-        creator_name=event_data.get("creator_name"),  # V9: preserve creator info
-        creator_id=event_data.get("creator_id"),
+        # V18: user identity (with backward compatibility for old format)
+        created_by=event_data.get("created_by") or event_data.get("uid") or event_data.get("creator_id"),
+        shared_by=config.uid,  # Current user is the importer
     )
 
     # Use sync_events for both create and update (upsert behavior)
@@ -365,6 +369,9 @@ def import_session_with_turns(
     Returns:
         Dict with counts: {'sessions': int, 'turns': int, 'skipped': int}
     """
+    # Load config to get current user's UID for shared_by
+    config = ReAlignConfig.load()
+
     session_id = session_data.get("session_id")
     imported_sessions = 0
     imported_turns = 0
@@ -393,8 +400,9 @@ def import_session_with_turns(
         summary_status="completed",
         summary_locked_until=None,
         summary_error=None,
-        creator_name=session_data.get("creator_name"),  # V9: preserve creator info
-        creator_id=session_data.get("creator_id"),
+        # V18: user identity (with backward compatibility for old format)
+        created_by=session_data.get("created_by") or session_data.get("uid") or session_data.get("creator_id"),
+        shared_by=config.uid,  # Current user is the importer
     )
 
     if existing_session and force:
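Both import paths above keep older exports working: created_by is read from the new created_by key, then falls back to uid, then to the legacy creator_id, while shared_by is always stamped with the importing user's uid. A small illustration of the fallback chain (payload values are invented):

    # Illustrative only; mirrors the fallback expression used in the hunks above.
    legacy_session = {"session_id": "s-1", "creator_id": "11111111-1111-1111-1111-111111111111"}

    created_by = (
        legacy_session.get("created_by")
        or legacy_session.get("uid")
        or legacy_session.get("creator_id")
    )
    print(created_by)  # falls back to the legacy creator_id, since the newer keys are absent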
@@ -423,8 +431,8 @@ def import_session_with_turns(
                     summary_status = ?,
                     summary_locked_until = ?,
                     summary_error = ?,
-                    creator_name = ?,
-                    creator_id = ?
+                    created_by = ?,
+                    shared_by = ?
                 WHERE id = ?
                 """,
                 (
@@ -434,8 +442,8 @@
                     session.summary_status,
                     session.summary_locked_until,
                     session.summary_error,
-                    session.creator_name,
-                    session.creator_id,
+                    session.created_by,
+                    session.shared_by,
                     session.id,
                 ),
             )
@@ -473,8 +481,6 @@ def import_session_with_turns(
             timestamp=parse_datetime(turn_data.get("timestamp")) or datetime.now(),
             created_at=datetime.now(),
             git_commit_hash=turn_data.get("git_commit_hash"),
-            creator_name=turn_data.get("creator_name"),  # V9: preserve creator info
-            creator_id=turn_data.get("creator_id"),
         )
 
         # Store turn content (JSONL)
realign/commands/init.py CHANGED
@@ -9,7 +9,6 @@ from rich.console import Console
 from ..config import (
     ReAlignConfig,
     get_default_config_content,
-    generate_user_id,
     generate_random_username,
 )
 
@@ -647,45 +646,23 @@ def init_global(
     # Load config
     config = ReAlignConfig.load()
 
-    # User identity setup (V9)
-    if not config.user_id:
+    # User identity setup (V17: uid from Supabase login)
+    if not config.uid:
         console.print("\n[bold blue]═══ User Identity Setup ═══[/bold blue]")
         console.print(
-            "ReAlign needs to identify you for tracking session ownership.\n"
+            "Aline requires login for user identification.\n"
         )
-
-        # Generate user UUID (based on MAC address)
-        config.user_id = generate_user_id()
-        console.print(f"Generated user ID: [cyan]{config.user_id[:8]}...[/cyan]\n")
-
-        # Prompt user for username
-        try:
-            from rich.prompt import Prompt
-
-            user_input = Prompt.ask(
-                "[cyan]Enter your username (or press Enter for auto-generated)[/cyan]",
-                default="",
-            )
-        except ImportError:
-            user_input = input(
-                "Enter your username (or press Enter for auto-generated): "
-            ).strip()
-
-        if user_input:
-            config.user_name = user_input
-        else:
-            # Auto-generate username
+        console.print(
+            "[yellow]Run 'aline login' to authenticate with your account.[/yellow]\n"
+        )
+        # If user_name is also not set, generate a temporary one
+        if not config.user_name:
             config.user_name = generate_random_username()
+            config.save()
             console.print(
-                f"Auto-generated username: [yellow]{config.user_name}[/yellow]"
+                f"Auto-generated username: [yellow]{config.user_name}[/yellow] (will update on login)\n"
             )
 
-        # Save config with user identity
-        config.save()
-        console.print(
-            f"[green]✓[/green] User identity saved: [bold]{config.user_name}[/bold] ([dim]{config.user_id[:8]}...[/dim])\n"
-        )
-
     # Initialize database
     db_path = Path(config.sqlite_db_path).expanduser()
     db_path.parent.mkdir(parents=True, exist_ok=True)
@@ -658,6 +658,14 @@ def watcher_start_command() -> int:
         int: Exit code (0 = success, 1 = error)
     """
     try:
+        # Check login status first
+        from ..auth import is_logged_in
+
+        if not is_logged_in():
+            console.print("[red]✗ Not logged in. Watcher requires authentication.[/red]")
+            console.print("[dim]Run 'aline login' first.[/dim]")
+            return 1
+
         # Check if already running
         is_running, pid, mode = detect_watcher_process()
 
@@ -1012,7 +1020,7 @@ def _get_imported_sessions(db, exclude_session_ids: set) -> list:
                 "session_file": None,  # No file for imported sessions
                 "session_title": session.session_title,
                 "session_summary": session.session_summary,
-                "creator_name": session.creator_name,
+                "created_by": session.created_by,
             }
         )
 
@@ -1226,7 +1234,7 @@ def _get_session_tracking_status_batch(
         if record and status in ("partial", "tracked"):
             info["session_title"] = record.session_title
             info["session_summary"] = record.session_summary
-            info["creator_name"] = record.creator_name
+            info["created_by"] = record.created_by
 
         session_infos.append(info)
 
@@ -1526,7 +1534,7 @@ def watcher_session_list_command(
                 "last_activity": info["last_activity"].isoformat(),
                 "session_title": info.get("session_title"),
                 "session_summary": info.get("session_summary"),
-                "creator_name": info.get("creator_name"),
+                "created_by": info.get("created_by"),
                 "session_file": (
                     str(info.get("session_file")) if info.get("session_file") else None
                 ),
@@ -1622,14 +1630,12 @@ def watcher_session_list_command(
         title_str = info.get("session_title") or "-"
         title_str = title_str.strip()
 
-        # V9: Display creator (truncate if too long)
+        # V18: Display created_by UID (truncate if too long)
         creator_display = "-"
         if info["status"] in ("partial", "tracked"):
-            creator_name = info.get("creator_name")
-            if creator_name:
-                creator_display = creator_name
-                if len(creator_display) > 10:
-                    creator_display = creator_display[:10] + "..."
+            created_by = info.get("created_by")
+            if created_by:
+                creator_display = created_by[:8] + "..."
 
         # Truncate project name
         project_name = info["project_name"]
@@ -1851,8 +1857,7 @@ def watcher_event_generate_command(session_selector: str, show_sessions: bool =
         updated_at=now,
         metadata={},
         commit_hashes=[],
-        creator_name=config.user_name,
-        creator_id=config.user_id,
+        created_by=config.uid,
     )
 
     # Save to database
@@ -2117,7 +2122,7 @@ def watcher_event_list_command(
                 "id": event.id,
                 "title": event.title,
                 "description": event.description,
-                "creator_name": event.creator_name,
+                "created_by": event.created_by,
                 "generated_by": generated_by,
                 "session_count": session_count,
                 "session_ids": session_ids,
@@ -2177,10 +2182,8 @@ def watcher_event_list_command(
         else:
             share_link_display = "[dim]-[/dim]"
 
-        # V9: Display creator (truncate if too long)
-        creator_display = event.creator_name or "-"
-        if creator_display and len(creator_display) > 12:
-            creator_display = creator_display[:12] + "..."
+        # V18: Display created_by UID (truncate)
+        creator_display = (event.created_by[:8] + "...") if event.created_by else "-"
 
         table.add_row(
             str(idx),
@@ -461,6 +461,14 @@ def worker_repair_command(*, force: bool = False) -> int:
 
 def worker_start_command() -> int:
     try:
+        # Check login status first
+        from ..auth import is_logged_in
+
+        if not is_logged_in():
+            console.print("[red]✗ Not logged in. Worker requires authentication.[/red]")
+            console.print("[dim]Run 'aline login' first.[/dim]")
+            return 1
+
         is_running, pid, mode = detect_worker_process()
         if is_running:
             console.print(f"[yellow]Worker is already running (PID: {pid}, mode: {mode})[/yellow]")
realign/config.py CHANGED
@@ -27,9 +27,9 @@ class ReAlignConfig:
         "https://realign-server.vercel.app"  # Backend URL for interactive share export
     )
 
-    # User identity (V9)
-    user_name: str = ""  # User's display name (set during init)
-    user_id: str = ""  # User's UUID (generated from MAC address)
+    # User identity (V9, renamed in V17: user_id -> uid)
+    user_name: str = ""  # User's display name (set during init or login)
+    uid: str = ""  # User's UUID (from Supabase login)
 
     # Session catch-up settings
     max_catchup_sessions: int = 3  # Max sessions to auto-import on watcher startup
@@ -92,7 +92,7 @@ class ReAlignConfig:
             "enable_temp_turn_titles": os.getenv("REALIGN_ENABLE_TEMP_TURN_TITLES"),
             "share_backend_url": os.getenv("REALIGN_SHARE_BACKEND_URL"),
             "user_name": os.getenv("REALIGN_USER_NAME"),
-            "user_id": os.getenv("REALIGN_USER_ID"),
+            "uid": os.getenv("REALIGN_UID"),
             "max_catchup_sessions": os.getenv("REALIGN_MAX_CATCHUP_SESSIONS"),
             "anthropic_api_key": os.getenv("REALIGN_ANTHROPIC_API_KEY"),
             "openai_api_key": os.getenv("REALIGN_OPENAI_API_KEY"),
@@ -124,6 +124,13 @@ class ReAlignConfig:
                 else:
                     config_dict[key] = value
 
+        # Migration: user_id -> uid (V17)
+        if "user_id" in config_dict and "uid" not in config_dict:
+            config_dict["uid"] = config_dict.pop("user_id")
+        elif "user_id" in config_dict:
+            # Both exist, prefer uid, discard user_id
+            config_dict.pop("user_id")
+
         return cls(**{k: v for k, v in config_dict.items() if k in cls.__annotations__})
 
     def save(self, config_path: Optional[Path] = None):
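The load() migration above means an existing config file that still carries the old user_id key keeps working: the value is exposed as uid, and a stray user_id is dropped if both keys are present. A minimal illustration of the same mapping on a plain dict (values are invented):

    # Mirrors the migration branch above on a stand-in config dict.
    config_dict = {"user_name": "abc123", "user_id": "legacy-uuid"}

    if "user_id" in config_dict and "uid" not in config_dict:
        config_dict["uid"] = config_dict.pop("user_id")
    elif "user_id" in config_dict:
        # Both present: keep uid, discard user_id
        config_dict.pop("user_id")

    print(config_dict)  # {'user_name': 'abc123', 'uid': 'legacy-uuid'}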
@@ -148,7 +155,7 @@ class ReAlignConfig:
             "enable_temp_turn_titles": self.enable_temp_turn_titles,
             "share_backend_url": self.share_backend_url,
             "user_name": self.user_name,
-            "user_id": self.user_id,
+            "uid": self.uid,
             "max_catchup_sessions": self.max_catchup_sessions,
             "anthropic_api_key": self.anthropic_api_key,
             "openai_api_key": self.openai_api_key,
@@ -163,30 +170,6 @@ class ReAlignConfig:
             yaml.dump(config_dict, f, default_flow_style=False, allow_unicode=True)
 
 
-def generate_user_id() -> str:
-    """
-    Generate a persistent user UUID based on MAC address.
-
-    Uses uuid.getnode() to get the MAC address, then generates a UUID5
-    using DNS namespace. If MAC address retrieval fails, falls back to
-    a random UUID.
-
-    Returns:
-        str: User UUID as a string
-    """
-    import uuid
-
-    try:
-        mac = uuid.getnode()
-        # Use MAC address with DNS namespace to generate UUID5
-        namespace = uuid.UUID("6ba7b810-9dad-11d1-80b4-00c04fd430c8")  # DNS namespace
-        user_uuid = uuid.uuid5(namespace, str(mac))
-        return str(user_uuid)
-    except Exception:
-        # Fallback to random UUID if MAC address retrieval fails
-        return str(uuid.uuid4())
-
-
 def generate_random_username() -> str:
     """
     Generate a random username with format: 3 lowercase letters + 3 digits.