aline-ai 0.5.4__py3-none-any.whl → 0.5.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. {aline_ai-0.5.4.dist-info → aline_ai-0.5.5.dist-info}/METADATA +1 -1
  2. aline_ai-0.5.5.dist-info/RECORD +93 -0
  3. realign/__init__.py +1 -1
  4. realign/adapters/antigravity.py +28 -20
  5. realign/adapters/base.py +46 -50
  6. realign/adapters/claude.py +14 -14
  7. realign/adapters/codex.py +7 -7
  8. realign/adapters/gemini.py +11 -11
  9. realign/adapters/registry.py +14 -10
  10. realign/claude_detector.py +2 -2
  11. realign/claude_hooks/__init__.py +3 -3
  12. realign/claude_hooks/permission_request_hook_installer.py +31 -32
  13. realign/claude_hooks/stop_hook.py +4 -1
  14. realign/claude_hooks/stop_hook_installer.py +30 -31
  15. realign/cli.py +7 -0
  16. realign/codex_detector.py +11 -11
  17. realign/commands/add.py +88 -65
  18. realign/commands/config.py +3 -12
  19. realign/commands/context.py +3 -1
  20. realign/commands/export_shares.py +86 -127
  21. realign/commands/import_shares.py +145 -155
  22. realign/commands/init.py +166 -30
  23. realign/commands/restore.py +18 -6
  24. realign/commands/search.py +14 -42
  25. realign/commands/upgrade.py +155 -11
  26. realign/commands/watcher.py +98 -219
  27. realign/commands/worker.py +29 -6
  28. realign/config.py +25 -20
  29. realign/context.py +1 -3
  30. realign/dashboard/app.py +4 -4
  31. realign/dashboard/screens/create_event.py +3 -1
  32. realign/dashboard/screens/event_detail.py +14 -6
  33. realign/dashboard/screens/session_detail.py +3 -1
  34. realign/dashboard/screens/share_import.py +7 -3
  35. realign/dashboard/tmux_manager.py +54 -9
  36. realign/dashboard/widgets/config_panel.py +85 -1
  37. realign/dashboard/widgets/events_table.py +3 -1
  38. realign/dashboard/widgets/header.py +1 -0
  39. realign/dashboard/widgets/search_panel.py +37 -27
  40. realign/dashboard/widgets/sessions_table.py +24 -15
  41. realign/dashboard/widgets/terminal_panel.py +66 -22
  42. realign/dashboard/widgets/watcher_panel.py +6 -2
  43. realign/dashboard/widgets/worker_panel.py +10 -1
  44. realign/db/__init__.py +1 -1
  45. realign/db/base.py +5 -15
  46. realign/db/locks.py +0 -1
  47. realign/db/migration.py +82 -76
  48. realign/db/schema.py +2 -6
  49. realign/db/sqlite_db.py +23 -41
  50. realign/events/__init__.py +0 -1
  51. realign/events/event_summarizer.py +27 -15
  52. realign/events/session_summarizer.py +29 -15
  53. realign/file_lock.py +1 -0
  54. realign/hooks.py +150 -60
  55. realign/logging_config.py +12 -15
  56. realign/mcp_server.py +30 -51
  57. realign/mcp_watcher.py +0 -1
  58. realign/models/event.py +29 -20
  59. realign/prompts/__init__.py +7 -7
  60. realign/prompts/presets.py +15 -11
  61. realign/redactor.py +99 -59
  62. realign/triggers/__init__.py +9 -9
  63. realign/triggers/antigravity_trigger.py +30 -28
  64. realign/triggers/base.py +4 -3
  65. realign/triggers/claude_trigger.py +104 -85
  66. realign/triggers/codex_trigger.py +15 -5
  67. realign/triggers/gemini_trigger.py +57 -47
  68. realign/triggers/next_turn_trigger.py +3 -1
  69. realign/triggers/registry.py +6 -2
  70. realign/triggers/turn_status.py +3 -1
  71. realign/watcher_core.py +306 -131
  72. realign/watcher_daemon.py +8 -8
  73. realign/worker_core.py +3 -1
  74. realign/worker_daemon.py +3 -1
  75. aline_ai-0.5.4.dist-info/RECORD +0 -93
  76. {aline_ai-0.5.4.dist-info → aline_ai-0.5.5.dist-info}/WHEEL +0 -0
  77. {aline_ai-0.5.4.dist-info → aline_ai-0.5.5.dist-info}/entry_points.txt +0 -0
  78. {aline_ai-0.5.4.dist-info → aline_ai-0.5.5.dist-info}/licenses/LICENSE +0 -0
  79. {aline_ai-0.5.4.dist-info → aline_ai-0.5.5.dist-info}/top_level.txt +0 -0
realign/commands/import_shares.py
@@ -18,6 +18,7 @@ from pathlib import Path
 
 try:
     import httpx
+
     HTTPX_AVAILABLE = True
 except ImportError:
     HTTPX_AVAILABLE = False
@@ -26,6 +27,7 @@ try:
     from rich.console import Console
     from rich.progress import Progress
     from rich.prompt import Prompt
+
     RICH_AVAILABLE = True
 except ImportError:
     RICH_AVAILABLE = False
@@ -34,7 +36,7 @@ from ..logging_config import setup_logger
 from ..db.base import DatabaseInterface, EventRecord, SessionRecord, TurnRecord
 from ..config import ReAlignConfig
 
-logger = setup_logger('realign.commands.import_shares', 'import_shares.log')
+logger = setup_logger("realign.commands.import_shares", "import_shares.log")
 
 if RICH_AVAILABLE:
     console = Console()
@@ -93,10 +95,7 @@ def import_share_command(
         else:
             print(f"Fetching share info from {backend_url}...")
 
-        info_response = httpx.get(
-            f"{backend_url}/api/share/{share_id}/info",
-            timeout=10.0
-        )
+        info_response = httpx.get(f"{backend_url}/api/share/{share_id}/info", timeout=10.0)
         info_response.raise_for_status()
         info = info_response.json()
         logger.info(f"Share info retrieved: requires_password={info.get('requires_password')}")
@@ -106,19 +105,22 @@ def import_share_command(
         return 1
 
     # 3. Authenticate if needed
-    if info.get('requires_password'):
+    if info.get("requires_password"):
         if not password:
             if non_interactive:
-                print("[ERROR] This share requires a password but none was provided", file=sys.stderr)
+                print(
+                    "[ERROR] This share requires a password but none was provided", file=sys.stderr
+                )
                 return 1
             if console and RICH_AVAILABLE:
                 password = Prompt.ask("Enter password", password=True)
             else:
                 import getpass
+
                 password = getpass.getpass("Enter password: ")
 
         password_hash = hashlib.sha256(password.encode()).hexdigest()
-        headers = {'X-Password-Hash': password_hash}
+        headers = {"X-Password-Hash": password_hash}
        logger.info("Using password authentication")
     else:
         # Create session for non-password shares
@@ -129,13 +131,12 @@ def import_share_command(
                 print("Creating session...")
 
             session_response = httpx.post(
-                f"{backend_url}/api/share/{share_id}/session",
-                timeout=10.0
+                f"{backend_url}/api/share/{share_id}/session", timeout=10.0
             )
             session_response.raise_for_status()
             session_data = session_response.json()
-            session_token = session_data.get('session_token')
-            headers = {'Authorization': f'Bearer {session_token}'}
+            session_token = session_data.get("session_token")
+            headers = {"Authorization": f"Bearer {session_token}"}
             logger.info("Created session token for authentication")
         except Exception as e:
             print(f"[ERROR] Failed to create session: {e}", file=sys.stderr)
@@ -151,35 +152,35 @@ def import_share_command(
 
         # First, try standard download
         export_response = httpx.get(
-            f"{backend_url}/api/share/{share_id}/export",
-            headers=headers,
-            timeout=30.0
+            f"{backend_url}/api/share/{share_id}/export", headers=headers, timeout=30.0
         )
 
         export_data = export_response.json()
 
         # Check if chunked download is needed
-        if export_response.status_code == 413 or export_data.get('needs_chunked_download'):
+        if export_response.status_code == 413 or export_data.get("needs_chunked_download"):
             logger.info("Data too large, switching to chunked download")
-            total_chunks = export_data.get('total_chunks', 1)
-            data_size = export_data.get('data_size', 0)
+            total_chunks = export_data.get("total_chunks", 1)
+            data_size = export_data.get("data_size", 0)
 
             if console:
-                console.print(f"[yellow]Large file detected ({data_size / 1024 / 1024:.2f}MB), using chunked download...[/yellow]")
+                console.print(
+                    f"[yellow]Large file detected ({data_size / 1024 / 1024:.2f}MB), using chunked download...[/yellow]"
+                )
             else:
-                print(f"Large file detected ({data_size / 1024 / 1024:.2f}MB), using chunked download...")
+                print(
+                    f"Large file detected ({data_size / 1024 / 1024:.2f}MB), using chunked download..."
+                )
 
             # Download chunks
-            raw_data = _download_chunks(
-                backend_url, share_id, headers, total_chunks
-            )
+            raw_data = _download_chunks(backend_url, share_id, headers, total_chunks)
 
             # Parse the combined data
             conversation_data = json.loads(raw_data)
             export_data = {
-                'success': True,
-                'data': conversation_data,
-                'metadata': export_data.get('metadata', {})
+                "success": True,
+                "data": conversation_data,
+                "metadata": export_data.get("metadata", {}),
             }
         else:
             export_response.raise_for_status()
@@ -190,25 +191,26 @@ def import_share_command(
         logger.error(f"Failed to download data: {e}", exc_info=True)
         return 1
 
-    if not export_data.get('success'):
-        error_msg = export_data.get('error', 'Unknown error')
+    if not export_data.get("success"):
+        error_msg = export_data.get("error", "Unknown error")
         print(f"[ERROR] Export failed: {error_msg}", file=sys.stderr)
         logger.error(f"Export failed: {error_msg}")
         return 1
 
-    conversation_data = export_data['data']
-    version = conversation_data.get('version', '1.0')
+    conversation_data = export_data["data"]
+    version = conversation_data.get("version", "1.0")
     logger.info(f"Conversation data version: {version}")
 
     # 5. Import to local database
     if db is None:
         from ..db.sqlite_db import SQLiteDatabase
         from pathlib import Path
+
         db_path = Path(config.sqlite_db_path).expanduser()
         db = SQLiteDatabase(db_path=db_path)
 
     try:
-        if version == '2.0':
+        if version == "2.0":
             return import_v2_data(conversation_data, share_url, db, force, non_interactive)
         else:
             return import_v1_data(conversation_data, share_url, db, force)
@@ -216,6 +218,7 @@ def import_share_command(
         print(f"[ERROR] Import failed: {e}", file=sys.stderr)
         logger.error(f"Import failed: {e}", exc_info=True)
         import traceback
+
         traceback.print_exc()
         return 1
 
@@ -237,8 +240,8 @@ def import_v2_data(
     logger.info("Starting v2.0 data import")
 
     # 1. Create Event
-    event_data = data.get('event', {})
-    event_id = event_data.get('event_id')
+    event_data = data.get("event", {})
+    event_id = event_data.get("event_id")
 
     # Check if event already exists
     existing_event = db.get_event_by_id(event_id) if event_id else None
@@ -246,7 +249,9 @@ def import_v2_data(
     if existing_event and not force:
         if console:
             console.print(f"[yellow]Event '{existing_event.title}' already exists.[/yellow]")
-            console.print("[yellow]Use --force to re-import or press Enter to use existing event.[/yellow]")
+            console.print(
+                "[yellow]Use --force to re-import or press Enter to use existing event.[/yellow]"
+            )
         else:
             print(f"Event '{existing_event.title}' already exists.")
             print("Use --force to re-import or press Enter to use existing event.")
@@ -256,7 +261,7 @@ def import_v2_data(
         else:
             try:
                 response = input("Use existing event? (Y/n): ").strip().lower()
-                use_existing = response in ['', 'y', 'yes']
+                use_existing = response in ["", "y", "yes"]
             except (EOFError, KeyboardInterrupt):
                 use_existing = True
 
@@ -270,21 +275,21 @@ def import_v2_data(
     # Create new event or update existing
     event = EventRecord(
         id=event_id or generate_uuid(),
-        title=event_data.get('title', 'Imported Event'),
-        description=event_data.get('description'),
-        event_type=event_data.get('event_type', 'imported'),
-        status=event_data.get('status', 'archived'),
-        start_timestamp=parse_datetime(event_data.get('created_at')),
-        end_timestamp=parse_datetime(event_data.get('updated_at')),
+        title=event_data.get("title", "Imported Event"),
+        description=event_data.get("description"),
+        event_type=event_data.get("event_type", "imported"),
+        status=event_data.get("status", "archived"),
+        start_timestamp=parse_datetime(event_data.get("created_at")),
+        end_timestamp=parse_datetime(event_data.get("updated_at")),
         created_at=datetime.now(),
         updated_at=datetime.now(),
-        metadata={'source': 'share_import', 'share_url': share_url},
+        metadata={"source": "share_import", "share_url": share_url},
         preset_questions=None,
         slack_message=None,
         share_url=share_url,
         commit_hashes=[],
-        creator_name=event_data.get('creator_name'), # V9: preserve creator info
-        creator_id=event_data.get('creator_id'),
+        creator_name=event_data.get("creator_name"), # V9: preserve creator info
+        creator_id=event_data.get("creator_id"),
     )
 
     # Use sync_events for both create and update (upsert behavior)
@@ -305,7 +310,7 @@ def import_v2_data(
     event_id = event.id
 
     # 2. Import Sessions and Turns
-    sessions_data = data.get('sessions', [])
+    sessions_data = data.get("sessions", [])
     imported_sessions = 0
     imported_turns = 0
     skipped_turns = 0
@@ -315,22 +320,18 @@ def import_v2_data(
             task = progress.add_task("[cyan]Importing sessions...", total=len(sessions_data))
 
             for session_data in sessions_data:
-                result = import_session_with_turns(
-                    session_data, event_id, share_url, db, force
-                )
-                imported_sessions += result['sessions']
-                imported_turns += result['turns']
-                skipped_turns += result['skipped']
+                result = import_session_with_turns(session_data, event_id, share_url, db, force)
+                imported_sessions += result["sessions"]
+                imported_turns += result["turns"]
+                skipped_turns += result["skipped"]
                 progress.update(task, advance=1)
     else:
         for idx, session_data in enumerate(sessions_data, 1):
             print(f"Importing session {idx}/{len(sessions_data)}...")
-            result = import_session_with_turns(
-                session_data, event_id, share_url, db, force
-            )
-            imported_sessions += result['sessions']
-            imported_turns += result['turns']
-            skipped_turns += result['skipped']
+            result = import_session_with_turns(session_data, event_id, share_url, db, force)
+            imported_sessions += result["sessions"]
+            imported_turns += result["turns"]
+            skipped_turns += result["skipped"]
 
     # 3. Display summary
     if console:
@@ -348,17 +349,15 @@ def import_v2_data(
         if skipped_turns > 0:
             print(f"Turns skipped (duplicates): {skipped_turns}")
 
-    logger.info(f"Import completed: {imported_sessions} sessions, {imported_turns} turns, {skipped_turns} skipped")
+    logger.info(
+        f"Import completed: {imported_sessions} sessions, {imported_turns} turns, {skipped_turns} skipped"
+    )
 
     return 0
 
 
 def import_session_with_turns(
-    session_data: Dict[str, Any],
-    event_id: str,
-    share_url: str,
-    db: DatabaseInterface,
-    force: bool
+    session_data: Dict[str, Any], event_id: str, share_url: str, db: DatabaseInterface, force: bool
 ) -> Dict[str, int]:
     """
     Import a single session with all its turns.
@@ -366,7 +365,7 @@ def import_session_with_turns(
     Returns:
         Dict with counts: {'sessions': int, 'turns': int, 'skipped': int}
     """
-    session_id = session_data.get('session_id')
+    session_id = session_data.get("session_id")
     imported_sessions = 0
     imported_turns = 0
     skipped_turns = 0
@@ -380,22 +379,22 @@ def import_session_with_turns(
     # Create new session
     session = SessionRecord(
         id=session_id or generate_uuid(),
-        session_file_path=Path(''), # Not applicable for imported sessions
-        session_type=session_data.get('session_type', 'imported'),
-        workspace_path=session_data.get('workspace_path'),
-        started_at=parse_datetime(session_data.get('started_at')) or datetime.now(),
-        last_activity_at=parse_datetime(session_data.get('last_activity_at')) or datetime.now(),
+        session_file_path=Path(""), # Not applicable for imported sessions
+        session_type=session_data.get("session_type", "imported"),
+        workspace_path=session_data.get("workspace_path"),
+        started_at=parse_datetime(session_data.get("started_at")) or datetime.now(),
+        last_activity_at=parse_datetime(session_data.get("last_activity_at")) or datetime.now(),
         created_at=datetime.now(),
         updated_at=datetime.now(),
-        metadata={'source': 'share_import', 'share_url': share_url},
-        session_title=session_data.get('session_title'),
-        session_summary=session_data.get('session_summary'),
+        metadata={"source": "share_import", "share_url": share_url},
+        session_title=session_data.get("session_title"),
+        session_summary=session_data.get("session_summary"),
         summary_updated_at=None,
-        summary_status='completed',
+        summary_status="completed",
         summary_locked_until=None,
         summary_error=None,
-        creator_name=session_data.get('creator_name'), # V9: preserve creator info
-        creator_id=session_data.get('creator_id'),
+        creator_name=session_data.get("creator_name"), # V9: preserve creator info
+        creator_id=session_data.get("creator_id"),
     )
 
     if existing_session and force:
@@ -409,13 +408,14 @@ def import_session_with_turns(
         session_type=session.session_type,
         started_at=session.started_at,
         workspace_path=session.workspace_path,
-        metadata=session.metadata
+        metadata=session.metadata,
     )
     logger.info(f"Created session: {session_id}")
 
     # Update session with title, summary, and creator info (not supported by get_or_create_session)
     with db._get_connection() as conn:
-        conn.execute("""
+        conn.execute(
+            """
             UPDATE sessions
             SET session_title = ?,
                 session_summary = ?,
@@ -426,25 +426,27 @@ def import_session_with_turns(
                 creator_name = ?,
                 creator_id = ?
             WHERE id = ?
-        """, (
-            session.session_title,
-            session.session_summary,
-            session.summary_updated_at,
-            session.summary_status,
-            session.summary_locked_until,
-            session.summary_error,
-            session.creator_name,
-            session.creator_id,
-            session.id
-        ))
+        """,
+            (
+                session.session_title,
+                session.session_summary,
+                session.summary_updated_at,
+                session.summary_status,
+                session.summary_locked_until,
+                session.summary_error,
+                session.creator_name,
+                session.creator_id,
+                session.id,
+            ),
+        )
         conn.commit()
 
     imported_sessions += 1
 
     # Import turns
-    turns_data = session_data.get('turns', [])
+    turns_data = session_data.get("turns", [])
     for turn_data in turns_data:
-        content_hash = turn_data.get('content_hash')
+        content_hash = turn_data.get("content_hash")
 
         # Check for duplicates using content_hash
         if content_hash and not force:
@@ -456,28 +458,28 @@ def import_session_with_turns(
 
         # Create turn
         turn = TurnRecord(
-            id=turn_data.get('turn_id') or generate_uuid(),
+            id=turn_data.get("turn_id") or generate_uuid(),
             session_id=session_id,
-            turn_number=turn_data.get('turn_number', 0),
-            user_message=turn_data.get('user_message'),
-            assistant_summary=turn_data.get('assistant_summary'),
-            turn_status='completed',
-            llm_title=turn_data.get('llm_title', ''),
-            llm_description=turn_data.get('llm_description'),
-            model_name=turn_data.get('model_name'),
-            if_last_task='no',
-            satisfaction='unknown',
-            content_hash=content_hash or generate_content_hash(turn_data.get('messages', [])),
-            timestamp=parse_datetime(turn_data.get('timestamp')) or datetime.now(),
+            turn_number=turn_data.get("turn_number", 0),
+            user_message=turn_data.get("user_message"),
+            assistant_summary=turn_data.get("assistant_summary"),
+            turn_status="completed",
+            llm_title=turn_data.get("llm_title", ""),
+            llm_description=turn_data.get("llm_description"),
+            model_name=turn_data.get("model_name"),
+            if_last_task="no",
+            satisfaction="unknown",
+            content_hash=content_hash or generate_content_hash(turn_data.get("messages", [])),
+            timestamp=parse_datetime(turn_data.get("timestamp")) or datetime.now(),
             created_at=datetime.now(),
-            git_commit_hash=turn_data.get('git_commit_hash'),
-            creator_name=turn_data.get('creator_name'), # V9: preserve creator info
-            creator_id=turn_data.get('creator_id'),
+            git_commit_hash=turn_data.get("git_commit_hash"),
+            creator_name=turn_data.get("creator_name"), # V9: preserve creator info
+            creator_id=turn_data.get("creator_id"),
         )
 
         # Store turn content (JSONL)
-        messages = turn_data.get('messages', [])
-        jsonl_content = '\n'.join([json.dumps(msg) for msg in messages])
+        messages = turn_data.get("messages", [])
+        jsonl_content = "\n".join([json.dumps(msg) for msg in messages])
 
         db.create_turn(turn, jsonl_content)
         logger.debug(f"Created turn: {turn.id}")
@@ -487,19 +489,10 @@ def import_session_with_turns(
     # Link session to event
     db.link_session_to_event(event_id, session_id)
 
-    return {
-        'sessions': imported_sessions,
-        'turns': imported_turns,
-        'skipped': skipped_turns
-    }
+    return {"sessions": imported_sessions, "turns": imported_turns, "skipped": skipped_turns}
 
 
-def import_v1_data(
-    data: Dict[str, Any],
-    share_url: str,
-    db: DatabaseInterface,
-    force: bool
-) -> int:
+def import_v1_data(data: Dict[str, Any], share_url: str, db: DatabaseInterface, force: bool) -> int:
     """Import v1.0 format data (flat messages without Event/Turn structure)."""
 
     if console:
@@ -514,44 +507,44 @@ def import_v1_data(
         id=generate_uuid(),
         title=f"Imported from {share_url[:50]}...",
         description="Imported from share (v1.0 format)",
-        event_type='imported',
-        status='archived',
+        event_type="imported",
+        status="archived",
         start_timestamp=datetime.now(),
         end_timestamp=None,
         created_at=datetime.now(),
         updated_at=datetime.now(),
-        metadata={'source': 'share_import', 'share_url': share_url, 'version': '1.0'},
+        metadata={"source": "share_import", "share_url": share_url, "version": "1.0"},
         preset_questions=None,
         slack_message=None,
         share_url=share_url,
-        commit_hashes=[]
+        commit_hashes=[],
     )
     db.sync_events([event])
     logger.info(f"Created legacy event: {event.id}")
 
     # Import sessions (without turn structure)
-    sessions_data = data.get('sessions', [])
+    sessions_data = data.get("sessions", [])
     for session_data in sessions_data:
-        session_id = session_data.get('session_id', generate_uuid())
-        messages = session_data.get('messages', [])
+        session_id = session_data.get("session_id", generate_uuid())
+        messages = session_data.get("messages", [])
 
         # Create session
         session = SessionRecord(
             id=session_id,
-            session_file_path=Path(''),
-            session_type='imported',
+            session_file_path=Path(""),
+            session_type="imported",
             workspace_path=None,
             started_at=datetime.now(),
             last_activity_at=datetime.now(),
             created_at=datetime.now(),
             updated_at=datetime.now(),
-            metadata={'source': 'share_import', 'version': '1.0'},
+            metadata={"source": "share_import", "version": "1.0"},
             session_title=None,
             session_summary=None,
             summary_updated_at=None,
-            summary_status='idle',
+            summary_status="idle",
             summary_locked_until=None,
-            summary_error=None
+            summary_error=None,
         )
         db.get_or_create_session(
             session_id=session.id,
@@ -559,7 +552,7 @@ def import_v1_data(
             session_type=session.session_type,
             started_at=session.started_at,
             workspace_path=session.workspace_path,
-            metadata=session.metadata
+            metadata=session.metadata,
         )
 
         # Create a single turn with all messages
@@ -569,20 +562,20 @@ def import_v1_data(
             turn_number=1,
             user_message=None,
             assistant_summary=None,
-            turn_status='completed',
-            llm_title='Imported Messages',
+            turn_status="completed",
+            llm_title="Imported Messages",
             llm_description=None,
             model_name=None,
-            if_last_task='no',
-            satisfaction='unknown',
+            if_last_task="no",
+            satisfaction="unknown",
             content_hash=generate_content_hash(messages),
             timestamp=datetime.now(),
             created_at=datetime.now(),
-            git_commit_hash=None
+            git_commit_hash=None,
         )
 
         # Store content
-        jsonl_content = '\n'.join([json.dumps(msg) for msg in messages])
+        jsonl_content = "\n".join([json.dumps(msg) for msg in messages])
         db.create_turn(turn, jsonl_content)
 
         # Link to event
@@ -600,10 +593,12 @@ def import_v1_data(
 
 # Helper functions
 
+
 def extract_share_id(share_url: str) -> Optional[str]:
     """Extract share ID from URL."""
     import re
-    match = re.search(r'/share/([a-zA-Z0-9_-]+)', share_url)
+
+    match = re.search(r"/share/([a-zA-Z0-9_-]+)", share_url)
     return match.group(1) if match else None
 
 
@@ -612,7 +607,7 @@ def parse_datetime(dt_str: Optional[str]) -> Optional[datetime]:
     if not dt_str:
         return None
     try:
-        return datetime.fromisoformat(dt_str.replace('Z', '+00:00'))
+        return datetime.fromisoformat(dt_str.replace("Z", "+00:00"))
     except Exception as e:
         logger.warning(f"Failed to parse datetime: {dt_str}, error: {e}")
         return None
@@ -630,10 +625,7 @@ def generate_content_hash(messages: List[Dict]) -> str:
 
 
 def _download_chunks(
-    backend_url: str,
-    share_id: str,
-    headers: Dict[str, str],
-    total_chunks: int
+    backend_url: str, share_id: str, headers: Dict[str, str], total_chunks: int
 ) -> str:
     """
     Download data in chunks and combine them.
@@ -651,25 +643,23 @@ def _download_chunks(
 
     if console and RICH_AVAILABLE:
         with Progress() as progress:
-            task = progress.add_task(
-                "[cyan]Downloading chunks...", total=total_chunks
-            )
+            task = progress.add_task("[cyan]Downloading chunks...", total=total_chunks)
 
             for i in range(total_chunks):
                 chunk_response = httpx.get(
                     f"{backend_url}/api/share/{share_id}/export?chunk={i}",
                     headers=headers,
-                    timeout=60.0
+                    timeout=60.0,
                 )
                 chunk_response.raise_for_status()
                 chunk_data = chunk_response.json()
 
-                if not chunk_data.get('success'):
+                if not chunk_data.get("success"):
                     raise RuntimeError(f"Failed to download chunk {i}: {chunk_data.get('error')}")
 
                 # Decode base64 chunk
-                encoded_chunk = chunk_data.get('chunk_data', '')
-                decoded_chunk = base64.b64decode(encoded_chunk).decode('utf-8')
+                encoded_chunk = chunk_data.get("chunk_data", "")
+                decoded_chunk = base64.b64decode(encoded_chunk).decode("utf-8")
                 chunks.append(decoded_chunk)
 
                 progress.update(task, advance=1)
@@ -681,23 +671,23 @@ def _download_chunks(
             chunk_response = httpx.get(
                 f"{backend_url}/api/share/{share_id}/export?chunk={i}",
                 headers=headers,
-                timeout=60.0
+                timeout=60.0,
             )
             chunk_response.raise_for_status()
             chunk_data = chunk_response.json()
 
-            if not chunk_data.get('success'):
+            if not chunk_data.get("success"):
                 raise RuntimeError(f"Failed to download chunk {i}: {chunk_data.get('error')}")
 
             # Decode base64 chunk
-            encoded_chunk = chunk_data.get('chunk_data', '')
-            decoded_chunk = base64.b64decode(encoded_chunk).decode('utf-8')
+            encoded_chunk = chunk_data.get("chunk_data", "")
+            decoded_chunk = base64.b64decode(encoded_chunk).decode("utf-8")
             chunks.append(decoded_chunk)
 
             logger.debug(f"Downloaded chunk {i + 1}/{total_chunks}")
 
     # Combine all chunks
-    combined_data = ''.join(chunks)
+    combined_data = "".join(chunks)
     logger.info(f"Combined {total_chunks} chunks into {len(combined_data)} bytes")
 
     return combined_data