aline-ai 0.5.4__py3-none-any.whl → 0.5.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/METADATA +1 -1
  2. aline_ai-0.5.6.dist-info/RECORD +95 -0
  3. realign/__init__.py +1 -1
  4. realign/adapters/antigravity.py +28 -20
  5. realign/adapters/base.py +46 -50
  6. realign/adapters/claude.py +14 -14
  7. realign/adapters/codex.py +7 -7
  8. realign/adapters/gemini.py +11 -11
  9. realign/adapters/registry.py +14 -10
  10. realign/claude_detector.py +2 -2
  11. realign/claude_hooks/__init__.py +3 -3
  12. realign/claude_hooks/permission_request_hook_installer.py +31 -32
  13. realign/claude_hooks/stop_hook.py +4 -1
  14. realign/claude_hooks/stop_hook_installer.py +30 -31
  15. realign/cli.py +23 -4
  16. realign/codex_detector.py +11 -11
  17. realign/commands/add.py +88 -65
  18. realign/commands/config.py +3 -12
  19. realign/commands/context.py +3 -1
  20. realign/commands/export_shares.py +86 -127
  21. realign/commands/import_shares.py +145 -155
  22. realign/commands/init.py +166 -30
  23. realign/commands/restore.py +18 -6
  24. realign/commands/search.py +14 -42
  25. realign/commands/upgrade.py +155 -11
  26. realign/commands/watcher.py +98 -219
  27. realign/commands/worker.py +29 -6
  28. realign/config.py +25 -20
  29. realign/context.py +1 -3
  30. realign/dashboard/app.py +34 -24
  31. realign/dashboard/screens/__init__.py +10 -1
  32. realign/dashboard/screens/create_agent.py +244 -0
  33. realign/dashboard/screens/create_event.py +3 -1
  34. realign/dashboard/screens/event_detail.py +14 -6
  35. realign/dashboard/screens/help_screen.py +114 -0
  36. realign/dashboard/screens/session_detail.py +3 -1
  37. realign/dashboard/screens/share_import.py +7 -3
  38. realign/dashboard/tmux_manager.py +54 -9
  39. realign/dashboard/widgets/config_panel.py +85 -1
  40. realign/dashboard/widgets/events_table.py +314 -70
  41. realign/dashboard/widgets/header.py +2 -1
  42. realign/dashboard/widgets/search_panel.py +37 -27
  43. realign/dashboard/widgets/sessions_table.py +404 -85
  44. realign/dashboard/widgets/terminal_panel.py +155 -175
  45. realign/dashboard/widgets/watcher_panel.py +6 -2
  46. realign/dashboard/widgets/worker_panel.py +10 -1
  47. realign/db/__init__.py +1 -1
  48. realign/db/base.py +5 -15
  49. realign/db/locks.py +0 -1
  50. realign/db/migration.py +82 -76
  51. realign/db/schema.py +2 -6
  52. realign/db/sqlite_db.py +23 -41
  53. realign/events/__init__.py +0 -1
  54. realign/events/event_summarizer.py +27 -15
  55. realign/events/session_summarizer.py +29 -15
  56. realign/file_lock.py +1 -0
  57. realign/hooks.py +150 -60
  58. realign/logging_config.py +12 -15
  59. realign/mcp_server.py +30 -51
  60. realign/mcp_watcher.py +0 -1
  61. realign/models/event.py +29 -20
  62. realign/prompts/__init__.py +7 -7
  63. realign/prompts/presets.py +15 -11
  64. realign/redactor.py +99 -59
  65. realign/triggers/__init__.py +9 -9
  66. realign/triggers/antigravity_trigger.py +30 -28
  67. realign/triggers/base.py +4 -3
  68. realign/triggers/claude_trigger.py +104 -85
  69. realign/triggers/codex_trigger.py +15 -5
  70. realign/triggers/gemini_trigger.py +57 -47
  71. realign/triggers/next_turn_trigger.py +3 -1
  72. realign/triggers/registry.py +6 -2
  73. realign/triggers/turn_status.py +3 -1
  74. realign/watcher_core.py +306 -131
  75. realign/watcher_daemon.py +8 -8
  76. realign/worker_core.py +3 -1
  77. realign/worker_daemon.py +3 -1
  78. aline_ai-0.5.4.dist-info/RECORD +0 -93
  79. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/WHEEL +0 -0
  80. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/entry_points.txt +0 -0
  81. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/licenses/LICENSE +0 -0
  82. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/top_level.txt +0 -0
realign/watcher_core.py CHANGED
@@ -19,7 +19,7 @@ from .hooks import find_all_active_sessions
  from .logging_config import setup_logger

  # Initialize logger for watcher
- logger = setup_logger('realign.watcher_core', 'watcher_core.log')
+ logger = setup_logger("realign.watcher_core", "watcher_core.log")


  # Session type detection
@@ -116,18 +116,18 @@ def decode_claude_project_path(project_dir_name: str) -> Optional[Path]:
  if isinstance(project_dir_name, Path):
  project_dir = project_dir_name
  dir_name = project_dir.name
- elif '/' in project_dir_name:
+ elif "/" in project_dir_name:
  project_dir = Path(project_dir_name)
  dir_name = project_dir.name
  else:
  dir_name = project_dir_name
  project_dir = Path.home() / ".claude" / "projects" / dir_name

- if not dir_name.startswith('-'):
+ if not dir_name.startswith("-"):
  return None

  # Try naive decoding first
- path_str = '/' + dir_name[1:].replace('-', '/')
+ path_str = "/" + dir_name[1:].replace("-", "/")
  project_path = Path(path_str)

  if project_path.exists():
@@ -143,8 +143,9 @@ def decode_claude_project_path(project_dir_name: str) -> Optional[Path]:
  # Find any JSONL file (excluding agent files)
  try:
  jsonl_files = [
- f for f in project_dir.iterdir()
- if f.suffix == '.jsonl' and not f.name.startswith('agent-')
+ f
+ for f in project_dir.iterdir()
+ if f.suffix == ".jsonl" and not f.name.startswith("agent-")
  ]

  if not jsonl_files:
@@ -153,7 +154,7 @@ def decode_claude_project_path(project_dir_name: str) -> Optional[Path]:

  # Read lines from first JSONL file to find cwd field
  jsonl_file = jsonl_files[0]
- with jsonl_file.open('r', encoding='utf-8') as f:
+ with jsonl_file.open("r", encoding="utf-8") as f:
  # Check up to first 20 lines for cwd field
  for i, line in enumerate(f):
  if i >= 20:
@@ -164,7 +165,7 @@ def decode_claude_project_path(project_dir_name: str) -> Optional[Path]:
  continue

  session_data = json.loads(line)
- cwd = session_data.get('cwd')
+ cwd = session_data.get("cwd")

  if cwd:
  project_path = Path(cwd)
@@ -210,20 +211,27 @@ class DialogueWatcher:
  self.last_commit_times: Dict[str, float] = {} # Track last commit time per project
  self.last_session_sizes: Dict[str, int] = {} # Track file sizes
  self.last_stop_reason_counts: Dict[str, int] = {} # Track stop_reason counts per session
- self.last_session_mtimes: Dict[str, float] = {} # Track last mtime of session files for idle detection
- self.last_final_commit_times: Dict[str, float] = {} # Track when we last tried final commit per session
+ self.last_session_mtimes: Dict[str, float] = (
+ {}
+ ) # Track last mtime of session files for idle detection
+ self.last_final_commit_times: Dict[str, float] = (
+ {}
+ ) # Track when we last tried final commit per session
  self.min_commit_interval = 5.0 # Minimum 5 seconds between commits (cooldown)
  self.debounce_delay = 10.0 # Wait 10 seconds after file change to ensure turn is complete (increased from 2.0 to handle streaming responses)
  self.final_commit_idle_timeout = 300.0 # 5 minutes idle to trigger final commit
  self.running = False
  self.pending_commit_task: Optional[asyncio.Task] = None
- self._pending_changed_files: set[str] = set() # Accumulate changed files instead of cancelling
+ self._pending_changed_files: set[str] = (
+ set()
+ ) # Accumulate changed files instead of cancelling

  # Trigger support for pluggable turn detection
  from .triggers.registry import get_global_registry
+
  self.trigger_registry = get_global_registry()
- self.trigger_name = 'next_turn' # Default trigger (可配置)
- self._session_triggers: Dict[str, 'TurnTrigger'] = {} # Cache triggers per session
+ self.trigger_name = "next_turn" # Default trigger (可配置)
+ self._session_triggers: Dict[str, "TurnTrigger"] = {} # Cache triggers per session

  # Owner id for DB-backed lease locks (cross-process).
  try:
@@ -238,7 +246,7 @@ class DialogueWatcher:
  self.processing_turn_ttl_seconds = 20 * 60 # 20 minutes

  # Signal directory for Stop hook integration
- self.signal_dir = Path.home() / '.aline' / '.signals'
+ self.signal_dir = Path.home() / ".aline" / ".signals"
  self.signal_dir.mkdir(parents=True, exist_ok=True)
  self.user_prompt_signal_dir = self.signal_dir / "user_prompt_submit"
  self.user_prompt_signal_dir.mkdir(parents=True, exist_ok=True)
@@ -257,14 +265,20 @@ class DialogueWatcher:
  logger.info(f"Supports: Claude Code & Codex (auto-detected)")
  logger.info(f"Debounce: {self.debounce_delay}s, Cooldown: {self.min_commit_interval}s")
  print("[Watcher] Started watching for dialogue completion", file=sys.stderr)
- print(f"[Watcher] Mode: Multi-project monitoring (all Claude Code projects)", file=sys.stderr)
+ print(
+ f"[Watcher] Mode: Multi-project monitoring (all Claude Code projects)", file=sys.stderr
+ )
  print(f"[Watcher] Trigger: Per-request (at end of each AI response)", file=sys.stderr)
  print(f"[Watcher] Supports: Claude Code & Codex (auto-detected)", file=sys.stderr)
- print(f"[Watcher] Debounce: {self.debounce_delay}s, Cooldown: {self.min_commit_interval}s", file=sys.stderr)
+ print(
+ f"[Watcher] Debounce: {self.debounce_delay}s, Cooldown: {self.min_commit_interval}s",
+ file=sys.stderr,
+ )

  # Auto-install Claude Code Stop hook for reliable turn completion detection
  try:
  from .claude_hooks.stop_hook_installer import ensure_stop_hook_installed
+
  if ensure_stop_hook_installed(quiet=True):
  logger.info("Claude Code Stop hook is ready")
  else:
@@ -278,6 +292,7 @@ class DialogueWatcher:
  from .claude_hooks.user_prompt_submit_hook_installer import (
  ensure_user_prompt_submit_hook_installed,
  )
+
  if ensure_user_prompt_submit_hook_installed(quiet=True):
  logger.info("Claude Code UserPromptSubmit hook is ready")
  else:
@@ -346,13 +361,13 @@ class DialogueWatcher:
  if not self.signal_dir.exists():
  return

- for signal_file in self.signal_dir.glob('*.signal'):
+ for signal_file in self.signal_dir.glob("*.signal"):
  try:
  # Read signal data
  signal_data = json.loads(signal_file.read_text())
- session_id = signal_data.get('session_id', '')
- project_dir = signal_data.get('project_dir', '')
- transcript_path = signal_data.get('transcript_path', '')
+ session_id = signal_data.get("session_id", "")
+ project_dir = signal_data.get("project_dir", "")
+ transcript_path = signal_data.get("transcript_path", "")

  logger.info(f"Stop signal received for session {session_id}")
  print(f"[Watcher] Stop signal received for {session_id}", file=sys.stderr)
@@ -389,7 +404,9 @@ class DialogueWatcher:
  session_type=self._detect_session_type(session_file),
  )
  except Exception as e:
- logger.warning(f"Failed to enqueue stop-hook job for {session_id}: {e}")
+ logger.warning(
+ f"Failed to enqueue stop-hook job for {session_id}: {e}"
+ )
  continue

  logger.info(
@@ -400,7 +417,9 @@ class DialogueWatcher:
  file=sys.stderr,
  )
  else:
- logger.warning(f"Could not determine project path for session {session_id}")
+ logger.warning(
+ f"Could not determine project path for session {session_id}"
+ )
  else:
  logger.warning(f"Session file not found for {session_id}")

@@ -492,9 +511,9 @@ class DialogueWatcher:
  return 0

  # For Claude trigger, use get_detailed_analysis to get total turns
- if hasattr(trigger, 'get_detailed_analysis'):
+ if hasattr(trigger, "get_detailed_analysis"):
  analysis = trigger.get_detailed_analysis(session_file)
- return analysis.get('total_turns', 0)
+ return analysis.get("total_turns", 0)

  # Fallback: use count_complete_turns + 1 (assuming last turn just completed)
  return trigger.count_complete_turns(session_file) + 1
@@ -537,7 +556,7 @@ class DialogueWatcher:
  """
  try:
  # Search in Claude Code sessions directory
- claude_base = Path.home() / '.claude' / 'projects'
+ claude_base = Path.home() / ".claude" / "projects"
  if claude_base.exists():
  for project_dir in claude_base.iterdir():
  if project_dir.is_dir():
@@ -559,11 +578,11 @@ class DialogueWatcher:

  def _get_session_stats(self) -> tuple[Dict[str, int], Dict[str, float]]:
  """Get (sizes, mtimes) for all active session files.
-
+
  For directory-based sessions (e.g., Antigravity), computes:
  - size: sum of all artifact file sizes
  - mtime: max mtime of all artifact files
-
+
  This ensures changes to individual artifacts are detected.
  """
  sizes: Dict[str, int] = {}
@@ -574,21 +593,21 @@ class DialogueWatcher:
  if not session_file.exists():
  continue
  path_key = str(session_file)
-
+
  # Handle directory-based sessions (e.g., Antigravity brain directories)
  if session_file.is_dir():
  # Check for Antigravity-style artifacts
  artifacts = ["task.md", "walkthrough.md", "implementation_plan.md"]
  total_size = 0
  max_mtime = 0.0
-
+
  for artifact_name in artifacts:
  artifact_path = session_file / artifact_name
  if artifact_path.exists():
  artifact_stat = artifact_path.stat()
  total_size += artifact_stat.st_size
  max_mtime = max(max_mtime, artifact_stat.st_mtime)
-
+
  # Only track if at least one artifact exists
  if max_mtime > 0:
  sizes[path_key] = total_size
@@ -602,12 +621,21 @@ class DialogueWatcher:
  logger.debug(f"Tracked {len(sizes)} session file(s) across all projects")
  except PermissionError as e:
  # macOS permission issue - only log once
- if not hasattr(self, '_permission_error_logged'):
+ if not hasattr(self, "_permission_error_logged"):
  self._permission_error_logged = True
  logger.error("PERMISSION DENIED: Cannot access Claude Code sessions directory")
- print("[Watcher] ✗ PERMISSION DENIED: Cannot access ~/.claude/projects/", file=sys.stderr)
- print("[Watcher] Grant Full Disk Access to Acme in System Preferences", file=sys.stderr)
- print("[Watcher] ⓘ System Preferences → Privacy & Security → Full Disk Access → Add Acme", file=sys.stderr)
+ print(
+ "[Watcher] PERMISSION DENIED: Cannot access ~/.claude/projects/",
+ file=sys.stderr,
+ )
+ print(
+ "[Watcher] ⓘ Grant Full Disk Access to Acme in System Preferences",
+ file=sys.stderr,
+ )
+ print(
+ "[Watcher] ⓘ System Preferences → Privacy & Security → Full Disk Access → Add Acme",
+ file=sys.stderr,
+ )
  except Exception as e:
  logger.error(f"Error getting session stats: {e}", exc_info=True)
  print(f"[Watcher] Error getting session stats: {e}", file=sys.stderr)
@@ -671,13 +699,21 @@ class DialogueWatcher:
  session_files.sort(key=lambda f: f.stat().st_mtime if f.exists() else 0, reverse=True)

  # Limit to max_catchup_sessions
- max_sessions = getattr(self.config, 'max_catchup_sessions', 3)
+ max_sessions = getattr(self.config, "max_catchup_sessions", 3)
  if len(session_files) > max_sessions:
  sessions_to_process = session_files[:max_sessions]
  skipped_count = len(session_files) - max_sessions
- logger.info(f"Limiting catch-up to {max_sessions} most recent sessions, skipping {skipped_count} older sessions")
- print(f"[Watcher] Limiting catch-up to {max_sessions} most recent sessions ({skipped_count} skipped)", file=sys.stderr)
- print(f"[Watcher] Use 'aline watcher session list' to see all sessions", file=sys.stderr)
+ logger.info(
+ f"Limiting catch-up to {max_sessions} most recent sessions, skipping {skipped_count} older sessions"
+ )
+ print(
+ f"[Watcher] Limiting catch-up to {max_sessions} most recent sessions ({skipped_count} skipped)",
+ file=sys.stderr,
+ )
+ print(
+ f"[Watcher] Use 'aline watcher session list' to see all sessions",
+ file=sys.stderr,
+ )
  else:
  sessions_to_process = session_files

@@ -709,8 +745,13 @@ class DialogueWatcher:
  self.last_stop_reason_counts[str(session_file)] = current_count
  continue

- logger.info(f"Catch-up: {session_file.name} missing {len(missing_turns)} turn(s): {missing_turns}")
- print(f"[Watcher] Catch-up {session_file.name}: {len(missing_turns)} missing turn(s)", file=sys.stderr)
+ logger.info(
+ f"Catch-up: {session_file.name} missing {len(missing_turns)} turn(s): {missing_turns}"
+ )
+ print(
+ f"[Watcher] Catch-up {session_file.name}: {len(missing_turns)} missing turn(s)",
+ file=sys.stderr,
+ )

  enqueued = 0
  for turn in missing_turns:
@@ -723,11 +764,15 @@ class DialogueWatcher:
  )
  enqueued += 1
  except Exception as e:
- logger.warning(f"Error enqueuing catch-up for {session_file.name} turn {turn}: {e}")
+ logger.warning(
+ f"Error enqueuing catch-up for {session_file.name} turn {turn}: {e}"
+ )

  if enqueued:
  # Align baseline for future polling (Claude baseline excludes last turn).
- self.last_stop_reason_counts[str(session_file)] = self._count_complete_turns(session_file)
+ self.last_stop_reason_counts[str(session_file)] = self._count_complete_turns(
+ session_file
+ )

  except Exception as e:
  logger.error(f"Catch-up error: {e}", exc_info=True)
@@ -735,7 +780,7 @@ class DialogueWatcher:
  def _get_file_hash(self, session_file: Path) -> Optional[str]:
  """Compute MD5 hash of session file for duplicate detection."""
  try:
- with open(session_file, 'rb') as f:
+ with open(session_file, "rb") as f:
  md5_hash = hashlib.md5()
  while chunk := f.read(8192):
  md5_hash.update(chunk)
@@ -806,7 +851,12 @@ class DialogueWatcher:
  try:
  age_seconds = max(
  0.0,
- (datetime.now() - getattr(existing, "created_at", datetime.now())).total_seconds(),
+ (
+ datetime.now()
+ - getattr(
+ existing, "created_at", datetime.now()
+ )
+ ).total_seconds(),
  )
  except Exception:
  age_seconds = 0.0
@@ -836,7 +886,9 @@ class DialogueWatcher:

  project_path = self._extract_project_path(session_file)
  if not project_path:
- logger.debug(f"Skipping enqueue for {session_file.name}: could not extract project path")
+ logger.debug(
+ f"Skipping enqueue for {session_file.name}: could not extract project path"
+ )
  # Mark as attempted to avoid spamming logs
  self.last_final_commit_times[session_path] = current_time
  continue
@@ -859,7 +911,8 @@ class DialogueWatcher:
  if enqueued_any:
  # Baseline follows completed_count (Claude excludes last turn by design).
  self.last_stop_reason_counts[session_path] = max(
- self.last_stop_reason_counts.get(session_path, 0), int(completed_count)
+ self.last_stop_reason_counts.get(session_path, 0),
+ int(completed_count),
  )

  self.last_final_commit_times[session_path] = current_time
@@ -879,20 +932,23 @@ class DialogueWatcher:
  try:
  # Use AdapterRegistry to find the right adapter
  from .adapters import get_adapter_registry
+
  registry = get_adapter_registry()
-
+
  adapter = registry.auto_detect_adapter(session_file)
  if adapter:
  project_path = adapter.extract_project_path(session_file)
  if project_path:
  return project_path
-
+
  # Fallback for legacy logic if adapter returns None or no adapter found
  # (Keep existing logic as backup if needed, or rely on adapters)
-
+
  # Method 3: For Gemini CLI / Antigravity - return a pseudo path if adapter failed
- if '.gemini/' in str(session_file):
- logger.debug(f"Gemini/Antigravity session detected, using home as pseudo project fallback: {session_file.name}")
+ if ".gemini/" in str(session_file):
+ logger.debug(
+ f"Gemini/Antigravity session detected, using home as pseudo project fallback: {session_file.name}"
+ )
  return Path.home()

  logger.debug(f"Could not extract project path from {session_file.name}")
@@ -909,6 +965,7 @@ class DialogueWatcher:
  try:
  # Delegate to registry logic to ensure consistency
  from .adapters import get_adapter_registry
+
  registry = get_adapter_registry()
  adapter = registry.auto_detect_adapter(session_file)
  if adapter:
@@ -917,37 +974,41 @@ class DialogueWatcher:
  name = adapter.name
  if name in ["claude", "codex", "gemini", "antigravity"]:
  return name
-
+
  return "unknown"

  except Exception as e:
- print(f"[Watcher] Error detecting session type for {session_file.name}: {e}", file=sys.stderr)
+ print(
+ f"[Watcher] Error detecting session type for {session_file.name}: {e}",
+ file=sys.stderr,
+ )
  return "unknown"

  def _get_trigger_for_session(self, session_file: Path):
  """
  获取或创建session的trigger
-
+
  Args:
  session_file: session文件路径
-
+
  Returns:
  TurnTrigger实例,如果session类型不支持则返回None
  """
  session_path = str(session_file)
-
+
  if session_path not in self._session_triggers:
  # Use registry to get adapter and trigger
  from .adapters import get_adapter_registry
+
  registry = get_adapter_registry()
  adapter = registry.auto_detect_adapter(session_file)
-
+
  if not adapter:
  logger.error(f"Unknown session type for {session_file.name}, cannot select trigger")
  return None
-
+
  self._session_triggers[session_path] = adapter.trigger
-
+
  return self._session_triggers[session_path]

  def _count_complete_turns(self, session_file: Path) -> int:
@@ -991,7 +1052,9 @@ class DialogueWatcher:

  if size != old_size or (mtime is not None and mtime != old_mtime):
  changed_files.append(Path(path))
- logger.debug(f"Session file changed: {Path(path).name} (size {old_size} -> {size} bytes)")
+ logger.debug(
+ f"Session file changed: {Path(path).name} (size {old_size} -> {size} bytes)"
+ )
  # Any activity should reset idle final-commit attempts
  self.last_final_commit_times.pop(path, None)

@@ -1003,12 +1066,16 @@ class DialogueWatcher:

  # Only create new task if no pending task or previous one completed
  if not self.pending_commit_task or self.pending_commit_task.done():
- logger.info(f"Scheduling commit check for {len(self._pending_changed_files)} session file(s)")
+ logger.info(
+ f"Scheduling commit check for {len(self._pending_changed_files)} session file(s)"
+ )
  self.pending_commit_task = asyncio.create_task(
  self._debounced_commit_accumulated()
  )
  else:
- logger.debug(f"Accumulated {len(changed_files)} file(s), total pending: {len(self._pending_changed_files)}")
+ logger.debug(
+ f"Accumulated {len(changed_files)} file(s), total pending: {len(self._pending_changed_files)}"
+ )

  # Update tracked sizes
  self.last_session_sizes = current_sizes
@@ -1227,7 +1294,9 @@ class DialogueWatcher:
  if target_turn:
  current_count = max(current_count, target_turn)
  self.last_stop_reason_counts[session_path] = current_count
- logger.debug(f"Updated turn count baseline for {session_file.name}: {current_count}")
+ logger.debug(
+ f"Updated turn count baseline for {session_file.name}: {current_count}"
+ )
  else:
  logger.warning(f"Commit failed for {project_path.name}")

@@ -1249,7 +1318,7 @@ class DialogueWatcher:
  debug_callback: Optional[Callable[[Dict[str, Any]], None]] = None,
  skip_dedup: bool = False,
  skip_session_summary: bool = False,
- ) -> bool:
+ ) -> bool:
  """
  Execute commit with DB-backed lease locking to prevent cross-process races.

@@ -1348,10 +1417,13 @@ class DialogueWatcher:
  # Extract session information
  session_id = session_file.stem # e.g., "minhao_claude_abc123"
  turn_number = target_turn or self._get_current_turn_number(session_file)
- user_message = user_message_override or self._extract_user_message_for_turn(session_file, turn_number)
+ user_message = user_message_override or self._extract_user_message_for_turn(
+ session_file, turn_number
+ )

  # V9: Get user identity for creator tracking
  from .config import ReAlignConfig
+
  config = ReAlignConfig.load()

  # Compute hash of current turn content (not the whole session file)
@@ -1368,7 +1440,9 @@ class DialogueWatcher:
  db = get_database()

  file_stat = session_file.stat()
- file_created = datetime.fromtimestamp(getattr(file_stat, "st_birthtime", file_stat.st_ctime))
+ file_created = datetime.fromtimestamp(
+ getattr(file_stat, "st_birthtime", file_stat.st_ctime)
+ )
  db.get_or_create_session(
  session_id=session_id,
  session_file_path=session_file,
@@ -1390,7 +1464,10 @@ class DialogueWatcher:
  try:
  age_seconds = max(
  0.0,
- (datetime.now() - getattr(existing_turn, "created_at", datetime.now())).total_seconds(),
+ (
+ datetime.now()
+ - getattr(existing_turn, "created_at", datetime.now())
+ ).total_seconds(),
  )
  except Exception:
  age_seconds = 0.0
@@ -1463,6 +1540,7 @@ class DialogueWatcher:

  # Check if it's an API key problem
  from .hooks import get_last_llm_error
+
  last_error = get_last_llm_error()
  if last_error:
  if "API_KEY not set" in last_error or "api_key" in last_error.lower():
@@ -1490,7 +1568,11 @@ class DialogueWatcher:
  print(f"[Watcher] ✗ Invalid commit message title: '{title}'", file=sys.stderr)
  raise RuntimeError(f"Invalid LLM title: {title!r}")

- if title.strip() in ["{", "}", "[", "]"] or title.startswith("{") and not title.endswith("}"):
+ if (
+ title.strip() in ["{", "}", "[", "]"]
+ or title.startswith("{")
+ and not title.endswith("}")
+ ):
  logger.error(f"Title appears to be truncated JSON: '{title}' - skipping commit")
  print(f"[Watcher] ✗ Truncated JSON in title: '{title}'", file=sys.stderr)
  raise RuntimeError(f"Truncated JSON title: {title!r}")
@@ -1526,7 +1608,10 @@ class DialogueWatcher:
  except Exception as e:
  # If we were taking over a stale processing turn, a failure here should stop further retries.
  if takeover_attempt:
- logger.error(f"Takeover attempt failed for {session_id} #{turn_number}: {e}", exc_info=True)
+ logger.error(
+ f"Takeover attempt failed for {session_id} #{turn_number}: {e}",
+ exc_info=True,
+ )
  failed_turn = TurnRecord(
  id=str(uuid.uuid4()),
  session_id=session_id,
@@ -1577,7 +1662,9 @@ class DialogueWatcher:
  logger.error(f"Failed to find latest session: {e}")
  return None

- def _handle_session_redaction(self, session_file: Path, project_path: Path, quiet: bool = False) -> Path:
+ def _handle_session_redaction(
+ self, session_file: Path, project_path: Path, quiet: bool = False
+ ) -> Path:
  """Check and redact sensitive information from session file.

  Args:
@@ -1594,7 +1681,7 @@ class DialogueWatcher:
  try:
  from .redactor import check_and_redact_session, save_original_session

- content = session_file.read_text(encoding='utf-8')
+ content = session_file.read_text(encoding="utf-8")
  redacted_content, has_secrets, secrets = check_and_redact_session(
  content, redact_mode="auto", quiet=quiet
  )
@@ -1602,7 +1689,7 @@ class DialogueWatcher:
  if has_secrets:
  logger.warning(f"Secrets detected: {len(secrets)} secret(s)")
  backup_path = save_original_session(session_file, project_path)
- session_file.write_text(redacted_content, encoding='utf-8')
+ session_file.write_text(redacted_content, encoding="utf-8")
  logger.info(f"Session redacted, original saved to {backup_path}")

  return session_file
@@ -1630,7 +1717,7 @@ class DialogueWatcher:
  try:
  user_messages = []

- with open(session_file, 'r', encoding='utf-8') as f:
+ with open(session_file, "r", encoding="utf-8") as f:
  for line in f:
  try:
  data = json.loads(line.strip())
@@ -1716,6 +1803,7 @@ class DialogueWatcher:
  return

  from .hooks import clean_user_message
+
  user_message = clean_user_message(prompt) if prompt else ""
  if not user_message:
  user_message = str(group.get("user_message") or "")
@@ -1743,7 +1831,9 @@ class DialogueWatcher:
  db = get_database()

  file_stat = session_file.stat()
- file_created = datetime.fromtimestamp(getattr(file_stat, "st_birthtime", file_stat.st_ctime))
+ file_created = datetime.fromtimestamp(
+ getattr(file_stat, "st_birthtime", file_stat.st_ctime)
+ )

  project_path = None
  if project_dir:
@@ -1830,14 +1920,21 @@ class DialogueWatcher:
  if turn_info and turn_info.get("turn_content"):
  return turn_info["turn_content"]
  # Fallback: construct content from group data
- return json.dumps({
- "turn_number": turn_number,
- "user_message": group.get("user_message", ""),
- "assistant_response": group.get("summary_message", ""),
- }, ensure_ascii=False, indent=2)
+ return json.dumps(
+ {
+ "turn_number": turn_number,
+ "user_message": group.get("user_message", ""),
+ "assistant_response": group.get("summary_message", ""),
+ },
+ ensure_ascii=False,
+ indent=2,
+ )

  if session_format == "antigravity_markdown":
- print(f"[Debug] Extracting Antigravity content for turn {turn_number}", file=sys.stderr)
+ print(
+ f"[Debug] Extracting Antigravity content for turn {turn_number}",
+ file=sys.stderr,
+ )
  # Directly read artifact files from directory (don't rely on trigger.user_message which is empty)
  session_dir = session_file if session_file.is_dir() else session_file.parent
  artifacts = ["task.md", "walkthrough.md", "implementation_plan.md"]
@@ -1846,15 +1943,20 @@ class DialogueWatcher:
  path = session_dir / filename
  if path.exists():
  try:
- text = path.read_text(encoding='utf-8')
+ text = path.read_text(encoding="utf-8")
  content_parts.append(f"--- {filename} ---\n{text}")
  except Exception as e:
  print(f"[Debug] Failed to read {filename}: {e}", file=sys.stderr)
  full_content = "\n\n".join(content_parts)
  if full_content:
- print(f"[Debug] Antigravity content extracted: {len(full_content)} chars", file=sys.stderr)
+ print(
+ f"[Debug] Antigravity content extracted: {len(full_content)} chars",
+ file=sys.stderr,
+ )
  return full_content
- print(f"[Debug] Antigravity content empty - no artifact files found", file=sys.stderr)
+ print(
+ f"[Debug] Antigravity content empty - no artifact files found", file=sys.stderr
+ )
  return ""

  # For JSONL formats, extract by line numbers
@@ -1864,7 +1966,7 @@ class DialogueWatcher:
  return ""

  lines = []
- with open(session_file, 'r', encoding='utf-8') as f:
+ with open(session_file, "r", encoding="utf-8") as f:
  for idx, line in enumerate(f, 1):
  if start_line <= idx <= end_line:
  lines.append(line)
@@ -1880,7 +1982,7 @@ class DialogueWatcher:
  """Extract a summary of the assistant's response from session file."""
  try:
  if session_file.is_dir():
- # For directory sessions (Antigravity), we don't have a simple way to extract assistant summary
+ # For directory sessions (Antigravity), we don't have a simple way to extract assistant summary
  # from a single file scan. Return generic message or use trigger if possible.
  return "Antigravity Session State"

@@ -1894,11 +1996,11 @@ class DialogueWatcher:
  try:
  # Extract last assistant response text
  assistant_text = ""
-
+
  if session_file.is_dir():
  return "Antigravity Session"

- with open(session_file, 'r', encoding='utf-8') as f:
+ with open(session_file, "r", encoding="utf-8") as f:
  for line in f:
  try:
  data = json.loads(line.strip())
@@ -1940,8 +2042,8 @@ class DialogueWatcher:
  try:
  if session_file.is_dir():
  return None
-
- with open(session_file, 'r', encoding='utf-8') as f:
+
+ with open(session_file, "r", encoding="utf-8") as f:
  lines = f.readlines()

  for line in reversed(lines):
@@ -1994,15 +2096,15 @@ class DialogueWatcher:
  # or extracting just the User Message to identify intent.
  trigger = self._get_trigger_for_session(session_file)
  if trigger:
- # Get current turn number
- turn = self._get_current_turn_number(session_file)
- info = trigger.extract_turn_info(session_file, turn)
- if info:
- return info.user_message
+ # Get current turn number
+ turn = self._get_current_turn_number(session_file)
+ info = trigger.extract_turn_info(session_file, turn)
+ if info:
+ return info.user_message
  return ""

  # Read all lines and track user message positions
- with open(session_file, 'r', encoding='utf-8') as f:
+ with open(session_file, "r", encoding="utf-8") as f:
  for idx, line in enumerate(f):
  lines.append(line)
  try:
@@ -2014,16 +2116,21 @@ class DialogueWatcher:
  # Check if this is a real user message (not tool result, IDE notification, etc.)
  is_real_message = False
  if isinstance(content, str):
- if not content.startswith("This session is being continued") and \
- not content.startswith("<ide_opened_file>"):
+ if not content.startswith(
+ "This session is being continued"
+ ) and not content.startswith("<ide_opened_file>"):
  is_real_message = True
  elif isinstance(content, list):
- text_parts = [item.get("text", "") for item in content
- if isinstance(item, dict) and item.get("type") == "text"]
+ text_parts = [
+ item.get("text", "")
+ for item in content
+ if isinstance(item, dict) and item.get("type") == "text"
+ ]
  if text_parts:
  combined_text = "\n".join(text_parts)
- if not combined_text.startswith("This session is being continued") and \
- not combined_text.startswith("<ide_opened_file>"):
+ if not combined_text.startswith(
+ "This session is being continued"
+ ) and not combined_text.startswith("<ide_opened_file>"):
  is_real_message = True

  if is_real_message:
@@ -2089,6 +2196,7 @@ class DialogueWatcher:
  try:
  # Get recent turns from database for context
  from .db import get_database
+
  db = get_database()
  session_id = resolved_session_id
  recent_turns = db.get_turns_for_session(session_id)
@@ -2100,7 +2208,9 @@ class DialogueWatcher:
  # Get the most recent title
  if previous_records:
  previous_commit_title = previous_records[-1]
- recent_ctx = "Recent turns:\n" + "\n".join(f"- {t}" for t in previous_records)
+ recent_ctx = "Recent turns:\n" + "\n".join(
+ f"- {t}" for t in previous_records
+ )
  except Exception:
  recent_ctx = ""
  previous_records = []
@@ -2115,7 +2225,14 @@ class DialogueWatcher:
  try:
  trigger = self._get_trigger_for_session(session_file)
  analysis = trigger.get_detailed_analysis(session_file)
- group = next((g for g in analysis.get("groups", []) if g.get("turn_number") == turn_number), None)
+ group = next(
+ (
+ g
+ for g in analysis.get("groups", [])
+ if g.get("turn_number") == turn_number
+ ),
+ None,
+ )
  except Exception:
  group = None

@@ -2129,43 +2246,77 @@ class DialogueWatcher:
  turn_status = group.get("turn_status") or turn_status

  # Robust fallback for directory sessions (Antigravity) if group lookup failed
- if session_file is not None and session_file.is_dir() and (not user_message or not assistant_summary):
+ if (
+ session_file is not None
+ and session_file.is_dir()
+ and (not user_message or not assistant_summary)
+ ):
  logger.info("Using fallback extraction for Antigravity directory session")
- print(f"[Debug] Antigravity fallback: user_message={bool(user_message)}, assistant_summary={bool(assistant_summary)}", file=sys.stderr)
+ print(
+ f"[Debug] Antigravity fallback: user_message={bool(user_message)}, assistant_summary={bool(assistant_summary)}",
+ file=sys.stderr,
+ )
  if not user_message:
  # For Antigravity, turn_content is essentially the user message (full state)
  user_message = turn_content
- print(f"[Debug] Set user_message from turn_content: {len(user_message) if user_message else 0} chars", file=sys.stderr)
+ print(
+ f"[Debug] Set user_message from turn_content: {len(user_message) if user_message else 0} chars",
+ file=sys.stderr,
+ )
  if not assistant_summary:
  assistant_summary = "Antigravity Session State"
  turn_status = "completed"

- print(f"[Debug] Before LLM call: user_message={len(user_message) if user_message else 0} chars, assistant_summary={bool(assistant_summary)}", file=sys.stderr)
+ print(
+ f"[Debug] Before LLM call: user_message={len(user_message) if user_message else 0} chars, assistant_summary={bool(assistant_summary)}",
+ file=sys.stderr,
+ )
  if user_message and assistant_summary:
  from .hooks import generate_summary_with_llm_from_turn_context
+
  # Pass full turn content to include all messages (user, assistant text, thinking)
  # but exclude tool use and code changes (handled by filter_session_content)
- title, model_name, description, if_last_task, satisfaction = generate_summary_with_llm_from_turn_context(
- user_message=user_message,
- assistant_summary=assistant_summary,
- turn_status=turn_status,
- recent_commit_context=recent_ctx,
- provider=self.config.llm_provider,
- previous_commit_title=previous_commit_title,
- full_turn_content=turn_content, # Pass full turn content
- previous_records=previous_records, # Pass extracted records from git history
- debug_callback=debug_callback, # Pass debug callback
+ title, model_name, description, if_last_task, satisfaction = (
+ generate_summary_with_llm_from_turn_context(
+ user_message=user_message,
+ assistant_summary=assistant_summary,
+ turn_status=turn_status,
+ recent_commit_context=recent_ctx,
+ provider=self.config.llm_provider,
+ previous_commit_title=previous_commit_title,
+ full_turn_content=turn_content, # Pass full turn content
+ previous_records=previous_records, # Pass extracted records from git history
+ debug_callback=debug_callback, # Pass debug callback
+ )
  )

  if title:
  logger.info(f"Generated LLM summary from turn context using {model_name}")
- print(f"[Watcher] ✓ Generated summary from turn context ({model_name})", file=sys.stderr)
- return (title, model_name or "unknown", description or "", if_last_task, satisfaction)
+ print(
+ f"[Watcher] Generated summary from turn context ({model_name})",
+ file=sys.stderr,
+ )
+ return (
+ title,
+ model_name or "unknown",
+ description or "",
+ if_last_task,
+ satisfaction,
+ )

  if session_file is not None and session_file.is_dir():
  # Fallback if LLM fails for Antigravity
- print(f"[Watcher] ⚠ LLM summary failed/empty, using generic fallback for Antigravity", file=sys.stderr)
- return ("Update Antigravity Brain", "fallback", "Automatic update of brain artifacts", "yes", "fine")
+ print(
+ f"[Watcher] LLM summary failed/empty, using generic fallback for Antigravity",
+ file=sys.stderr,
+ )
+ return (
+ "Update Antigravity Brain",
+ "fallback",
+ "Automatic update of brain artifacts",
+ "yes",
+ "fine",
+ )

  # Fallback: Extract turn content and use the legacy pipeline
  if turn_content is None and session_file is not None:
@@ -2196,7 +2347,6 @@ class DialogueWatcher:
  except Exception:
  pass

-
  # Use direct API calls for LLM summary
  from .hooks import generate_summary_with_llm

@@ -2212,14 +2362,29 @@ class DialogueWatcher:
  if model_name:
  logger.info(f"Generated LLM summary using {model_name}")
  print(f"[Watcher] ✓ Generated LLM summary using {model_name}", file=sys.stderr)
- return (title, model_name or "unknown", description or "", if_last_task, satisfaction)
+ return (
+ title,
+ model_name or "unknown",
+ description or "",
+ if_last_task,
+ satisfaction,
+ )
  else:
  logger.warning("LLM summary generation returned empty result")

  if session_file is not None and session_file.is_dir():
  # Fallback if LLM fails for Antigravity (generic path)
- print(f"[Watcher] ⚠ LLM summary returned empty, using fallback for Antigravity", file=sys.stderr)
- return ("Update Antigravity Brain", "fallback", "Automatic update of brain artifacts", "yes", "fine")
+ print(
+ f"[Watcher] LLM summary returned empty, using fallback for Antigravity",
+ file=sys.stderr,
+ )
+ return (
+ "Update Antigravity Brain",
+ "fallback",
+ "Automatic update of brain artifacts",
+ "yes",
+ "fine",
+ )

  return None

@@ -2229,16 +2394,25 @@ class DialogueWatcher:

  # Record the error for later use in fallback logic
  from .hooks import set_last_llm_error
+
  set_last_llm_error(str(e))

  # Robust fallback for Antigravity directory sessions if anything fails
  if session_file is not None and session_file.is_dir():
- print(f"[Watcher] ⚠ Using generic fallback after exception for Antigravity", file=sys.stderr)
- return ("Update Antigravity Brain", "fallback", "Automatic update of brain artifacts", "yes", "fine")
+ print(
+ f"[Watcher] Using generic fallback after exception for Antigravity",
+ file=sys.stderr,
+ )
+ return (
+ "Update Antigravity Brain",
+ "fallback",
+ "Automatic update of brain artifacts",
+ "yes",
+ "fine",
+ )

  return None

-
  @staticmethod
  def _extract_latest_commit_title(context: str) -> Optional[str]:
  """
@@ -2273,11 +2447,11 @@ class DialogueWatcher:
  # For directories, just use creation time
  try:
  stat = session_file.stat()
- return getattr(stat, 'st_birthtime', stat.st_ctime)
+ return getattr(stat, "st_birthtime", stat.st_ctime)
  except:
  return session_file.stat().st_ctime

- with open(session_file, 'r', encoding='utf-8') as f:
+ with open(session_file, "r", encoding="utf-8") as f:
  for line in f:
  try:
  data = json.loads(line.strip())
@@ -2287,7 +2461,8 @@ class DialogueWatcher:
  if timestamp_str:
  # Parse ISO 8601 timestamp
  from datetime import datetime
- dt = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
+
+ dt = datetime.fromisoformat(timestamp_str.replace("Z", "+00:00"))
  return dt.timestamp()

  except (json.JSONDecodeError, ValueError, KeyError):