aline-ai 0.5.4__py3-none-any.whl → 0.5.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/METADATA +1 -1
  2. aline_ai-0.5.6.dist-info/RECORD +95 -0
  3. realign/__init__.py +1 -1
  4. realign/adapters/antigravity.py +28 -20
  5. realign/adapters/base.py +46 -50
  6. realign/adapters/claude.py +14 -14
  7. realign/adapters/codex.py +7 -7
  8. realign/adapters/gemini.py +11 -11
  9. realign/adapters/registry.py +14 -10
  10. realign/claude_detector.py +2 -2
  11. realign/claude_hooks/__init__.py +3 -3
  12. realign/claude_hooks/permission_request_hook_installer.py +31 -32
  13. realign/claude_hooks/stop_hook.py +4 -1
  14. realign/claude_hooks/stop_hook_installer.py +30 -31
  15. realign/cli.py +23 -4
  16. realign/codex_detector.py +11 -11
  17. realign/commands/add.py +88 -65
  18. realign/commands/config.py +3 -12
  19. realign/commands/context.py +3 -1
  20. realign/commands/export_shares.py +86 -127
  21. realign/commands/import_shares.py +145 -155
  22. realign/commands/init.py +166 -30
  23. realign/commands/restore.py +18 -6
  24. realign/commands/search.py +14 -42
  25. realign/commands/upgrade.py +155 -11
  26. realign/commands/watcher.py +98 -219
  27. realign/commands/worker.py +29 -6
  28. realign/config.py +25 -20
  29. realign/context.py +1 -3
  30. realign/dashboard/app.py +34 -24
  31. realign/dashboard/screens/__init__.py +10 -1
  32. realign/dashboard/screens/create_agent.py +244 -0
  33. realign/dashboard/screens/create_event.py +3 -1
  34. realign/dashboard/screens/event_detail.py +14 -6
  35. realign/dashboard/screens/help_screen.py +114 -0
  36. realign/dashboard/screens/session_detail.py +3 -1
  37. realign/dashboard/screens/share_import.py +7 -3
  38. realign/dashboard/tmux_manager.py +54 -9
  39. realign/dashboard/widgets/config_panel.py +85 -1
  40. realign/dashboard/widgets/events_table.py +314 -70
  41. realign/dashboard/widgets/header.py +2 -1
  42. realign/dashboard/widgets/search_panel.py +37 -27
  43. realign/dashboard/widgets/sessions_table.py +404 -85
  44. realign/dashboard/widgets/terminal_panel.py +155 -175
  45. realign/dashboard/widgets/watcher_panel.py +6 -2
  46. realign/dashboard/widgets/worker_panel.py +10 -1
  47. realign/db/__init__.py +1 -1
  48. realign/db/base.py +5 -15
  49. realign/db/locks.py +0 -1
  50. realign/db/migration.py +82 -76
  51. realign/db/schema.py +2 -6
  52. realign/db/sqlite_db.py +23 -41
  53. realign/events/__init__.py +0 -1
  54. realign/events/event_summarizer.py +27 -15
  55. realign/events/session_summarizer.py +29 -15
  56. realign/file_lock.py +1 -0
  57. realign/hooks.py +150 -60
  58. realign/logging_config.py +12 -15
  59. realign/mcp_server.py +30 -51
  60. realign/mcp_watcher.py +0 -1
  61. realign/models/event.py +29 -20
  62. realign/prompts/__init__.py +7 -7
  63. realign/prompts/presets.py +15 -11
  64. realign/redactor.py +99 -59
  65. realign/triggers/__init__.py +9 -9
  66. realign/triggers/antigravity_trigger.py +30 -28
  67. realign/triggers/base.py +4 -3
  68. realign/triggers/claude_trigger.py +104 -85
  69. realign/triggers/codex_trigger.py +15 -5
  70. realign/triggers/gemini_trigger.py +57 -47
  71. realign/triggers/next_turn_trigger.py +3 -1
  72. realign/triggers/registry.py +6 -2
  73. realign/triggers/turn_status.py +3 -1
  74. realign/watcher_core.py +306 -131
  75. realign/watcher_daemon.py +8 -8
  76. realign/worker_core.py +3 -1
  77. realign/worker_daemon.py +3 -1
  78. aline_ai-0.5.4.dist-info/RECORD +0 -93
  79. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/WHEEL +0 -0
  80. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/entry_points.txt +0 -0
  81. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/licenses/LICENSE +0 -0
  82. {aline_ai-0.5.4.dist-info → aline_ai-0.5.6.dist-info}/top_level.txt +0 -0
realign/db/migration.py CHANGED
@@ -33,58 +33,58 @@ def parse_commit_message(message: str) -> Dict[str, Any]:
  Dictionary with extracted fields
  """
  result = {
- 'llm_title': '',
- 'llm_description': '',
- 'session_id': '',
- 'turn_number': 0,
- 'user_message': '',
+ "llm_title": "",
+ "llm_description": "",
+ "session_id": "",
+ "turn_number": 0,
+ "user_message": "",
  }

  if not message:
  return result

- lines = message.strip().split('\n')
+ lines = message.strip().split("\n")
  if not lines:
  return result

  # First line is the title
- result['llm_title'] = lines[0].strip()
+ result["llm_title"] = lines[0].strip()

  # Find the --- separator
  separator_idx = -1
  for i, line in enumerate(lines):
- if line.strip() == '---':
+ if line.strip() == "---":
  separator_idx = i
  break

  if separator_idx > 1:
  # Description is between title and separator
  description_lines = lines[1:separator_idx]
- result['llm_description'] = '\n'.join(description_lines).strip()
+ result["llm_description"] = "\n".join(description_lines).strip()

  # Parse metadata after separator
  if separator_idx >= 0:
- metadata_lines = lines[separator_idx + 1:]
+ metadata_lines = lines[separator_idx + 1 :]
  for line in metadata_lines:
  line = line.strip()

  # Parse "Session: xxx | Turn: #N"
- session_match = re.match(r'Session:\s*(.+?)\s*\|\s*Turn:\s*#?(\d+)', line)
+ session_match = re.match(r"Session:\s*(.+?)\s*\|\s*Turn:\s*#?(\d+)", line)
  if session_match:
- result['session_id'] = session_match.group(1).strip()
- result['turn_number'] = int(session_match.group(2))
+ result["session_id"] = session_match.group(1).strip()
+ result["turn_number"] = int(session_match.group(2))
  continue

  # Parse "Request: xxx"
- if line.startswith('Request:'):
- result['user_message'] = line[8:].strip()
+ if line.startswith("Request:"):
+ result["user_message"] = line[8:].strip()

  # Fallback: try legacy format "Session xxx, Turn N: ..."
- if not result['session_id'] and result['llm_title']:
- legacy_match = re.match(r'Session\s+(\S+),\s*Turn\s+(\d+):', result['llm_title'])
+ if not result["session_id"] and result["llm_title"]:
+ legacy_match = re.match(r"Session\s+(\S+),\s*Turn\s+(\d+):", result["llm_title"])
  if legacy_match:
- result['session_id'] = legacy_match.group(1)
- result['turn_number'] = int(legacy_match.group(2))
+ result["session_id"] = legacy_match.group(1)
+ result["turn_number"] = int(legacy_match.group(2))

  return result

@@ -99,18 +99,18 @@ def get_all_commits(realign_dir: Path) -> List[Dict[str, Any]]:
  Returns:
  List of commit dictionaries with hash, timestamp, and message
  """
- if not (realign_dir / '.git').exists():
+ if not (realign_dir / ".git").exists():
  logger.warning(f"No git repo found at {realign_dir}")
  return []

  try:
  # Use record separator for robust parsing
- rs = '\x1e'
- us = '\x1f'
- fmt = f'%H{us}%at{us}%B{rs}'
+ rs = "\x1e"
+ us = "\x1f"
+ fmt = f"%H{us}%at{us}%B{rs}"

  result = subprocess.run(
- ['git', 'log', '--reverse', f'--pretty=format:{fmt}'],
+ ["git", "log", "--reverse", f"--pretty=format:{fmt}"],
  cwd=realign_dir,
  capture_output=True,
  text=True,
@@ -136,14 +136,16 @@ def get_all_commits(realign_dir: Path) -> List[Dict[str, Any]]:
  continue

  # Skip initial commit
- if message.lower().startswith('initial commit'):
+ if message.lower().startswith("initial commit"):
  continue

- commits.append({
- 'hash': commit_hash,
- 'timestamp': datetime.fromtimestamp(int(timestamp)),
- 'message': message,
- })
+ commits.append(
+ {
+ "hash": commit_hash,
+ "timestamp": datetime.fromtimestamp(int(timestamp)),
+ "message": message,
+ }
+ )

  return commits

@@ -185,7 +187,7 @@ def migrate_project(

  if dry_run:
  for commit in commits:
- parsed = parse_commit_message(commit['message'])
+ parsed = parse_commit_message(commit["message"])
  logger.info(f"Would migrate: {commit['hash'][:8]} - {parsed['llm_title'][:50]}")
  return (len(commits), 0)

@@ -198,8 +200,8 @@ def migrate_project(
  # Group commits by session
  session_commits: Dict[str, List[Dict]] = {}
  for commit in commits:
- parsed = parse_commit_message(commit['message'])
- session_id = parsed.get('session_id', '')
+ parsed = parse_commit_message(commit["message"])
+ session_id = parsed.get("session_id", "")

  if not session_id:
  logger.debug(f"Skipping commit without session: {commit['hash'][:8]}")
@@ -208,7 +210,7 @@ def migrate_project(

  if session_id not in session_commits:
  session_commits[session_id] = []
- session_commits[session_id].append({**commit, 'parsed': parsed})
+ session_commits[session_id].append({**commit, "parsed": parsed})

  # Process each session
  for session_id, session_data in session_commits.items():
@@ -220,22 +222,22 @@ def migrate_project(
  session_rec = db.get_or_create_session(
  session_id=session_id,
  session_file_path=Path(f"~/.claude/projects/{project_path.name}/{session_id}.jsonl"),
- session_type='claude', # Default assumption
- started_at=first_commit['timestamp'],
+ session_type="claude", # Default assumption
+ started_at=first_commit["timestamp"],
  workspace_path=str(project_path),
  )

  # Create turn records
  for commit_data in session_data:
- parsed = commit_data['parsed']
- turn_number = parsed.get('turn_number', 0)
+ parsed = commit_data["parsed"]
+ turn_number = parsed.get("turn_number", 0)

  if turn_number == 0:
  skipped += 1
  continue

  # Generate content hash from commit message (since we don't have original content)
- content_hash = hashlib.md5(commit_data['message'].encode()).hexdigest()
+ content_hash = hashlib.md5(commit_data["message"].encode()).hexdigest()

  # Check if already migrated
  existing = db.get_turn_by_hash(session_id, content_hash)
@@ -246,26 +248,27 @@ def migrate_project(

  try:
  import uuid
+
  turn = TurnRecord(
  id=str(uuid.uuid4()),
  session_id=session_id,
  turn_number=turn_number,
- user_message=parsed.get('user_message', ''),
- assistant_summary=parsed.get('llm_description', ''),
- turn_status='completed',
- llm_title=parsed.get('llm_title', 'Migrated commit'),
- llm_description=parsed.get('llm_description', ''),
- model_name='migrated',
- if_last_task='unknown',
- satisfaction='unknown',
+ user_message=parsed.get("user_message", ""),
+ assistant_summary=parsed.get("llm_description", ""),
+ turn_status="completed",
+ llm_title=parsed.get("llm_title", "Migrated commit"),
+ llm_description=parsed.get("llm_description", ""),
+ model_name="migrated",
+ if_last_task="unknown",
+ satisfaction="unknown",
  content_hash=content_hash,
- timestamp=commit_data['timestamp'],
+ timestamp=commit_data["timestamp"],
  created_at=datetime.now(),
- git_commit_hash=commit_data['hash'],
+ git_commit_hash=commit_data["hash"],
  )

  # We don't have the original turn content, so store the commit message
- db.create_turn(turn, content=commit_data['message'])
+ db.create_turn(turn, content=commit_data["message"])
  migrated += 1
  logger.debug(f"Migrated: {session_id} #{turn_number}")

@@ -273,7 +276,9 @@ def migrate_project(
  logger.error(f"Failed to migrate turn: {e}")
  skipped += 1

- logger.info(f"Migration complete for {project_path.name}: {migrated} migrated, {skipped} skipped")
+ logger.info(
+ f"Migration complete for {project_path.name}: {migrated} migrated, {skipped} skipped"
+ )
  return (migrated, skipped)


@@ -296,7 +301,7 @@ def migrate_all_projects(
  if db is None:
  db = get_database()

- aline_base = Path.home() / '.aline'
+ aline_base = Path.home() / ".aline"
  if not aline_base.exists():
  logger.info("No .aline directory found")
  return {}
@@ -307,9 +312,9 @@
  for item in aline_base.iterdir():
  if not item.is_dir():
  continue
- if item.name in ('db', 'logs', 'cache'):
+ if item.name in ("db", "logs", "cache"):
  continue
- if not (item / '.git').exists():
+ if not (item / ".git").exists():
  continue

  # This is a project's .aline directory
@@ -323,7 +328,7 @@

  # Create a placeholder project path
  # The real path should be stored in project metadata
- project_path = Path.home() / 'Projects' / project_name
+ project_path = Path.home() / "Projects" / project_name

  # Use the .aline directory directly for migration
  migrated, skipped = migrate_project_from_realign_dir(item, db, dry_run)
@@ -348,7 +353,7 @@ def migrate_project_from_realign_dir(
  Returns:
  Tuple of (migrated_count, skipped_count)
  """
- if not (realign_dir / '.git').exists():
+ if not (realign_dir / ".git").exists():
  logger.warning(f"No git repo at {realign_dir}")
  return (0, 0)

@@ -360,23 +365,23 @@ def migrate_project_from_realign_dir(

  if dry_run:
  for commit in commits:
- parsed = parse_commit_message(commit['message'])
- title = parsed.get('llm_title', 'No title')[:50]
+ parsed = parse_commit_message(commit["message"])
+ title = parsed.get("llm_title", "No title")[:50]
  logger.info(f" Would migrate: {commit['hash'][:8]} - {title}")
  return (len(commits), 0)

  # Get or create project (use realign_dir name as project name)
  project_name = realign_dir.name
- project_path = Path.home() / 'Projects' / project_name # Placeholder
+ project_path = Path.home() / "Projects" / project_name # Placeholder
  project_rec = db.get_or_create_project(project_path, name=project_name)

  migrated = 0
  skipped = 0

  for commit in commits:
- parsed = parse_commit_message(commit['message'])
- session_id = parsed.get('session_id', '')
- turn_number = parsed.get('turn_number', 0)
+ parsed = parse_commit_message(commit["message"])
+ session_id = parsed.get("session_id", "")
+ turn_number = parsed.get("turn_number", 0)

  if not session_id or turn_number == 0:
  skipped += 1
@@ -386,12 +391,12 @@ def migrate_project_from_realign_dir(
  session_rec = db.get_or_create_session(
  session_id=session_id,
  session_file_path=Path(f"migrated/{session_id}.jsonl"),
- session_type='claude',
- started_at=commit['timestamp'],
+ session_type="claude",
+ started_at=commit["timestamp"],
  workspace_path=str(project_path),
  )

- content_hash = hashlib.md5(commit['message'].encode()).hexdigest()
+ content_hash = hashlib.md5(commit["message"].encode()).hexdigest()

  if db.get_turn_by_hash(session_id, content_hash):
  skipped += 1
@@ -399,25 +404,26 @@ def migrate_project_from_realign_dir(

  try:
  import uuid
+
  turn = TurnRecord(
  id=str(uuid.uuid4()),
  session_id=session_id,
  turn_number=turn_number,
- user_message=parsed.get('user_message', ''),
- assistant_summary=parsed.get('llm_description', ''),
- turn_status='completed',
- llm_title=parsed.get('llm_title', 'Migrated'),
- llm_description=parsed.get('llm_description', ''),
- model_name='migrated',
- if_last_task='unknown',
- satisfaction='unknown',
+ user_message=parsed.get("user_message", ""),
+ assistant_summary=parsed.get("llm_description", ""),
+ turn_status="completed",
+ llm_title=parsed.get("llm_title", "Migrated"),
+ llm_description=parsed.get("llm_description", ""),
+ model_name="migrated",
+ if_last_task="unknown",
+ satisfaction="unknown",
  content_hash=content_hash,
- timestamp=commit['timestamp'],
+ timestamp=commit["timestamp"],
  created_at=datetime.now(),
- git_commit_hash=commit['hash'],
+ git_commit_hash=commit["hash"],
  )

- db.create_turn(turn, content=commit['message'])
+ db.create_turn(turn, content=commit["message"])
  migrated += 1

  except Exception as e:
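
Almost everything in the migration.py hunks above is mechanical reformatting (single quotes to double quotes, wrapped calls); the parsing behavior is unchanged. For orientation, a minimal usage sketch of the commit-message shape that `parse_commit_message` extracts, based on the regexes visible in the first hunk; the sample message and expected values are illustrative, not taken from the package.

```python
# Illustrative only: the message below is invented, but its shape matches the
# "title / description / --- / Session: ... | Turn: #N / Request: ..." format
# that parse_commit_message looks for.
from realign.db.migration import parse_commit_message

message = (
    "Fix watcher restart loop\n"
    "Adds a backoff before the daemon is relaunched.\n"
    "---\n"
    "Session: 1a2b3c4d | Turn: #7\n"
    "Request: the watcher keeps restarting, please fix"
)

parsed = parse_commit_message(message)
# Given the patterns shown in the diff, this should yield:
#   parsed["llm_title"]       -> "Fix watcher restart loop"
#   parsed["llm_description"] -> "Adds a backoff before the daemon is relaunched."
#   parsed["session_id"]      -> "1a2b3c4d"
#   parsed["turn_number"]     -> 7
#   parsed["user_message"]    -> "the watcher keeps restarting, please fix"
```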
realign/db/schema.py CHANGED
@@ -444,15 +444,11 @@ def get_migration_scripts(from_version: int, to_version: int) -> list:

  if from_version < 12 and to_version >= 12:
  # V12: Add total_turns_mtime for lazy cache validation
- scripts.append(
- "ALTER TABLE sessions ADD COLUMN total_turns_mtime REAL;"
- )
+ scripts.append("ALTER TABLE sessions ADD COLUMN total_turns_mtime REAL;")

  if from_version < 13 and to_version >= 13:
  # V13: Temporary turn title
- scripts.append(
- "ALTER TABLE turns ADD COLUMN temp_title TEXT;"
- )
+ scripts.append("ALTER TABLE turns ADD COLUMN temp_title TEXT;")

  if from_version < 14 and to_version >= 14:
  scripts.extend(MIGRATION_V13_TO_V14)
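
The schema.py hunk only collapses two `scripts.append(...)` calls onto single lines; the version-guard logic is untouched. A hedged sketch of how those guards compose when a database is several versions behind (the call below is illustrative; the version numbers and statements come from the guards shown above):

```python
from realign.db.schema import get_migration_scripts

# A database at schema version 11 upgrading to 14 crosses the v12, v13, and v14
# guards shown above, so the returned list should contain, in order:
#   "ALTER TABLE sessions ADD COLUMN total_turns_mtime REAL;"
#   "ALTER TABLE turns ADD COLUMN temp_title TEXT;"
#   ...followed by the statements in MIGRATION_V13_TO_V14
scripts = get_migration_scripts(11, 14)
for statement in scripts:
    print(statement)
```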
realign/db/sqlite_db.py CHANGED
@@ -206,17 +206,13 @@ class SQLiteDatabase(DatabaseInterface):
  )
  # For existing databases, run migrations
  elif current_version < SCHEMA_VERSION:
- migration_scripts = get_migration_scripts(
- current_version, SCHEMA_VERSION
- )
+ migration_scripts = get_migration_scripts(current_version, SCHEMA_VERSION)
  for script in migration_scripts:
  try:
  cursor.execute(script)
  except sqlite3.OperationalError as e:
  # Some migrations may fail if column already exists, etc.
- logger.debug(
- f"Migration script skipped (may already be applied): {e}"
- )
+ logger.debug(f"Migration script skipped (may already be applied): {e}")

  cursor.execute(
  "INSERT INTO schema_version (version, description) VALUES (?, ?)",
@@ -256,9 +252,7 @@ class SQLiteDatabase(DatabaseInterface):
  """Public wrapper for fetching a project by path."""
  return self._get_project_by_path(path)

- def get_or_create_project(
- self, path: Path, name: Optional[str] = None
- ) -> ProjectRecord:
+ def get_or_create_project(self, path: Path, name: Optional[str] = None) -> ProjectRecord:
  """Get existing project or create new one."""
  conn = self._get_connection()
  cursor = conn.cursor()
@@ -353,9 +347,7 @@ class SQLiteDatabase(DatabaseInterface):
  creator_id=config.user_id,
  )

- def update_session_activity(
- self, session_id: str, last_activity_at: datetime
- ) -> None:
+ def update_session_activity(self, session_id: str, last_activity_at: datetime) -> None:
  """Update last activity timestamp."""
  conn = self._get_connection()
  conn.execute(
@@ -552,9 +544,7 @@ class SQLiteDatabase(DatabaseInterface):
  return row[0]
  return None

- def get_turn_by_hash(
- self, session_id: str, content_hash: str
- ) -> Optional[TurnRecord]:
+ def get_turn_by_hash(self, session_id: str, content_hash: str) -> Optional[TurnRecord]:
  """Check for existing turn by content hash."""
  conn = self._get_connection()
  cursor = conn.cursor()
@@ -569,9 +559,7 @@ class SQLiteDatabase(DatabaseInterface):
  return self._row_to_turn(row)
  return None

- def get_turn_by_number(
- self, session_id: str, turn_number: int
- ) -> Optional[TurnRecord]:
+ def get_turn_by_number(self, session_id: str, turn_number: int) -> Optional[TurnRecord]:
  """Get a turn by session_id and turn_number."""
  conn = self._get_connection()
  cursor = conn.cursor()
@@ -625,10 +613,7 @@ class SQLiteDatabase(DatabaseInterface):
  session_ids,
  )

- return {
- row[0]: int(row[1]) if row[1] is not None else 0
- for row in cursor.fetchall()
- }
+ return {row[0]: int(row[1]) if row[1] is not None else 0 for row in cursor.fetchall()}

  def get_committed_turn_numbers(self, session_id: str) -> set[int]:
  """Get the set of turn numbers that have been committed for a session."""
@@ -775,9 +760,7 @@ class SQLiteDatabase(DatabaseInterface):
  value = row[0]
  return int(value) if value is not None else 0

- def try_acquire_lock(
- self, lock_key: str, *, owner: str, ttl_seconds: float
- ) -> bool:
+ def try_acquire_lock(self, lock_key: str, *, owner: str, ttl_seconds: float) -> bool:
  """
  Try to acquire a cross-process lease lock.

@@ -936,7 +919,16 @@ class SQLiteDatabase(DatabaseInterface):
  ELSE jobs.next_run_at
  END
  """,
- (job_id, kind, dedupe_key, payload_json, int(priority), run_at, requeue_done_int, requeue_done_int),
+ (
+ job_id,
+ kind,
+ dedupe_key,
+ payload_json,
+ int(priority),
+ run_at,
+ requeue_done_int,
+ requeue_done_int,
+ ),
  )
  conn.commit()

@@ -1318,9 +1310,7 @@ class SQLiteDatabase(DatabaseInterface):
  conn = self._get_connection()
  counts: Dict[str, int] = {}
  try:
- rows = conn.execute(
- "SELECT status, COUNT(*) AS c FROM jobs GROUP BY status"
- ).fetchall()
+ rows = conn.execute("SELECT status, COUNT(*) AS c FROM jobs GROUP BY status").fetchall()
  for row in rows:
  counts[str(row[0])] = int(row[1])
  return counts
@@ -1540,9 +1530,7 @@ class SQLiteDatabase(DatabaseInterface):
  )

  # Sync Commits (Delete all existing links and re-insert)
- conn.execute(
- "DELETE FROM event_commits WHERE event_id = ?", (event.id,)
- )
+ conn.execute("DELETE FROM event_commits WHERE event_id = ?", (event.id,))

  if event.commit_hashes:
  conn.executemany(
@@ -1758,9 +1746,7 @@ class SQLiteDatabase(DatabaseInterface):
  conn = self._get_connection()

  # Convert preset_questions list to JSON string
- preset_questions_json = (
- json.dumps(preset_questions) if preset_questions else None
- )
+ preset_questions_json = json.dumps(preset_questions) if preset_questions else None

  share_expiry_str = (
  share_expiry_at.isoformat() if isinstance(share_expiry_at, datetime) else None
@@ -1964,9 +1950,7 @@ class SQLiteDatabase(DatabaseInterface):
  "title": row["llm_title"],
  "summary": row["assistant_summary"],
  "content": content,
- "content_preview": content[:200] + "..."
- if len(content) > 200
- else content,
+ "content_preview": content[:200] + "..." if len(content) > 200 else content,
  }
  )
  return results
@@ -2197,9 +2181,7 @@ class SQLiteDatabase(DatabaseInterface):
  # For now, let's just do a separate query.
  conn = self._get_connection()
  cur = conn.cursor()
- cur.execute(
- "SELECT commit_hash FROM event_commits WHERE event_id = ?", (row["id"],)
- )
+ cur.execute("SELECT commit_hash FROM event_commits WHERE event_id = ?", (row["id"],))
  commits = [r[0] for r in cur.fetchall()]
  except Exception:
  pass # Indicate no commits or error
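
The sqlite_db.py hunks likewise only join wrapped calls back onto single lines; no signatures change. One touched method, `try_acquire_lock(self, lock_key, *, owner, ttl_seconds)`, is the cross-process lease lock named in its docstring. A hedged usage sketch, assuming a `SQLiteDatabase` instance; the lock key, owner string, and TTL are made-up values:

```python
import os

from realign.db.sqlite_db import SQLiteDatabase  # class shown in the hunks above


def run_exclusively(db: SQLiteDatabase, work) -> bool:
    """Run `work()` only if this process can take the lease (illustrative key and TTL)."""
    owner = f"pid-{os.getpid()}"  # any identifier unique to this process
    if not db.try_acquire_lock("watcher:main", owner=owner, ttl_seconds=30.0):
        return False  # another process holds a live lease
    # The lease is time-limited by ttl_seconds (lease semantics per the docstring above).
    work()
    return True
```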
realign/events/__init__.py CHANGED
@@ -1,2 +1 @@
  """Event maintenance utilities."""
-
realign/events/event_summarizer.py CHANGED
@@ -31,6 +31,7 @@ def schedule_event_summary_update(db: SQLiteDatabase, event_id: str) -> None:
  db: Database instance
  event_id: ID of the event to update
  """
+
  def do_update():
  _update_event_summary(db, event_id)

@@ -122,10 +123,17 @@ def _get_event_summary_prompt() -> str:
  logger.debug(f"Loaded user-customized event summary prompt from {user_prompt_path}")
  return text
  except Exception:
- logger.debug("Failed to load user-customized event summary prompt, falling back", exc_info=True)
+ logger.debug(
+ "Failed to load user-customized event summary prompt, falling back", exc_info=True
+ )

  # Fall back to built-in prompt (tools/commit_message_prompts/event_summary.md)
- candidate = Path(__file__).resolve().parents[2] / "tools" / "commit_message_prompts" / "event_summary.md"
+ candidate = (
+ Path(__file__).resolve().parents[2]
+ / "tools"
+ / "commit_message_prompts"
+ / "event_summary.md"
+ )
  try:
  if candidate.exists():
  text = candidate.read_text(encoding="utf-8").strip()
@@ -184,19 +192,25 @@ def _generate_event_summary_llm(sessions: List[SessionRecord]) -> Tuple[str, str
  # Build sessions payload for prompt
  sessions_data = []
  for i, session in enumerate(sessions):
- sessions_data.append({
- "session_number": i + 1,
- "title": session.session_title or f"Session {session.id[:8]}",
- "summary": session.session_summary or "(no summary)",
- "session_type": session.session_type,
- })
+ sessions_data.append(
+ {
+ "session_number": i + 1,
+ "title": session.session_title or f"Session {session.id[:8]}",
+ "summary": session.session_summary or "(no summary)",
+ "session_type": session.session_type,
+ }
+ )

  system_prompt = _get_event_summary_prompt()

- user_prompt = json.dumps({
- "total_sessions": len(sessions),
- "sessions": sessions_data,
- }, ensure_ascii=False, indent=2)
+ user_prompt = json.dumps(
+ {
+ "total_sessions": len(sessions),
+ "sessions": sessions_data,
+ },
+ ensure_ascii=False,
+ indent=2,
+ )

  try:
  # Use unified LLM client
@@ -205,7 +219,7 @@ def _generate_event_summary_llm(sessions: List[SessionRecord]) -> Tuple[str, str
  user_prompt=user_prompt,
  provider="auto", # Try Claude first, fallback to OpenAI
  max_tokens=500,
- purpose="event_summary"
+ purpose="event_summary",
  )

  if not response:
@@ -241,5 +255,3 @@ def _fallback_event_summary(sessions: List[SessionRecord]) -> Tuple[str, str]:
  description = f"Event containing {len(sessions)} sessions."

  return title[:100], description
-
-
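
The event_summarizer.py hunks are again reflow-only (plus a trailing comma and trailing blank-line cleanup); the payload sent to the LLM is unchanged. A minimal sketch of what `user_prompt` ends up containing for two sessions, mirroring the `sessions_data` construction above; the titles, summaries, and session types are invented:

```python
import json

# Invented example data; only the field names and nesting mirror the code above.
user_prompt = json.dumps(
    {
        "total_sessions": 2,
        "sessions": [
            {
                "session_number": 1,
                "title": "Session 1a2b3c4d",  # fallback form: f"Session {session.id[:8]}"
                "summary": "(no summary)",    # fallback when session_summary is empty
                "session_type": "claude",
            },
            {
                "session_number": 2,
                "title": "Refactor watcher daemon",
                "summary": "Split restart handling into watcher_core.",
                "session_type": "codex",
            },
        ],
    },
    ensure_ascii=False,
    indent=2,
)
print(user_prompt)
```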