aline-ai 0.5.9__py3-none-any.whl → 0.5.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
realign/db/__init__.py CHANGED
@@ -8,6 +8,47 @@ from .base import DatabaseInterface
 from .sqlite_db import SQLiteDatabase
 
 _DB_INSTANCE = None
+_MIGRATION_DONE = False
+
+
+def _auto_migrate_agents_data(db: SQLiteDatabase) -> None:
+    """Auto-migrate terminal.json and load.json data to SQLite (runs once).
+
+    This is triggered automatically when the schema is upgraded to V15.
+    Uses a marker file to avoid running multiple times.
+    """
+    global _MIGRATION_DONE
+    if _MIGRATION_DONE:
+        return
+
+    # Check marker file
+    marker_path = Path.home() / ".aline" / ".agents_migrated_v15"
+    if marker_path.exists():
+        _MIGRATION_DONE = True
+        return
+
+    # Skip during tests
+    if os.getenv("PYTEST_CURRENT_TEST"):
+        _MIGRATION_DONE = True
+        return
+
+    try:
+        from .migrate_agents import migrate_terminal_json, migrate_load_json
+
+        # Run migrations silently (no dry_run)
+        agents_count = migrate_terminal_json(db, dry_run=False, silent=True)
+        contexts_count = migrate_load_json(db, dry_run=False, silent=True)
+
+        # Create marker file
+        marker_path.parent.mkdir(parents=True, exist_ok=True)
+        marker_path.write_text(
+            f"Migrated: {agents_count} agents, {contexts_count} contexts\n"
+        )
+
+        _MIGRATION_DONE = True
+    except Exception:
+        # Don't fail if migration fails - JSON fallback will still work
+        _MIGRATION_DONE = True
 
 
 def get_database(
@@ -54,4 +95,7 @@ def get_database(
         _DB_INSTANCE = SQLiteDatabase(db_path, connect_timeout_seconds=timeout)
         _DB_INSTANCE.initialize()
 
+        # Auto-migrate JSON data to SQLite (runs once after V15 upgrade)
+        _auto_migrate_agents_data(_DB_INSTANCE)
+
     return _DB_INSTANCE
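
For orientation, a minimal sketch of the call path this change introduces, assuming get_database() can be called with its defaults: the first call after the V15 upgrade imports the JSON data once; later calls reuse the cached _DB_INSTANCE, and the migration itself is additionally guarded by the module-level flag and the marker file.

    from realign.db import get_database

    db = get_database()   # first call: initializes SQLite and runs _auto_migrate_agents_data()
    db = get_database()   # later calls: cached _DB_INSTANCE is returned; the migration is
                          # skipped via _MIGRATION_DONE and ~/.aline/.agents_migrated_v15
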
realign/db/base.py CHANGED
@@ -107,6 +107,42 @@ class EventRecord:
     creator_id: Optional[str] = None
 
 
+@dataclass
+class AgentRecord:
+    """Represents a terminal/agent mapping (V15: replaces terminal.json)."""
+
+    id: str  # terminal_id (UUID)
+    provider: str  # 'claude', 'codex', 'opencode', 'zsh'
+    session_type: str
+    created_at: datetime
+    updated_at: datetime
+    session_id: Optional[str] = None  # FK to sessions.id
+    context_id: Optional[str] = None
+    transcript_path: Optional[str] = None
+    cwd: Optional[str] = None
+    project_dir: Optional[str] = None
+    status: str = "active"  # 'active', 'stopped'
+    attention: Optional[str] = None  # 'permission_request', 'stop', or None
+    source: Optional[str] = None
+    creator_name: Optional[str] = None
+    creator_id: Optional[str] = None
+
+
+@dataclass
+class AgentContextRecord:
+    """Represents a context entry (V15: replaces load.json)."""
+
+    id: str  # context_id
+    created_at: datetime
+    updated_at: datetime
+    workspace: Optional[str] = None
+    loaded_at: Optional[str] = None
+    metadata: Optional[Dict[str, Any]] = None
+    # Populated when reading (from M2M tables)
+    session_ids: Optional[List[str]] = None
+    event_ids: Optional[List[str]] = None
+
+
 class DatabaseInterface(ABC):
     """Abstract interface for ReAlign storage backend."""
 
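
Purely as an illustration (all values below are hypothetical), the new records can be constructed directly; the fields without defaults (id, provider, session_type, created_at, updated_at) must be supplied, and the rest mirror nullable columns.

    from datetime import datetime

    from realign.db.base import AgentRecord  # assuming it is exported next to EventRecord

    now = datetime.now()
    agent = AgentRecord(
        id="example-terminal-id",   # terminal_id (a UUID in practice)
        provider="claude",
        session_type="claude",
        created_at=now,
        updated_at=now,
        cwd="/home/user/project",   # made-up value
        status="active",
    )
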
realign/db/migrate_agents.py ADDED
@@ -0,0 +1,297 @@
+"""Migration script to import terminal.json and load.json data into SQLite.
+
+This script migrates:
+- ~/.aline/terminal.json -> agents table
+- ~/.aline/load.json -> agent_contexts, agent_context_sessions, agent_context_events tables
+
+Usage:
+    python -m realign.db.migrate_agents [--backup] [--dry-run]
+"""
+
+from __future__ import annotations
+
+import argparse
+import json
+import sys
+from datetime import datetime
+from pathlib import Path
+from typing import Any
+
+
+def migrate_terminal_json(db, dry_run: bool = False, silent: bool = False) -> int:
+    """Migrate terminal.json to agents table.
+
+    Args:
+        db: Database instance
+        dry_run: If True, don't actually migrate
+        silent: If True, don't print any output
+
+    Returns:
+        Number of agents migrated
+    """
+    path = Path.home() / ".aline" / "terminal.json"
+    if not path.exists():
+        if not silent:
+            print(f"[migrate] terminal.json not found at {path}, skipping")
+        return 0
+
+    try:
+        payload = json.loads(path.read_text(encoding="utf-8"))
+    except Exception as e:
+        if not silent:
+            print(f"[migrate] Failed to read terminal.json: {e}")
+        return 0
+
+    terminals = payload.get("terminals", {})
+    if not isinstance(terminals, dict):
+        if not silent:
+            print("[migrate] terminal.json has no 'terminals' dict, skipping")
+        return 0
+
+    migrated = 0
+    for terminal_id, data in terminals.items():
+        if not isinstance(terminal_id, str) or not isinstance(data, dict):
+            continue
+
+        provider = data.get("provider", "unknown")
+        session_type = data.get("session_type", provider)
+        session_id = data.get("session_id") or None
+        transcript_path = data.get("transcript_path") or None
+        cwd = data.get("cwd") or None
+        project_dir = data.get("project_dir") or None
+        source = data.get("source") or None
+        context_id = data.get("context_id") or None
+        attention = data.get("attention") or None
+
+        if dry_run:
+            if not silent:
+                print(f"[dry-run] Would migrate agent: {terminal_id[:8]}... ({provider})")
+            migrated += 1
+            continue
+
+        try:
+            existing = db.get_agent_by_id(terminal_id)
+            if existing:
+                if not silent:
+                    print(f"[migrate] Agent {terminal_id[:8]}... already exists, skipping")
+                continue
+
+            db.get_or_create_agent(
+                terminal_id,
+                provider=provider,
+                session_type=session_type,
+                session_id=session_id,
+                context_id=context_id,
+                transcript_path=transcript_path,
+                cwd=cwd,
+                project_dir=project_dir,
+                source=source,
+                attention=attention,
+            )
+            if not silent:
+                print(f"[migrate] Migrated agent: {terminal_id[:8]}... ({provider})")
+            migrated += 1
+        except Exception as e:
+            if not silent:
+                print(f"[migrate] Failed to migrate agent {terminal_id[:8]}...: {e}")
+
+    return migrated
+
+
+def migrate_load_json(db, dry_run: bool = False, silent: bool = False) -> int:
+    """Migrate load.json to agent_contexts tables.
+
+    Args:
+        db: Database instance
+        dry_run: If True, don't actually migrate
+        silent: If True, don't print any output
+
+    Returns:
+        Number of contexts migrated
+    """
+    path = Path.home() / ".aline" / "load.json"
+    if not path.exists():
+        if not silent:
+            print(f"[migrate] load.json not found at {path}, skipping")
+        return 0
+
+    try:
+        payload = json.loads(path.read_text(encoding="utf-8"))
+    except Exception as e:
+        if not silent:
+            print(f"[migrate] Failed to read load.json: {e}")
+        return 0
+
+    contexts = payload.get("contexts", [])
+    if not isinstance(contexts, list):
+        if not silent:
+            print("[migrate] load.json has no 'contexts' list, skipping")
+        return 0
+
+    migrated = 0
+    for ctx_data in contexts:
+        if not isinstance(ctx_data, dict):
+            continue
+
+        context_id = ctx_data.get("context_id")
+        if not context_id:
+            # Generate context_id from workspace if not present
+            workspace = ctx_data.get("workspace")
+            if workspace:
+                # Create a deterministic ID from workspace
+                import hashlib
+
+                context_id = f"ws-{hashlib.sha256(workspace.encode()).hexdigest()[:12]}"
+            else:
+                continue
+
+        workspace = ctx_data.get("workspace")
+        loaded_at = ctx_data.get("loaded_at")
+        context_sessions = ctx_data.get("context_sessions", [])
+        context_events = ctx_data.get("context_events", [])
+
+        if dry_run:
+            if not silent:
+                print(
+                    f"[dry-run] Would migrate context: {context_id} "
+                    f"(sessions={len(context_sessions)}, events={len(context_events)})"
+                )
+            migrated += 1
+            continue
+
+        try:
+            existing = db.get_agent_context_by_id(context_id)
+            if existing:
+                if not silent:
+                    print(f"[migrate] Context {context_id} already exists, updating links")
+            else:
+                db.get_or_create_agent_context(
+                    context_id,
+                    workspace=workspace,
+                    loaded_at=loaded_at,
+                )
+                if not silent:
+                    print(f"[migrate] Created context: {context_id}")
+
+            # Update session links (silently skips if session not in DB)
+            if context_sessions:
+                for session_id in context_sessions:
+                    db.link_session_to_agent_context(context_id, session_id)
+                if not silent:
+                    print(f"[migrate] Linked {len(context_sessions)} sessions to {context_id}")
+
+            # Update event links (silently skips if event not in DB)
+            if context_events:
+                for event_id in context_events:
+                    db.link_event_to_agent_context(context_id, event_id)
+                if not silent:
+                    print(f"[migrate] Linked {len(context_events)} events to {context_id}")
+
+            migrated += 1
+        except Exception as e:
+            if not silent:
+                print(f"[migrate] Failed to migrate context {context_id}: {e}")
+
+    return migrated
+
+
+def backup_json_files() -> bool:
+    """Create backup of JSON files.
+
+    Returns:
+        True if backup was successful or files don't exist
+    """
+    files = [
+        Path.home() / ".aline" / "terminal.json",
+        Path.home() / ".aline" / "load.json",
+    ]
+
+    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+
+    for path in files:
+        if not path.exists():
+            continue
+
+        backup_path = path.with_suffix(f".json.bak.{timestamp}")
+        try:
+            backup_path.write_text(path.read_text(encoding="utf-8"), encoding="utf-8")
+            print(f"[backup] Created backup: {backup_path}")
+        except Exception as e:
+            print(f"[backup] Failed to backup {path}: {e}")
+            return False
+
+    return True
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Migrate terminal.json and load.json to SQLite database"
+    )
+    parser.add_argument(
+        "--backup",
+        action="store_true",
+        help="Create backup of JSON files before migration",
+    )
+    parser.add_argument(
+        "--dry-run",
+        action="store_true",
+        help="Show what would be migrated without actually doing it",
+    )
+    parser.add_argument(
+        "--db-path",
+        type=str,
+        default=str(Path.home() / ".aline" / "realign.db"),
+        help="Path to SQLite database",
+    )
+
+    args = parser.parse_args()
+
+    print("=" * 60)
+    print("Aline Agents Migration: JSON -> SQLite")
+    print("=" * 60)
+
+    if args.dry_run:
+        print("[mode] DRY RUN - no changes will be made")
+
+    # Backup if requested
+    if args.backup and not args.dry_run:
+        print("\n[step] Creating backups...")
+        if not backup_json_files():
+            print("[error] Backup failed, aborting migration")
+            sys.exit(1)
+
+    # Initialize database
+    if not args.dry_run:
+        from .sqlite_db import SQLiteDatabase
+
+        print(f"\n[step] Initializing database at {args.db_path}")
+        db = SQLiteDatabase(args.db_path)
+        if not db.initialize():
+            print("[error] Database initialization failed")
+            sys.exit(1)
+    else:
+        db = None
+
+    # Migrate terminal.json
+    print("\n[step] Migrating terminal.json -> agents table")
+    agents_migrated = migrate_terminal_json(db, dry_run=args.dry_run)
+    print(f"[result] Migrated {agents_migrated} agents")
+
+    # Migrate load.json
+    print("\n[step] Migrating load.json -> agent_contexts tables")
+    contexts_migrated = migrate_load_json(db, dry_run=args.dry_run)
+    print(f"[result] Migrated {contexts_migrated} contexts")
+
+    # Cleanup
+    if db:
+        db.close()
+
+    print("\n" + "=" * 60)
+    print("Migration complete!")
+    print(f"  Agents migrated: {agents_migrated}")
+    print(f"  Contexts migrated: {contexts_migrated}")
+    print("=" * 60)
+
+
+if __name__ == "__main__":
+    main()
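
The new module can be run standalone as its docstring describes, or its two helpers can be driven against an initialized database; below is a short sketch mirroring what main() does, with dry_run=True so nothing is written.

    # CLI: preview first, then migrate with a backup of the JSON files
    #   python -m realign.db.migrate_agents --dry-run
    #   python -m realign.db.migrate_agents --backup
    from pathlib import Path

    from realign.db.migrate_agents import migrate_load_json, migrate_terminal_json
    from realign.db.sqlite_db import SQLiteDatabase

    db = SQLiteDatabase(str(Path.home() / ".aline" / "realign.db"))
    db.initialize()
    agents = migrate_terminal_json(db, dry_run=True)  # counts ~/.aline/terminal.json entries
    contexts = migrate_load_json(db, dry_run=True)    # counts ~/.aline/load.json contexts
    db.close()
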
realign/db/schema.py CHANGED
@@ -52,9 +52,15 @@ Schema V14: Share reuse per event.
 - events.share_id: Share ID on server
 - events.share_admin_token: Admin token for extending expiry
 - events.share_expiry_at: Last known expiry timestamp
+
+Schema V15: Agents and contexts tables (replaces terminal.json and load.json).
+- agents table: terminal_id -> session mapping (replaces terminal.json)
+- agent_contexts table: context definitions (replaces load.json)
+- agent_context_sessions table: M2M context-session links
+- agent_context_events table: M2M context-event links
 """
 
-SCHEMA_VERSION = 14
+SCHEMA_VERSION = 15
 
 FTS_EVENTS_SCRIPTS = [
     # Full Text Search for Events
@@ -246,6 +252,63 @@ INIT_SCRIPTS = [
     """,
     "CREATE INDEX IF NOT EXISTS idx_event_sessions_event ON event_sessions(event_id);",
     "CREATE INDEX IF NOT EXISTS idx_event_sessions_session ON event_sessions(session_id);",
+    # Agents table (V15: replaces terminal.json)
+    """
+    CREATE TABLE IF NOT EXISTS agents (
+        id TEXT PRIMARY KEY, -- terminal_id (UUID)
+        provider TEXT NOT NULL, -- 'claude', 'codex', 'opencode', 'zsh'
+        session_type TEXT NOT NULL,
+        session_id TEXT, -- FK to sessions.id (nullable, may not exist yet)
+        context_id TEXT,
+        transcript_path TEXT,
+        cwd TEXT,
+        project_dir TEXT,
+        status TEXT DEFAULT 'active', -- 'active', 'stopped'
+        attention TEXT, -- 'permission_request', 'stop', NULL
+        source TEXT,
+        created_at TEXT DEFAULT (datetime('now')),
+        updated_at TEXT DEFAULT (datetime('now')),
+        creator_name TEXT,
+        creator_id TEXT
+    );
+    """,
+    "CREATE INDEX IF NOT EXISTS idx_agents_session ON agents(session_id);",
+    "CREATE INDEX IF NOT EXISTS idx_agents_context ON agents(context_id);",
+    "CREATE INDEX IF NOT EXISTS idx_agents_status ON agents(status);",
+    # Agent contexts table (V15: replaces load.json)
+    """
+    CREATE TABLE IF NOT EXISTS agent_contexts (
+        id TEXT PRIMARY KEY, -- context_id
+        workspace TEXT,
+        loaded_at TEXT,
+        created_at TEXT DEFAULT (datetime('now')),
+        updated_at TEXT DEFAULT (datetime('now')),
+        metadata TEXT
+    );
+    """,
+    "CREATE INDEX IF NOT EXISTS idx_agent_contexts_workspace ON agent_contexts(workspace);",
+    # Agent context sessions (M2M)
+    """
+    CREATE TABLE IF NOT EXISTS agent_context_sessions (
+        context_id TEXT NOT NULL REFERENCES agent_contexts(id) ON DELETE CASCADE,
+        session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
+        added_at TEXT DEFAULT (datetime('now')),
+        PRIMARY KEY (context_id, session_id)
+    );
+    """,
+    "CREATE INDEX IF NOT EXISTS idx_agent_context_sessions_context ON agent_context_sessions(context_id);",
+    "CREATE INDEX IF NOT EXISTS idx_agent_context_sessions_session ON agent_context_sessions(session_id);",
+    # Agent context events (M2M)
+    """
+    CREATE TABLE IF NOT EXISTS agent_context_events (
+        context_id TEXT NOT NULL REFERENCES agent_contexts(id) ON DELETE CASCADE,
+        event_id TEXT NOT NULL REFERENCES events(id) ON DELETE CASCADE,
+        added_at TEXT DEFAULT (datetime('now')),
+        PRIMARY KEY (context_id, event_id)
+    );
+    """,
+    "CREATE INDEX IF NOT EXISTS idx_agent_context_events_context ON agent_context_events(context_id);",
+    "CREATE INDEX IF NOT EXISTS idx_agent_context_events_event ON agent_context_events(event_id);",
     *FTS_EVENTS_SCRIPTS,
 ]
 
@@ -403,6 +466,66 @@ MIGRATION_V13_TO_V14 = [
     "ALTER TABLE events ADD COLUMN share_expiry_at TEXT;",
 ]
 
+MIGRATION_V14_TO_V15 = [
+    # Agents table (replaces terminal.json)
+    """
+    CREATE TABLE IF NOT EXISTS agents (
+        id TEXT PRIMARY KEY,
+        provider TEXT NOT NULL,
+        session_type TEXT NOT NULL,
+        session_id TEXT,
+        context_id TEXT,
+        transcript_path TEXT,
+        cwd TEXT,
+        project_dir TEXT,
+        status TEXT DEFAULT 'active',
+        attention TEXT,
+        source TEXT,
+        created_at TEXT DEFAULT (datetime('now')),
+        updated_at TEXT DEFAULT (datetime('now')),
+        creator_name TEXT,
+        creator_id TEXT
+    );
+    """,
+    "CREATE INDEX IF NOT EXISTS idx_agents_session ON agents(session_id);",
+    "CREATE INDEX IF NOT EXISTS idx_agents_context ON agents(context_id);",
+    "CREATE INDEX IF NOT EXISTS idx_agents_status ON agents(status);",
+    # Agent contexts table (replaces load.json)
+    """
+    CREATE TABLE IF NOT EXISTS agent_contexts (
+        id TEXT PRIMARY KEY,
+        workspace TEXT,
+        loaded_at TEXT,
+        created_at TEXT DEFAULT (datetime('now')),
+        updated_at TEXT DEFAULT (datetime('now')),
+        metadata TEXT
+    );
+    """,
+    "CREATE INDEX IF NOT EXISTS idx_agent_contexts_workspace ON agent_contexts(workspace);",
+    # Agent context sessions (M2M)
+    """
+    CREATE TABLE IF NOT EXISTS agent_context_sessions (
+        context_id TEXT NOT NULL REFERENCES agent_contexts(id) ON DELETE CASCADE,
+        session_id TEXT NOT NULL REFERENCES sessions(id) ON DELETE CASCADE,
+        added_at TEXT DEFAULT (datetime('now')),
+        PRIMARY KEY (context_id, session_id)
+    );
+    """,
+    "CREATE INDEX IF NOT EXISTS idx_agent_context_sessions_context ON agent_context_sessions(context_id);",
+    "CREATE INDEX IF NOT EXISTS idx_agent_context_sessions_session ON agent_context_sessions(session_id);",
+    # Agent context events (M2M)
+    """
+    CREATE TABLE IF NOT EXISTS agent_context_events (
+        context_id TEXT NOT NULL REFERENCES agent_contexts(id) ON DELETE CASCADE,
+        event_id TEXT NOT NULL REFERENCES events(id) ON DELETE CASCADE,
+        added_at TEXT DEFAULT (datetime('now')),
+        PRIMARY KEY (context_id, event_id)
+    );
+    """,
+    "CREATE INDEX IF NOT EXISTS idx_agent_context_events_context ON agent_context_events(context_id);",
+    "CREATE INDEX IF NOT EXISTS idx_agent_context_events_event ON agent_context_events(event_id);",
+]
+
 
 def get_migration_scripts(from_version: int, to_version: int) -> list:
     """Get migration scripts for upgrading between versions."""
@@ -453,4 +576,7 @@ def get_migration_scripts(from_version: int, to_version: int) -> list:
     if from_version < 14 and to_version >= 14:
         scripts.extend(MIGRATION_V13_TO_V14)
 
+    if from_version < 15 and to_version >= 15:
+        scripts.extend(MIGRATION_V14_TO_V15)
+
     return scripts
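
A small sketch of how the new migration slots into the version ladder; the only assumption here is that the caller executes the returned statements in order before recording the new schema version.

    from realign.db.schema import SCHEMA_VERSION, get_migration_scripts

    assert SCHEMA_VERSION == 15
    scripts = get_migration_scripts(14, SCHEMA_VERSION)
    # For a V14 database this is exactly MIGRATION_V14_TO_V15: CREATE TABLE /
    # CREATE INDEX statements for agents, agent_contexts and the two M2M link
    # tables, all using IF NOT EXISTS so re-running them is harmless.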