aline-ai 0.7.0__py3-none-any.whl → 0.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/METADATA +1 -1
- {aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/RECORD +20 -18
- realign/__init__.py +1 -1
- realign/cli.py +70 -0
- realign/commands/export_shares.py +3 -0
- realign/commands/import_shares.py +2 -2
- realign/commands/sync_agent.py +57 -7
- realign/config.py +6 -1
- realign/dashboard/app.py +26 -33
- realign/dashboard/local_api.py +122 -0
- realign/dashboard/screens/create_agent.py +2 -11
- realign/dashboard/state.py +41 -0
- realign/dashboard/widgets/agents_panel.py +37 -0
- realign/dashboard/widgets/config_panel.py +63 -1
- realign/watcher_core.py +152 -75
- realign/worker_core.py +17 -0
- {aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/WHEEL +0 -0
- {aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/entry_points.txt +0 -0
- {aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/licenses/LICENSE +0 -0
- {aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/top_level.txt +0 -0
{aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/RECORD
CHANGED

@@ -1,13 +1,13 @@
-aline_ai-0.7.
-realign/__init__.py,sha256=
+aline_ai-0.7.2.dist-info/licenses/LICENSE,sha256=H8wTqV5IF1oHw_HbBtS1PSDU8G_q81yblEIL_JfV8Vo,1077
+realign/__init__.py,sha256=T1uqzYkOUbzoMzR_BVD5kRPO727lyC6k7hF_RudJoi8,1623
 realign/agent_names.py,sha256=H4oVJMkqg1ZYCk58vD_Jh9apaAHSFJRswa-C9SPdJxc,1171
 realign/auth.py,sha256=d_1yvCwluN5iIrdgjtuSKpOYAksDzrzNgntKacLVJrw,16583
 realign/claude_detector.py,sha256=ZLSJacMo6zzQclXByABKA70UNpstxqIv3fPGqdpA934,2792
-realign/cli.py,sha256=
+realign/cli.py,sha256=PiMUA_sFQ-K7zlIr1Ahs7St8NwcXDG3JKT_8yIqLwZI,40569
 realign/codex_detector.py,sha256=WGIClvlrFVCqJ5vR9DrKVsp1eJhOShvcaXibTHb0Nfc,6304
 realign/codex_home.py,sha256=ljkW8uCfQD4cisEJtPNQmIgaR0yEfWSyHwoVQFY-6p4,4374
 realign/codex_terminal_linker.py,sha256=L2Ha4drlZ7Sbq2jzXyxczOdUY3S5fu1gJqoI5WN9CKk,6211
-realign/config.py,sha256=
+realign/config.py,sha256=_loJkoTKszMONgo6Qq3N8VRm_iqvD-7WvXeCsKUgGUE,9478
 realign/context.py,sha256=8hzgNOg-7_eMW22wt7OM5H9IsmMveKXCv0epG7E0G7w,13917
 realign/file_lock.py,sha256=kLNm1Rra4TCrTMyPM5fwjVascq-CUz2Bzh9HHKtCKOE,3444
 realign/hooks.py,sha256=wSSIjS5x9w7fm9LUcL63Lf7bglEfb75dHFja_znKDDQ,65134
@@ -16,9 +16,9 @@ realign/logging_config.py,sha256=LCAigKFhTj86PSJm4-kUl3Ag9h_GENh3x2iPnMv7qUI,487
 realign/mcp_server.py,sha256=LWiQ2qukYoNLsoV2ID2f0vF9jkJlBvB587HpM5jymgE,10193
 realign/mcp_watcher.py,sha256=aK4jWStv7CoCroS4tXFHgZ_y_-q4QDjrpWgm4DxcEj4,1260
 realign/redactor.py,sha256=Zsoi5HfYak2yPmck20JArhm-1cPSB78IdkBJiNVXfrc,17096
-realign/watcher_core.py,sha256=
+realign/watcher_core.py,sha256=iXVXlDhYsoxk_BZ1rOVIfTDu6ABXZO18UeNQKVPsvzc,120374
 realign/watcher_daemon.py,sha256=OHUQ9P1LlagKJHfrf6uRnzO-zDtBRXIxt8ydMFHf5S8,3475
-realign/worker_core.py,sha256=
+realign/worker_core.py,sha256=tJGETH_FIBpb899D8-JP_cOyDE63BYc8kxxmrhb83i4,13013
 realign/worker_daemon.py,sha256=X7Xyjw_u6m6KG4E84nx0HpDFw4cWMv8ja1G8btc9PiM,3957
 realign/adapters/__init__.py,sha256=alkJr7DRn_CrJecSJRjRJOHHnkz9EnZ5TnsU8n1Bb0k,719
 realign/adapters/base.py,sha256=2IdAZKGjg5gPB3YLf_8r3V4XAdbK7fHpj06GjjsYEFY,7409
@@ -41,19 +41,21 @@ realign/commands/auth.py,sha256=wcs1lUcSXxv75WcGruzyZ3kgi0xXA8W4lNnUwM4a3CI,1173
 realign/commands/config.py,sha256=nYnu_h2pk7GODcrzrV04K51D-s7v06FlRXHJ0HJ-gvU,6732
 realign/commands/context.py,sha256=pM2KfZHVkB-ou4nBhFvKSwnYliLBzwN3zerLyBAbhfE,7095
 realign/commands/doctor.py,sha256=0c1TZuA_cw1CSU0yKMVRU-18uTxdqjXKJ8lP2CTTNSQ,20656
-realign/commands/export_shares.py,sha256=
-realign/commands/import_shares.py,sha256=
+realign/commands/export_shares.py,sha256=76VYTB9r6JVx9rLcimwJ1xwpTNeeQU5TFL4SmTbRY54,148590
+realign/commands/import_shares.py,sha256=Jx_7HVSg7SrGGKLDxsf_UqoStDimw8B26uKkqNFF6t8,33071
 realign/commands/init.py,sha256=6rBr1LVIrQLbUH_UvoDhkF1qXmMh2xkjNWCYAUz5Tho,35274
 realign/commands/restore.py,sha256=s2BxQZHxQw9r12NzRVsK20KlGafy5AIoSjWMo5PcnHY,11173
 realign/commands/search.py,sha256=QlUDzRDD6ebq21LTtLe5-OZM62iwDrDqfbnXbuxfklU,27516
-realign/commands/sync_agent.py,sha256=
+realign/commands/sync_agent.py,sha256=gzvbqdujBbFdUY0SNyb66OPjp3e-qnqazinH5ZiPFok,14876
 realign/commands/upgrade.py,sha256=L3PLOUIN5qAQTbkfoVtSsIbbzEezA_xjjk9F1GMVfjw,12781
 realign/commands/watcher.py,sha256=4WTThIgr-Z5guKh_JqGDcPmerr97XiHrVaaijmckHsA,134350
 realign/commands/worker.py,sha256=jTu7Pj60nTnn7SsH3oNCNnO6zl4TIFCJVNSC1OoQ_0o,23363
 realign/dashboard/__init__.py,sha256=QZkHTsGityH8UkF8rmvA3xW7dMXNe0swEWr443qfgCM,128
-realign/dashboard/app.py,sha256=
+realign/dashboard/app.py,sha256=gp44tyR3dEJdxx36D-HODpbNh56cw4117zY1ina1chw,8182
 realign/dashboard/clipboard.py,sha256=81frq83E_urqLkwuCvtl0hiTEjavtdQn8kCi72jJWcs,1207
 realign/dashboard/layout.py,sha256=sZxmFj6QTbkois9MHTvBEMMcnaRVehCDqugdbiFx10k,9072
+realign/dashboard/local_api.py,sha256=Roq74etTJR0uOiHE3uIe7sqVITjS5JGQEF4g0nmUm5Q,4332
+realign/dashboard/state.py,sha256=V7zBKvyDgqdXv68XHxV4T8xf3IhYbI5W33UmYW3_hyM,1139
 realign/dashboard/terminal_backend.py,sha256=MlDfwtqhftyQK6jDNizQGFjAWIo5Bx2TDpSnP3MCZVM,3375
 realign/dashboard/tmux_manager.py,sha256=sS6fo7UVPHWxYm1RYtLDPmwsagFh5RO6TRwYd1CuHaI,34581
 realign/dashboard/backends/__init__.py,sha256=POROX7YKtukYZcLB1pi_kO0sSEpuO3y-hwmF3WIN1Kk,163
@@ -61,7 +63,7 @@ realign/dashboard/backends/iterm2.py,sha256=XYYJT5lrrp4pW_MyEqPZYkRI0qyKUwJlezwM
 realign/dashboard/backends/kitty.py,sha256=5jdkR1f2PwB8a4SnS3EG6uOQ2XU-PB7-cpKBfIJq3hU,12066
 realign/dashboard/screens/__init__.py,sha256=MiefFamCYRrzTwQXiCUdybaJaFxlK5XKtLHaSQmqDv0,597
 realign/dashboard/screens/agent_detail.py,sha256=N-iUC4434C91OcDu4dkQaxS_NXQ5Yl5sqNBb2mTmoBw,10490
-realign/dashboard/screens/create_agent.py,sha256=
+realign/dashboard/screens/create_agent.py,sha256=Dy9liP_4fj_zgNafRRJGX2iQJiarHvtVLdghrqMGiLQ,11323
 realign/dashboard/screens/create_agent_info.py,sha256=K2Rbp4zHVdanPT3Fp82We4qlSAM-0IBZXPLuQuevuME,7838
 realign/dashboard/screens/create_event.py,sha256=oiQY1zKpUYnQU-5fQLeuZH9BV5NClE5B5XZIVBYG5A8,5506
 realign/dashboard/screens/event_detail.py,sha256=-pqt3NBoeTXGJKtbndZy-msklwXTeNWMS4H12oMG5ks,20175
@@ -70,8 +72,8 @@ realign/dashboard/screens/session_detail.py,sha256=TBkHqSHyMxsLB2QdZq9m1EoiH8oRV
 realign/dashboard/screens/share_import.py,sha256=hl2x0yGVycsoUI76AmdZTAV-br3Q6191g5xHHrZ8hOA,6318
 realign/dashboard/styles/dashboard.tcss,sha256=9W5Tx0lgyGb4HU-z-Kn7gBdexIK0aPe0bkVn2k_AseM,3288
 realign/dashboard/widgets/__init__.py,sha256=dXsOnbeu_8XhP-6Bu6-R_0LNGqsSM6x7dG7FCDumpa8,460
-realign/dashboard/widgets/agents_panel.py,sha256=
-realign/dashboard/widgets/config_panel.py,sha256=
+realign/dashboard/widgets/agents_panel.py,sha256=SEzjDFaMdl9bVhKr0XlhMww68pp9pmIu9HqyFfp5Iaw,45209
+realign/dashboard/widgets/config_panel.py,sha256=J6A_rxGVqNu5TMFcWELWgdX1nFCHAjKprFMMp7mBDKo,18203
 realign/dashboard/widgets/events_table.py,sha256=0cMvE0KdZFBZyvywv7vlt005qsR0aLQnQiMf3ZzK7RY,30218
 realign/dashboard/widgets/header.py,sha256=0HHCFXX7F3C6HII-WDwOJwWkJrajmKPWmdoMWyOkn9E,1587
 realign/dashboard/widgets/openable_table.py,sha256=GeJPDEYp0kRHShqvmPMzAePpYXRZHUNqcWNnxqsqxjA,1963
@@ -104,8 +106,8 @@ realign/triggers/next_turn_trigger.py,sha256=-x80_I-WmIjXXzQHEPBykgx_GQW6oKaLDQx
 realign/triggers/registry.py,sha256=dkIjSd8Bg-hF0nxaO2Fi2K-0Zipqv6vVjc-HYSrA_fY,3656
 realign/triggers/turn_status.py,sha256=wAZEhXDAmDoX5F-ohWfSnZZ0eA6DAJ9svSPiSv_f6sg,6041
 realign/triggers/turn_summary.py,sha256=f3hEUshgv9skJ9AbfWpoYs417lsv_HK2A_vpPjgryO4,4467
-aline_ai-0.7.
-aline_ai-0.7.
-aline_ai-0.7.
-aline_ai-0.7.
-aline_ai-0.7.
+aline_ai-0.7.2.dist-info/METADATA,sha256=8dK5fqTKyPuVF0b8Y5I0DMLyaRKF7AlzfQ92oSsWBFM,1597
+aline_ai-0.7.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+aline_ai-0.7.2.dist-info/entry_points.txt,sha256=TvYELpMoWsUTcQdMV8tBHxCbEf_LbK4sESqK3r8PM6Y,78
+aline_ai-0.7.2.dist-info/top_level.txt,sha256=yIL3s2xv9nf1GwD5n71Aq_JEIV4AfzCIDNKBzewuRm4,8
+aline_ai-0.7.2.dist-info/RECORD,,

realign/__init__.py
CHANGED
realign/cli.py
CHANGED
@@ -311,6 +311,76 @@ def agent_share_cli(
     raise typer.Exit(code=exit_code)
 
 
+@agent_app.command(name="import")
+def agent_import_cli(
+    share_url: str = typer.Argument(..., help="Share URL to import agent from"),
+    password: Optional[str] = typer.Option(
+        None, "--password", "-p", help="Password for encrypted share"
+    ),
+):
+    """Import an agent from a share link.
+
+    Downloads the shared agent data and creates a local agent with all its sessions.
+    For unencrypted shares, sync is automatically enabled so you can push/pull updates.
+
+    Examples:
+        aline agent import https://realign-server.vercel.app/share/abc123
+        aline agent import https://realign-server.vercel.app/share/xyz789 --password mypass
+    """
+    from .commands.import_shares import import_agent_from_share
+
+    result = import_agent_from_share(share_url=share_url, password=password)
+
+    if result["success"]:
+        console.print(f"\n[green]✅ Agent imported successfully![/green]")
+        console.print(f"[cyan]Agent ID:[/cyan] {result['agent_id']}")
+        console.print(f"[cyan]Agent Name:[/cyan] {result['agent_name']}")
+        if result.get('agent_description'):
+            console.print(f"[cyan]Description:[/cyan] {result['agent_description'][:100]}...")
+        console.print(f"[cyan]Sessions imported:[/cyan] {result['sessions_imported']}")
+        console.print(f"[cyan]Turns imported:[/cyan] {result['turns_imported']}")
+        if result.get('sync_enabled'):
+            console.print(f"\n[green]✓ Sync enabled![/green] Use 'aline agent sync {result['agent_id'][:8]}' to sync updates.")
+        raise typer.Exit(code=0)
+    else:
+        console.print(f"\n[red]❌ Import failed:[/red] {result['error']}")
+        raise typer.Exit(code=1)
+
+
+@agent_app.command(name="sync")
+def agent_sync_cli(
+    agent_id: str = typer.Argument(..., help="Agent ID to sync"),
+):
+    """Sync an agent's sessions with the remote share.
+
+    Performs bidirectional sync: pulls new sessions from remote, merges locally,
+    then pushes merged state back. Uses optimistic locking to handle conflicts.
+
+    Only works with agents that were shared or imported with sync enabled.
+
+    Examples:
+        aline agent sync abc123de
+    """
+    from .commands.sync_agent import sync_agent_command
+
+    def progress_callback(msg: str) -> None:
+        console.print(f"[dim]{msg}[/dim]")
+
+    result = sync_agent_command(agent_id=agent_id, progress_callback=progress_callback)
+
+    if result["success"]:
+        console.print(f"\n[green]✅ Sync completed![/green]")
+        console.print(f"[cyan]Sessions pulled:[/cyan] {result['sessions_pulled']}")
+        console.print(f"[cyan]Sessions pushed:[/cyan] {result['sessions_pushed']}")
+        if result.get('description_updated'):
+            console.print(f"[cyan]Description updated:[/cyan] Yes")
+        console.print(f"[cyan]New sync version:[/cyan] {result['new_sync_version']}")
+        raise typer.Exit(code=0)
+    else:
+        console.print(f"\n[red]❌ Sync failed:[/red] {result['error']}")
+        raise typer.Exit(code=1)
+
+
 @add_app.command(name="tmux")
 def add_tmux_cli():
     """Install tmux via Homebrew and set up Aline tmux clipboard bindings."""

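The two new commands are thin wrappers around import_agent_from_share() and sync_agent_command(). Below is a minimal sketch of driving the same helpers from a script; it assumes only the function signatures and result-dict keys that appear in this diff, and everything else (the share URL, the callback) is illustrative:

from realign.commands.import_shares import import_agent_from_share
from realign.commands.sync_agent import sync_agent_command

# Mirrors what `aline agent import` followed by `aline agent sync` does.
result = import_agent_from_share(
    share_url="https://realign-server.vercel.app/share/abc123",
    password=None,  # only needed for encrypted shares
)
if result["success"]:
    sync_result = sync_agent_command(
        agent_id=result["agent_id"],
        progress_callback=lambda msg: print(msg),  # the CLI prints these dimmed
    )
    print(sync_result["sessions_pulled"], sync_result["sessions_pushed"])
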
realign/commands/export_shares.py
CHANGED

@@ -3956,6 +3956,9 @@ def export_agent_shares_command(
             "description": event_description,
         }
 
+        # Add agent name to ui_metadata for chat display
+        conversation_data["ui_metadata"]["agent_name"] = agent_info.name
+
         # Add MCP instructions if enabled
         if enable_mcp:
             conversation_data["ui_metadata"]["mcp_instructions"] = {

realign/commands/import_shares.py
CHANGED

@@ -202,9 +202,9 @@ def import_agent_from_share(
                 total_turns += import_result["turns"]
             except Exception as e:
                 logger.error(f"Failed to import session {session_id}: {e}")
-                continue
+                # Don't continue - still try to link the session if it exists
 
-            # Link session to agent
+            # Link session to agent (always attempt, even if import was skipped/failed)
             try:
                 db.update_session_agent_id(session_id, agent_id)
             except Exception as e:

realign/commands/sync_agent.py
CHANGED
@@ -74,9 +74,19 @@ def sync_agent_command(
     # 1. Load local state
     _progress("Loading local agent data...")
 
+    # Support prefix matching for agent_id
     agent_info = db.get_agent_info(agent_id)
     if not agent_info:
-
+        # Try prefix match
+        all_agents = db.list_agent_info()
+        matches = [a for a in all_agents if a.id.startswith(agent_id)]
+        if len(matches) == 1:
+            agent_info = matches[0]
+            agent_id = agent_info.id
+        elif len(matches) > 1:
+            return {"success": False, "error": f"Ambiguous agent_id prefix '{agent_id}' matches {len(matches)} agents"}
+        else:
+            return {"success": False, "error": f"Agent not found: {agent_id}"}
 
     if not agent_info.share_id or not agent_info.share_url:
         return {"success": False, "error": "Agent has no share metadata (not shared yet)"}
@@ -88,6 +98,29 @@ def sync_agent_command(
     share_id = agent_info.share_id
     local_sync_version = agent_info.sync_version or 0
 
+    # Repair: backfill agent_id on sessions linked via windowlink but missing agent_id.
+    # This handles Claude sessions where the watcher created the session before the
+    # agent_id was known (race between polling and stop-hook signals).
+    try:
+        conn = db._get_connection()
+        unlinked = conn.execute(
+            """SELECT DISTINCT w.session_id
+               FROM windowlink w
+               JOIN sessions s ON s.id = w.session_id
+               WHERE w.agent_id = ?
+                 AND (s.agent_id IS NULL OR s.agent_id = '')""",
+            (agent_id,),
+        ).fetchall()
+        for row in unlinked:
+            sid = row[0]
+            if sid:
+                db.update_session_agent_id(sid, agent_id)
+                logger.info(f"Sync repair: linked session {sid} to agent {agent_id}")
+        if unlinked:
+            _progress(f"Repaired {len(unlinked)} unlinked session(s)")
+    except Exception as e:
+        logger.warning(f"Session repair step failed (non-fatal): {e}")
+
     local_sessions = db.get_sessions_by_agent_id(agent_id)
     local_content_hashes = db.get_agent_content_hashes(agent_id)
@@ -129,29 +162,44 @@ def sync_agent_command(
         session_id = session_data.get("session_id", "")
         session_turns = session_data.get("turns", [])
 
-        # Check if any turns in this session are new to
+        # Check if any turns in this session are new to THIS AGENT (not globally)
         new_turns = [
             t for t in session_turns
             if t.get("content_hash") and t["content_hash"] not in local_content_hashes
         ]
 
-        if
+        # Check if session exists and whether it's linked to this agent
+        existing_session = db.get_session_by_id(session_id)
+        session_is_new = existing_session is None
+        session_needs_linking = existing_session and existing_session.agent_id != agent_id
+
+        # Import if: new turns, or session is new, or session needs linking
+        if not new_turns and not session_is_new and not session_needs_linking:
            continue
 
         # Import the session (import_session_with_turns handles dedup by content_hash)
+        should_count = session_is_new or session_needs_linking
         try:
             # Suppress auto-summaries during sync
             os.environ["REALIGN_DISABLE_AUTO_SUMMARIES"] = "1"
             import_result = import_session_with_turns(
                 session_data, f"agent-{agent_id}", agent_info.share_url, db, force=False
             )
-
+            # Count as pulled if: created new session/turns, or session was new/needed linking
+            if (import_result.get("sessions", 0) > 0 or import_result.get("turns", 0) > 0
+                    or should_count):
+                sessions_pulled += 1
+        except Exception as e:
+            logger.error(f"Failed to import remote session {session_id}: {e}")
+            # Still count if we intended to import this session
+            if should_count:
                 sessions_pulled += 1
 
-
+        # Always link session to agent (even if import was skipped)
+        try:
             db.update_session_agent_id(session_id, agent_id)
         except Exception as e:
-            logger.error(f"Failed to
+            logger.error(f"Failed to link session {session_id} to agent: {e}")
 
         # Merge name/description: last-write-wins by updated_at
         description_updated = False
@@ -335,7 +383,9 @@ def _build_merged_conversation_data(
         "time": datetime.now(timezone.utc).isoformat(),
         "event": event_data,
         "sessions": sessions_data,
-        "ui_metadata": {
+        "ui_metadata": {
+            "agent_name": agent_info.name,
+        },
     }
 
     if contributor_token:

realign/config.py
CHANGED
@@ -33,6 +33,9 @@ class ReAlignConfig:
     # Session catch-up settings
     max_catchup_sessions: int = 3  # Max sessions to auto-import on watcher startup
 
+    # Local API server port (for one-click browser import)
+    local_api_port: int = 17280
+
     # Terminal auto-close settings
     auto_close_stale_terminals: bool = False  # Auto-close terminals inactive for 24+ hours
     stale_terminal_hours: int = 24  # Hours of inactivity before auto-closing
@@ -85,13 +88,14 @@ class ReAlignConfig:
             "user_name": os.getenv("REALIGN_USER_NAME"),
             "uid": os.getenv("REALIGN_UID"),
             "max_catchup_sessions": os.getenv("REALIGN_MAX_CATCHUP_SESSIONS"),
+            "local_api_port": os.getenv("ALINE_LOCAL_API_PORT"),
             "auto_close_stale_terminals": os.getenv("REALIGN_AUTO_CLOSE_STALE_TERMINALS"),
             "stale_terminal_hours": os.getenv("REALIGN_STALE_TERMINAL_HOURS"),
         }
 
         for key, value in env_overrides.items():
             if value is not None:
-                if key in ["summary_max_chars", "max_catchup_sessions", "stale_terminal_hours"]:
+                if key in ["summary_max_chars", "max_catchup_sessions", "stale_terminal_hours", "local_api_port"]:
                     config_dict[key] = int(value)
                 elif key in [
                     "redact_on_match",
@@ -139,6 +143,7 @@ class ReAlignConfig:
             "user_name": self.user_name,
             "uid": self.uid,
             "max_catchup_sessions": self.max_catchup_sessions,
+            "local_api_port": self.local_api_port,
             "auto_close_stale_terminals": self.auto_close_stale_terminals,
             "stale_terminal_hours": self.stale_terminal_hours,
         }

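The new local_api_port field can be overridden with the ALINE_LOCAL_API_PORT environment variable, and because the key is added to the int-coercion list above, the string from the environment is converted before use. A minimal sketch, assuming ReAlignConfig.load() (the loader the dashboard diff below calls) is the code path that applies this env_overrides block:

import os
from realign.config import ReAlignConfig

# Override the port before the dashboard reads its config (assumption: load()
# applies the env_overrides shown above).
os.environ["ALINE_LOCAL_API_PORT"] = "18080"
config = ReAlignConfig.load()
print(config.local_api_port)  # 18080 as an int, instead of the default 17280
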
realign/dashboard/app.py
CHANGED
@@ -1,8 +1,6 @@
 """Aline Dashboard - Main Application."""
 
 import os
-import subprocess
-import sys
 import time
 import traceback
 
@@ -16,6 +14,7 @@ from .widgets import (
     ConfigPanel,
     AgentsPanel,
 )
+from .state import get_dashboard_state_value
 
 # Environment variable to control terminal mode
 ENV_TERMINAL_MODE = "ALINE_TERMINAL_MODE"
@@ -24,27 +23,6 @@ ENV_TERMINAL_MODE = "ALINE_TERMINAL_MODE"
 logger = setup_logger("realign.dashboard", "dashboard.log")
 
 
-def _detect_system_dark_mode() -> bool:
-    """Detect if the system is in dark mode.
-
-    On macOS, checks AppleInterfaceStyle via defaults command.
-    Returns True for dark mode, False for light mode.
-    """
-    if sys.platform != "darwin":
-        return True  # Default to dark on non-macOS
-
-    try:
-        result = subprocess.run(
-            ["defaults", "read", "-g", "AppleInterfaceStyle"],
-            capture_output=True,
-            text=True,
-            timeout=1,
-        )
-        return result.stdout.strip().lower() == "dark"
-    except Exception:
-        return True  # Default to dark on error
-
-
 def _monotonic() -> float:
     """Small wrapper so tests can patch without affecting global time.monotonic()."""
     return time.monotonic()
@@ -78,6 +56,8 @@ class AlineDashboard(App):
         super().__init__()
         self.use_native_terminal = use_native_terminal
         self._native_terminal_mode = self._detect_native_mode()
+        self._local_api_server = None
+        self._apply_saved_theme()
         logger.info(
             f"AlineDashboard initialized (native_terminal={self._native_terminal_mode})"
         )
@@ -110,14 +90,14 @@ class AlineDashboard(App):
         return ["agents", "config"]
 
     def on_mount(self) -> None:
-        """Apply theme based on
+        """Apply dashboard theme based on saved preference."""
         logger.info("on_mount() started")
         try:
-            self._sync_theme()
-            # Check for system theme changes every 2 seconds
-            self.set_interval(2, self._sync_theme)
             self._quit_confirm_deadline: float | None = None
 
+            # Start local API server for one-click browser import
+            self._start_local_api_server()
+
             # Set up side-by-side layout for native terminal mode
             if self._native_terminal_mode:
                 self._setup_native_terminal_layout()
@@ -127,6 +107,25 @@ class AlineDashboard(App):
             logger.error(f"on_mount() failed: {e}\n{traceback.format_exc()}")
             raise
 
+    def _start_local_api_server(self) -> None:
+        """Start the local HTTP API server for browser-based agent import."""
+        try:
+            from ..config import ReAlignConfig
+            from .local_api import LocalAPIServer
+
+            config = ReAlignConfig.load()
+            self._local_api_server = LocalAPIServer(port=config.local_api_port)
+            self._local_api_server.start()
+        except Exception as e:
+            logger.warning(f"Could not start local API server: {e}")
+
+    def _apply_saved_theme(self) -> None:
+        theme_choice = str(get_dashboard_state_value("theme", "dark")).strip().lower()
+        if theme_choice == "light":
+            self.theme = "textual-light"
+        else:
+            self.theme = "textual-dark"
+
     def _setup_native_terminal_layout(self) -> None:
         """Set up side-by-side layout for Dashboard and native terminal."""
         # Skip if using iTerm2 split pane mode (already set up by CLI)
@@ -158,12 +157,6 @@ class AlineDashboard(App):
         except Exception as e:
             logger.warning(f"Could not set up native terminal layout: {e}")
 
-    def _sync_theme(self) -> None:
-        """Sync app theme with system theme."""
-        target_theme = "textual-dark" if _detect_system_dark_mode() else "textual-light"
-        if self.theme != target_theme:
-            self.theme = target_theme
-
     def action_next_tab(self) -> None:
         """Switch to next tab."""
         tabbed_content = self.query_one(TabbedContent)

realign/dashboard/local_api.py
ADDED

@@ -0,0 +1,122 @@
+"""Local HTTP API server for one-click agent import from browser."""
+
+import json
+import threading
+from http.server import HTTPServer, BaseHTTPRequestHandler
+
+from ..logging_config import setup_logger
+
+logger = setup_logger("realign.dashboard.local_api", "local_api.log")
+
+ALLOWED_ORIGINS = [
+    "https://realign-server.vercel.app",
+    "http://localhost:3000",
+]
+
+
+class LocalAPIHandler(BaseHTTPRequestHandler):
+    """Handle local API requests from the browser."""
+
+    def _set_cors_headers(self) -> bool:
+        """Set CORS headers. Returns True if origin is allowed."""
+        origin = self.headers.get("Origin", "")
+        if origin in ALLOWED_ORIGINS:
+            self.send_header("Access-Control-Allow-Origin", origin)
+            self.send_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
+            self.send_header("Access-Control-Allow-Headers", "Content-Type")
+            return True
+        return False
+
+    def _send_json(self, status: int, data: dict) -> None:
+        self.send_response(status)
+        self.send_header("Content-Type", "application/json")
+        self._set_cors_headers()
+        self.end_headers()
+        self.wfile.write(json.dumps(data).encode())
+
+    def do_OPTIONS(self) -> None:
+        """Handle CORS preflight."""
+        self.send_response(204)
+        self._set_cors_headers()
+        self.end_headers()
+
+    def do_GET(self) -> None:
+        if self.path == "/api/health":
+            self._send_json(200, {"status": "ok"})
+        else:
+            self._send_json(404, {"error": "not found"})
+
+    def do_POST(self) -> None:
+        if self.path == "/api/import-agent":
+            self._handle_import_agent()
+        else:
+            self._send_json(404, {"error": "not found"})
+
+    def _handle_import_agent(self) -> None:
+        try:
+            length = int(self.headers.get("Content-Length", 0))
+            body = json.loads(self.rfile.read(length)) if length else {}
+        except (json.JSONDecodeError, ValueError):
+            self._send_json(400, {"error": "invalid JSON body"})
+            return
+
+        share_url = body.get("share_url")
+        if not share_url:
+            self._send_json(400, {"error": "share_url is required"})
+            return
+
+        password = body.get("password")
+
+        logger.info(f"Import agent request: {share_url}")
+
+        try:
+            from ..commands.import_shares import import_agent_from_share
+
+            result = import_agent_from_share(share_url, password=password)
+            if result.get("success"):
+                logger.info(
+                    f"Agent imported: {result.get('agent_name')} "
+                    f"({result.get('sessions_imported')} sessions)"
+                )
+                self._send_json(200, result)
+            else:
+                logger.warning(f"Import failed: {result.get('error')}")
+                self._send_json(422, result)
+        except Exception as e:
+            logger.error(f"Import agent error: {e}", exc_info=True)
+            self._send_json(500, {"success": False, "error": str(e)})
+
+    def log_message(self, format: str, *args) -> None:
+        """Suppress default stderr logging; use our logger instead."""
+        logger.debug(f"HTTP {args[0] if args else ''}")
+
+
+class LocalAPIServer:
+    """Manages the local HTTP API server in a daemon thread."""
+
+    def __init__(self, port: int = 17280):
+        self.port = port
+        self._server: HTTPServer | None = None
+        self._thread: threading.Thread | None = None
+
+    def start(self) -> bool:
+        """Start the server. Returns True on success."""
+        try:
+            self._server = HTTPServer(("127.0.0.1", self.port), LocalAPIHandler)
+            self._thread = threading.Thread(
+                target=self._server.serve_forever, daemon=True
+            )
+            self._thread.start()
+            logger.info(f"Local API server started on http://127.0.0.1:{self.port}")
+            return True
+        except OSError as e:
+            logger.warning(f"Failed to start local API server on port {self.port}: {e}")
+            return False
+
+    def stop(self) -> None:
+        """Stop the server."""
+        if self._server:
+            self._server.shutdown()
+            self._server = None
+            self._thread = None
+            logger.info("Local API server stopped")

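The server binds only to 127.0.0.1 and exposes two routes, GET /api/health and POST /api/import-agent. A hedged client sketch using only the standard library follows; the port is the local_api_port default (17280) and the JSON fields mirror the handler above, while the share URL and timeouts are illustrative:

import json
from urllib.request import Request, urlopen

BASE = "http://127.0.0.1:17280"  # default local_api_port

# Health probe the share page can use to detect a running dashboard.
with urlopen(f"{BASE}/api/health", timeout=2) as resp:
    print(json.load(resp))  # {"status": "ok"}

# Same payload a browser on an allowed origin would POST for one-click import.
payload = json.dumps({"share_url": "https://realign-server.vercel.app/share/abc123"}).encode()
req = Request(
    f"{BASE}/api/import-agent",
    data=payload,
    headers={"Content-Type": "application/json"},
)
# Note: urlopen raises HTTPError for the 4xx/5xx responses the handler sends on failure.
with urlopen(req, timeout=60) as resp:
    result = json.load(resp)  # the import_agent_from_share() result dict
    print(result.get("agent_name"), result.get("sessions_imported"))
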
realign/dashboard/screens/create_agent.py
CHANGED

@@ -15,9 +15,7 @@ from textual.containers import Container, Horizontal, Vertical
 from textual.screen import ModalScreen
 from textual.widgets import Button, Label, RadioButton, RadioSet, Static
 
-
-# State file for storing last workspace path
-DASHBOARD_STATE_FILE = Path.home() / ".aline" / "dashboard_state.json"
+from ..state import DASHBOARD_STATE_FILE, set_dashboard_state_value
 
 
 def _load_last_workspace() -> str:
@@ -80,14 +78,7 @@ def _save_claude_tracking_mode(mode: str) -> None:
 def _save_state(key: str, value: str) -> None:
     """Save a key-value pair to the state file."""
     try:
-
-        state = {}
-        if DASHBOARD_STATE_FILE.exists():
-            with open(DASHBOARD_STATE_FILE, "r", encoding="utf-8") as f:
-                state = json.load(f)
-        state[key] = value
-        with open(DASHBOARD_STATE_FILE, "w", encoding="utf-8") as f:
-            json.dump(state, f, indent=2)
+        set_dashboard_state_value(key, value)
     except Exception:
         pass
 

realign/dashboard/state.py
ADDED

@@ -0,0 +1,41 @@
+"""Small persistence helpers for dashboard UI state."""
+
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from typing import Any
+
+
+DASHBOARD_STATE_FILE = Path.home() / ".aline" / "dashboard_state.json"
+
+
+def load_dashboard_state() -> dict[str, Any]:
+    try:
+        if not DASHBOARD_STATE_FILE.exists():
+            return {}
+        with open(DASHBOARD_STATE_FILE, "r", encoding="utf-8") as f:
+            data = json.load(f)
+        return data if isinstance(data, dict) else {}
+    except Exception:
+        return {}
+
+
+def save_dashboard_state(state: dict[str, Any]) -> None:
+    try:
+        DASHBOARD_STATE_FILE.parent.mkdir(parents=True, exist_ok=True)
+        with open(DASHBOARD_STATE_FILE, "w", encoding="utf-8") as f:
+            json.dump(state, f, indent=2)
+    except Exception:
+        return
+
+
+def get_dashboard_state_value(key: str, default: Any) -> Any:  # noqa: ANN401
+    return load_dashboard_state().get(key, default)
+
+
+def set_dashboard_state_value(key: str, value: Any) -> None:  # noqa: ANN401
+    state = load_dashboard_state()
+    state[key] = value
+    save_dashboard_state(state)
+

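The helpers fail soft: a missing or corrupt ~/.aline/dashboard_state.json simply yields the caller's default, and writes create the parent directory as needed. A short sketch of the round trip the theme preference relies on:

from realign.dashboard.state import get_dashboard_state_value, set_dashboard_state_value

# Falls back to "dark" when no state file exists yet (or it cannot be parsed).
print(get_dashboard_state_value("theme", "dark"))

# Persist a preference; ConfigPanel and AlineDashboard read this same key on startup.
set_dashboard_state_value("theme", "light")
print(get_dashboard_state_value("theme", "dark"))  # -> "light"
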
realign/dashboard/widgets/agents_panel.py
CHANGED

@@ -388,6 +388,15 @@ class AgentsPanel(Container, can_focus=True):
                         classes="agent-share",
                     )
                 )
+                # Link button to copy share URL to clipboard
+                await row.mount(
+                    Button(
+                        "Link",
+                        id=f"link-{safe_id}",
+                        name=agent["id"],
+                        classes="agent-share",
+                    )
+                )
             else:
                 await row.mount(
                     Button(
@@ -554,6 +563,11 @@ class AgentsPanel(Container, can_focus=True):
             await self._sync_agent(agent_id)
             return
 
+        if btn_id.startswith("link-"):
+            agent_id = event.button.name or ""
+            await self._copy_share_link(agent_id)
+            return
+
         if btn_id.startswith("switch-"):
             terminal_id = event.button.name or ""
             await self._switch_to_terminal(terminal_id)
@@ -1260,3 +1274,26 @@ class AgentsPanel(Container, can_focus=True):
         else:
             error = result.get("error", "Unknown error")
             self.app.notify(f"Sync failed: {error}", title="Sync", severity="error")
+
+    async def _copy_share_link(self, agent_id: str) -> None:
+        """Copy the share link for an agent to clipboard."""
+        if not agent_id:
+            return
+
+        agent = next((a for a in self._agents if a["id"] == agent_id), None)
+        if not agent:
+            self.app.notify("Agent not found", title="Link", severity="error")
+            return
+
+        share_url = agent.get("share_url")
+        if not share_url:
+            self.app.notify("No share link available", title="Link", severity="warning")
+            return
+
+        copied = copy_text(self.app, share_url)
+        if copied:
+            self.app.notify("Share link copied to clipboard", title="Link", timeout=3)
+        else:
+            self.app.notify(
+                f"Failed to copy. Link: {share_url}", title="Link", severity="warning"
+            )

realign/dashboard/widgets/config_panel.py
CHANGED

@@ -7,6 +7,7 @@ from textual.containers import Horizontal
 from textual.widgets import Button, RadioButton, RadioSet, Static
 
 from ..tmux_manager import _run_outer_tmux
+from ..state import get_dashboard_state_value, set_dashboard_state_value
 from ...auth import (
     load_credentials,
     save_credentials,
@@ -113,6 +114,30 @@ class ConfigPanel(Static):
         width: auto;
         margin-right: 2;
     }
+
+    ConfigPanel .appearance-settings {
+        height: auto;
+        margin-top: 2;
+    }
+
+    ConfigPanel .appearance-settings .setting-row {
+        height: auto;
+    }
+
+    ConfigPanel .appearance-settings .setting-label {
+        width: auto;
+    }
+
+    ConfigPanel .appearance-settings RadioSet {
+        width: auto;
+        height: auto;
+        layout: horizontal;
+    }
+
+    ConfigPanel .appearance-settings RadioButton {
+        width: auto;
+        margin-right: 2;
+    }
     """
 
     def __init__(self) -> None:
@@ -131,6 +156,15 @@ class ConfigPanel(Static):
         yield Static(id="account-email", classes="account-email")
         yield Button("Login", id="auth-btn", variant="primary")
 
+        # Appearance settings section
+        with Static(classes="appearance-settings"):
+            yield Static("[bold]Appearance[/bold]", classes="section-title")
+            with Horizontal(classes="setting-row"):
+                yield Static("Theme:", classes="setting-label")
+                with RadioSet(id="theme-radio"):
+                    yield RadioButton("Dark", id="theme-dark", value=True)
+                    yield RadioButton("Light", id="theme-light")
+
         # Tmux settings section
         with Static(classes="tmux-settings"):
             yield Static("[bold]Tmux Settings[/bold]", classes="section-title")
@@ -160,6 +194,9 @@ class ConfigPanel(Static):
         # Update account status display
         self._update_account_status()
 
+        # Sync theme from persisted preference
+        self._sync_theme_radio()
+
         # Query and set the actual tmux border resize state
         self._sync_border_resize_radio()
 
@@ -184,7 +221,10 @@ class ConfigPanel(Static):
         """Handle radio set change events."""
         if self._syncing_radio:
             return  # Ignore events during sync
-        if event.radio_set.id == "
+        if event.radio_set.id == "theme-radio":
+            theme = "dark" if event.pressed.id == "theme-dark" else "light"
+            self._set_theme(theme)
+        elif event.radio_set.id == "border-resize-radio":
             # Check which radio button is selected
             enabled = event.pressed.id == "border-resize-enabled"
             self._toggle_border_resize(enabled)
@@ -313,6 +353,28 @@ class ConfigPanel(Static):
         else:
             self.app.notify("Failed to logout", title="Account", severity="error")
 
+    def _sync_theme_radio(self) -> None:
+        """Sync theme radio buttons with persisted dashboard preference."""
+        try:
+            theme = str(get_dashboard_state_value("theme", "dark")).strip().lower()
+            self._syncing_radio = True
+            try:
+                if theme == "light":
+                    radio = self.query_one("#theme-light", RadioButton)
+                else:
+                    radio = self.query_one("#theme-dark", RadioButton)
+                radio.value = True
+            finally:
+                self._syncing_radio = False
+        except Exception:
+            pass
+
+    def _set_theme(self, theme: str) -> None:
+        theme_value = "light" if theme.strip().lower() == "light" else "dark"
+        set_dashboard_state_value("theme", theme_value)
+        self.app.theme = "textual-light" if theme_value == "light" else "textual-dark"
+        self.app.notify(f"Theme set to {theme_value}", title="Appearance")
+
     def _sync_border_resize_radio(self) -> None:
         """Query tmux state and sync the radio buttons to match."""
         try:

realign/watcher_core.py
CHANGED
@@ -245,12 +245,129 @@ class DialogueWatcher:
         # a new run may take over and re-process it to avoid permanent stuck states.
         self.processing_turn_ttl_seconds = 20 * 60  # 20 minutes
 
+        # Layer 1: Session list cache (avoid re-scanning directories every 0.5s)
+        self._cached_session_list: list[Path] = []
+        self._session_list_last_scan: float = 0.0
+        self._session_list_scan_interval: float = 30.0
+
+        # Layer 2: Per-cycle sizes stash (shared between check_for_changes and idle check)
+        self._last_cycle_sizes: Dict[str, int] = {}
+
+        # Layer 3: Turn count cache (avoid re-parsing JSONL for unchanged files)
+        self._cached_turn_counts: dict[str, int] = {}
+        self._cached_total_turn_counts: dict[str, int] = {}
+        self._turn_count_file_stats: dict[str, tuple[float, int]] = {}
+
         # Signal directory for Stop hook integration
         self.signal_dir = Path.home() / ".aline" / ".signals"
         self.signal_dir.mkdir(parents=True, exist_ok=True)
         self.user_prompt_signal_dir = self.signal_dir / "user_prompt_submit"
         self.user_prompt_signal_dir.mkdir(parents=True, exist_ok=True)
 
+    def _get_cached_session_list(self, force_rescan: bool = False) -> list[Path]:
+        """Return cached list of active session files, re-scanning only every 30s.
+
+        Args:
+            force_rescan: If True, bypass the time gate and re-scan immediately.
+        """
+        now = time.time()
+        if (
+            not force_rescan
+            and self._cached_session_list
+            and (now - self._session_list_last_scan) < self._session_list_scan_interval
+        ):
+            return self._cached_session_list
+
+        try:
+            self._cached_session_list = find_all_active_sessions(self.config, project_path=None)
+            self._session_list_last_scan = now
+        except PermissionError:
+            if not hasattr(self, "_permission_error_logged"):
+                self._permission_error_logged = True
+                logger.error("PERMISSION DENIED: Cannot access Claude Code sessions directory")
+        except Exception as e:
+            logger.error(f"Error scanning session list: {e}", exc_info=True)
+
+        return self._cached_session_list
+
+    def _get_cycle_session_stats(self) -> tuple[list[Path], Dict[str, int], Dict[str, float]]:
+        """Single stat() pass per cycle over the cached session list.
+
+        Returns:
+            (session_files, sizes_dict, mtimes_dict)
+        """
+        session_files = self._get_cached_session_list()
+        sizes: Dict[str, int] = {}
+        mtimes: Dict[str, float] = {}
+        force_rescan = False
+
+        for session_file in list(session_files):  # copy so we can mutate
+            path_key = str(session_file)
+            try:
+                if session_file.is_dir():
+                    # Handle directory-based sessions (e.g., Antigravity brain directories)
+                    artifacts = ["task.md", "walkthrough.md", "implementation_plan.md"]
+                    total_size = 0
+                    max_mtime = 0.0
+                    for artifact_name in artifacts:
+                        artifact_path = session_file / artifact_name
+                        if artifact_path.exists():
+                            artifact_stat = artifact_path.stat()
+                            total_size += artifact_stat.st_size
+                            max_mtime = max(max_mtime, artifact_stat.st_mtime)
+                    if max_mtime > 0:
+                        sizes[path_key] = total_size
+                        mtimes[path_key] = max_mtime
+                else:
+                    stat = session_file.stat()
+                    sizes[path_key] = stat.st_size
+                    mtimes[path_key] = stat.st_mtime
+            except FileNotFoundError:
+                # File disappeared — prune from cache, force re-scan next cycle
+                if session_file in self._cached_session_list:
+                    self._cached_session_list.remove(session_file)
+                force_rescan = True
+            except Exception as e:
+                logger.debug(f"Error stat-ing {path_key}: {e}")
+
+        if force_rescan:
+            self._session_list_last_scan = 0.0  # Force re-scan next cycle
+
+        live_files = [f for f in session_files if str(f) in sizes]
+        return live_files, sizes, mtimes
+
+    def _get_cached_turn_counts(
+        self, session_file: Path, current_mtime: float, current_size: int
+    ) -> tuple[int, int]:
+        """Return (completed_turns, total_turns) for a session, using cache when file unchanged.
+
+        Args:
+            session_file: Path to the session file.
+            current_mtime: The mtime from the current cycle's stat pass.
+            current_size: The size from the current cycle's stat pass.
+
+        Returns:
+            (completed_turn_count, total_turn_count)
+        """
+        path_key = str(session_file)
+        cached_stats = self._turn_count_file_stats.get(path_key)
+
+        if cached_stats and cached_stats == (current_mtime, current_size):
+            return (
+                self._cached_turn_counts.get(path_key, 0),
+                self._cached_total_turn_counts.get(path_key, 0),
+            )
+
+        # File changed — re-parse
+        completed = self._count_complete_turns(session_file)
+        total = self._get_total_turn_count(session_file)
+
+        self._cached_turn_counts[path_key] = completed
+        self._cached_total_turn_counts[path_key] = total
+        self._turn_count_file_stats[path_key] = (current_mtime, current_size)
+
+        return completed, total
+
     def _maybe_link_codex_terminal(self, session_file: Path) -> None:
         """Best-effort: bind a Codex session file to the most likely active Codex terminal."""
         try:
@@ -468,11 +585,15 @@ class DialogueWatcher:
                 if self.config.enable_temp_turn_titles:
                     await self._check_user_prompt_submit_signals()
 
+                # Single stat pass per cycle (Layer 2)
+                cycle_sessions, cycle_sizes, cycle_mtimes = self._get_cycle_session_stats()
+                self._last_cycle_sizes = cycle_sizes
+
                 # Priority 2: Fallback polling mechanism (with debounce)
-                await self.check_for_changes()
+                await self.check_for_changes(cycle_sizes, cycle_mtimes)
 
                 # Check for idle sessions that need final commit
-                await self._check_idle_sessions_for_final_commit()
+                await self._check_idle_sessions_for_final_commit(cycle_sessions, cycle_mtimes)
 
                 await asyncio.sleep(0.5)  # Check every 0.5 seconds for responsiveness
             except Exception as e:
@@ -555,7 +676,15 @@ class DialogueWatcher:
                             logger.warning(
                                 f"Failed to enqueue stop-hook job for {session_id}: {e}"
                             )
-
+
+                        # Directly link session to agent (in case the turn job
+                        # was already processed by polling without agent_id
+                        # and the new enqueue was deduped).
+                        if agent_id and session_id:
+                            try:
+                                db.update_session_agent_id(session_id, agent_id)
+                            except Exception:
+                                pass
 
                         logger.info(
                             f"Enqueued turn_summary via Stop hook: {session_id} turn {target_turn} ({project_path.name})"
@@ -737,66 +866,11 @@ class DialogueWatcher:
     def _get_session_stats(self) -> tuple[Dict[str, int], Dict[str, float]]:
         """Get (sizes, mtimes) for all active session files.
 
-
-
-        - mtime: max mtime of all artifact files
-
-        This ensures changes to individual artifacts are detected.
+        Delegates to _get_cycle_session_stats() which uses the cached session list
+        and performs a single stat() pass.
         """
-        sizes
-
-        try:
-            session_files = find_all_active_sessions(self.config, project_path=None)
-            for session_file in session_files:
-                if not session_file.exists():
-                    continue
-                path_key = str(session_file)
-
-                # Handle directory-based sessions (e.g., Antigravity brain directories)
-                if session_file.is_dir():
-                    # Check for Antigravity-style artifacts
-                    artifacts = ["task.md", "walkthrough.md", "implementation_plan.md"]
-                    total_size = 0
-                    max_mtime = 0.0
-
-                    for artifact_name in artifacts:
-                        artifact_path = session_file / artifact_name
-                        if artifact_path.exists():
-                            artifact_stat = artifact_path.stat()
-                            total_size += artifact_stat.st_size
-                            max_mtime = max(max_mtime, artifact_stat.st_mtime)
-
-                    # Only track if at least one artifact exists
-                    if max_mtime > 0:
-                        sizes[path_key] = total_size
-                        mtimes[path_key] = max_mtime
-                else:
-                    # Regular file-based session
-                    stat = session_file.stat()
-                    sizes[path_key] = stat.st_size
-                    mtimes[path_key] = stat.st_mtime
-
-            logger.debug(f"Tracked {len(sizes)} session file(s) across all projects")
-        except PermissionError as e:
-            # macOS permission issue - only log once
-            if not hasattr(self, "_permission_error_logged"):
-                self._permission_error_logged = True
-                logger.error("PERMISSION DENIED: Cannot access Claude Code sessions directory")
-                print(
-                    "[Watcher] ✗ PERMISSION DENIED: Cannot access ~/.claude/projects/",
-                    file=sys.stderr,
-                )
-                print(
-                    "[Watcher] ⓘ Grant Full Disk Access to Acme in System Preferences",
-                    file=sys.stderr,
-                )
-                print(
-                    "[Watcher] ⓘ System Preferences → Privacy & Security → Full Disk Access → Add Acme",
-                    file=sys.stderr,
-                )
-        except Exception as e:
-            logger.error(f"Error getting session stats: {e}", exc_info=True)
-            print(f"[Watcher] Error getting session stats: {e}", file=sys.stderr)
+        _, sizes, mtimes = self._get_cycle_session_stats()
+        logger.debug(f"Tracked {len(sizes)} session file(s) across all projects")
         return sizes, mtimes
 
     def _get_session_sizes(self) -> Dict[str, int]:
@@ -965,23 +1039,22 @@ class DialogueWatcher:
             logger.warning(f"Failed to compute hash for {session_file.name}: {e}")
             return None
 
-    async def _check_idle_sessions_for_final_commit(self):
+    async def _check_idle_sessions_for_final_commit(self, session_files: Optional[list[Path]] = None, current_mtimes: Optional[Dict[str, float]] = None):
         """Check for idle sessions and trigger final commits if needed."""
         try:
             current_time = time.time()
-
-
+            if session_files is None:
+                session_files = self._get_cached_session_list()
+            if current_mtimes is None:
+                _, _, current_mtimes = self._get_cycle_session_stats()
 
             for session_file in session_files:
-                if not session_file.exists():
-                    continue
-
                 session_path = str(session_file)
+                mtime = current_mtimes.get(session_path)
+                if mtime is None:
+                    continue
 
                 try:
-                    # Get current mtime
-                    mtime = session_file.stat().st_mtime
-
                     # Initialize tracking if first time seeing this session
                     if session_path not in self.last_session_mtimes:
                         self.last_session_mtimes[session_path] = mtime
@@ -1010,13 +1083,16 @@ class DialogueWatcher:
 
                     session_id = session_file.stem
                     session_type = self._detect_session_type(session_file)
-
+                    # Use cached turn counts (Layer 3) — avoids re-parsing unchanged files
+                    file_size = self._last_cycle_sizes.get(session_path, 0)
+                    completed_count, total_turns = self._get_cached_turn_counts(
+                        session_file, mtime, file_size
+                    )
                    last_count = self.last_stop_reason_counts.get(session_path, 0)
 
                     # For Claude, also consider the last turn (not counted in completed_count).
                     last_turn_to_enqueue: Optional[int] = None
                     if session_type == "claude":
-                        total_turns = self._get_total_turn_count(session_file)
                         if total_turns > completed_count:
                             existing = db.get_turn_by_number(session_id, int(total_turns))
                             if existing is None:
@@ -1221,10 +1297,11 @@ class DialogueWatcher:
             logger.error(f"Trigger error for {session_file.name}: {e}")
             return 0
 
-    async def check_for_changes(self):
+    async def check_for_changes(self, current_sizes: Optional[Dict[str, int]] = None, current_mtimes: Optional[Dict[str, float]] = None):
         """Check if any session file has been modified."""
         try:
-            current_sizes
+            if current_sizes is None or current_mtimes is None:
+                current_sizes, current_mtimes = self._get_session_stats()
 
             # Detect changed files
             changed_files = []

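The three cache layers above all hinge on the same idea: key expensive work on the (mtime, size) pair collected in the single per-cycle stat pass, and skip the work whenever that signature has not moved. A standalone sketch of the pattern; class and function names here are illustrative, not taken from the package:

from pathlib import Path
from typing import Callable

class SignatureCache:
    """Re-run an expensive parse only when a file's (mtime, size) signature changes."""

    def __init__(self) -> None:
        self._signatures: dict[str, tuple[float, int]] = {}
        self._results: dict[str, int] = {}

    def get(self, path: Path, mtime: float, size: int, parse: Callable[[Path], int]) -> int:
        key = str(path)
        if self._signatures.get(key) == (mtime, size):
            return self._results[key]      # unchanged since last cycle: cache hit
        result = parse(path)               # changed (or new): pay the parse cost once
        self._signatures[key] = (mtime, size)
        self._results[key] = result
        return result

# Usage with the sizes/mtimes dicts produced once per 0.5s polling cycle:
# cache = SignatureCache()
# turns = cache.get(session_file, mtimes[str(session_file)], sizes[str(session_file)], count_turns)
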
realign/worker_core.py
CHANGED
@@ -279,6 +279,8 @@ class AlineWorker:
             return
 
         agent_info_id = None
+
+        # Method 1: Match via agents table session_id (works for Codex)
         for agent in agents:
             try:
                 if (agent.session_id or "").strip() != session_id:
@@ -290,6 +292,21 @@ class AlineWorker:
             except Exception:
                 continue
 
+        # Method 2: Match via windowlink table (works for Claude)
+        if not agent_info_id:
+            try:
+                conn = self.db._get_connection()
+                row = conn.execute(
+                    """SELECT w.agent_id FROM windowlink w
+                       WHERE w.session_id = ? AND w.agent_id IS NOT NULL
+                       ORDER BY w.ts DESC LIMIT 1""",
+                    (session_id,),
+                ).fetchone()
+                if row and row[0]:
+                    agent_info_id = row[0]
+            except Exception:
+                pass
+
         if agent_info_id:
             try:
                 self.db.update_session_agent_id(session_id, agent_info_id)

File without changes: {aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/WHEEL
File without changes: {aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/entry_points.txt
File without changes: {aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/licenses/LICENSE
File without changes: {aline_ai-0.7.0.dist-info → aline_ai-0.7.2.dist-info}/top_level.txt