aline-ai 0.6.5__py3-none-any.whl → 0.6.7__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/METADATA +1 -1
- {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/RECORD +41 -34
- realign/__init__.py +1 -1
- realign/agent_names.py +79 -0
- realign/claude_hooks/stop_hook.py +3 -0
- realign/claude_hooks/terminal_state.py +43 -1
- realign/claude_hooks/user_prompt_submit_hook.py +3 -0
- realign/cli.py +62 -0
- realign/codex_detector.py +18 -3
- realign/codex_home.py +65 -16
- realign/codex_terminal_linker.py +18 -7
- realign/commands/agent.py +109 -0
- realign/commands/doctor.py +74 -1
- realign/commands/export_shares.py +448 -0
- realign/commands/import_shares.py +203 -1
- realign/commands/search.py +58 -29
- realign/commands/sync_agent.py +347 -0
- realign/dashboard/app.py +9 -9
- realign/dashboard/clipboard.py +54 -0
- realign/dashboard/screens/__init__.py +4 -0
- realign/dashboard/screens/agent_detail.py +333 -0
- realign/dashboard/screens/create_agent_info.py +244 -0
- realign/dashboard/screens/event_detail.py +6 -27
- realign/dashboard/styles/dashboard.tcss +22 -28
- realign/dashboard/tmux_manager.py +36 -10
- realign/dashboard/widgets/__init__.py +2 -2
- realign/dashboard/widgets/agents_panel.py +1248 -0
- realign/dashboard/widgets/events_table.py +4 -27
- realign/dashboard/widgets/sessions_table.py +4 -27
- realign/db/base.py +69 -0
- realign/db/locks.py +4 -0
- realign/db/schema.py +111 -2
- realign/db/sqlite_db.py +360 -2
- realign/events/agent_summarizer.py +157 -0
- realign/events/session_summarizer.py +25 -0
- realign/watcher_core.py +193 -5
- realign/worker_core.py +59 -1
- realign/dashboard/widgets/terminal_panel.py +0 -1653
- {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/WHEEL +0 -0
- {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/entry_points.txt +0 -0
- {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/licenses/LICENSE +0 -0
- {aline_ai-0.6.5.dist-info → aline_ai-0.6.7.dist-info}/top_level.txt +0 -0
realign/commands/search.py
CHANGED
@@ -335,9 +335,28 @@ def search_command(
     context_session_ids = get_context_session_ids()
     context_event_ids = get_context_event_ids()
 
+    # Apply agent scoping if ALINE_AGENT_ID is set
+    agent_session_ids = None
+    if not no_context:
+        import os
+
+        agent_id = os.environ.get("ALINE_AGENT_ID")
+        if agent_id:
+            agent_sessions = db.get_sessions_by_agent_id(agent_id)
+            # Always set agent_session_ids when agent_id exists
+            # (empty list means no sessions for this agent -> empty results)
+            agent_session_ids = [s.id for s in agent_sessions]
+
     # Parse session IDs if provided (resolve prefixes)
     session_ids = _resolve_id_prefixes(db, "sessions", sessions) or None
 
+    # Intersect with agent sessions first (highest priority)
+    if agent_session_ids is not None:
+        if session_ids:
+            session_ids = list(set(session_ids) & set(agent_session_ids))
+        else:
+            session_ids = agent_session_ids if agent_session_ids else []
+
     # Intersect with context sessions
     if context_session_ids:
         if session_ids:
@@ -372,43 +391,53 @@ def search_command(
     turn_ids = _resolve_id_prefixes(db, "turns", turns) or None
 
     # 1. Search Events (events don't have session scope, skip if sessions/events filter is active)
-    … (removed line not shown in the registry diff)
+    # Use 'is None' to distinguish "no filter" from "empty filter results"
+    if type in ("all", "event") and session_ids is None and event_ids is None:
         events = db.search_events(query, limit=limit, use_regex=regex, ignore_case=ignore_case)
         results["events"] = events
 
-    # 2. Search Turns
+    # 2. Search Turns (skip if session filter results in empty list)
     if type in ("all", "turn"):
-        … (8 removed lines not shown in the registry diff)
+        if session_ids is not None and len(session_ids) == 0:
+            results["turns"] = []
+        else:
+            turns = db.search_conversations(
+                query,
+                limit=limit,
+                use_regex=regex,
+                ignore_case=ignore_case,
+                session_ids=session_ids if session_ids else None,
+            )
+            results["turns"] = turns
 
-    # 3. Search Sessions
+    # 3. Search Sessions (skip if session filter results in empty list)
     if type in ("all", "session"):
-        … (8 removed lines not shown in the registry diff)
+        if session_ids is not None and len(session_ids) == 0:
+            results["sessions"] = []
+        else:
+            sessions_results = db.search_sessions(
+                query,
+                limit=limit,
+                use_regex=regex,
+                ignore_case=ignore_case,
+                session_ids=session_ids if session_ids else None,
+            )
+            results["sessions"] = sessions_results
 
-    # 4. Search Turn Content
+    # 4. Search Turn Content (skip if session filter results in empty list)
    if type == "content":
-        … (9 removed lines not shown in the registry diff)
+        if session_ids is not None and len(session_ids) == 0:
+            results["content"] = []
+        else:
+            content_results = db.search_turn_content(
+                query,
+                limit=limit,
+                use_regex=regex,
+                ignore_case=ignore_case,
+                session_ids=session_ids if session_ids else None,
+                turn_ids=turn_ids,
+            )
+            results["content"] = content_results
 
     # === Grep-style output for regex mode ===
     if regex:
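The subtle part of this change is the three-valued session filter: None means "no filter", an empty list means "a filter was applied and matched nothing", and a non-empty list is intersected with any explicit session argument. A minimal standalone sketch of that semantics, where agent_lookup is a stand-in for db.get_sessions_by_agent_id and only the ALINE_AGENT_ID variable is taken from the code above:

import os
from typing import Callable, Optional

def scoped_session_ids(
    explicit: Optional[list[str]],
    agent_lookup: Callable[[str], list[str]],
) -> Optional[list[str]]:
    """None = no filter; [] = filter matched nothing; else = allowed ids."""
    agent_id = os.environ.get("ALINE_AGENT_ID")
    if not agent_id:
        return explicit
    agent_ids = agent_lookup(agent_id)  # may be [] -> all searches come back empty
    if explicit:
        return list(set(explicit) & set(agent_ids))
    return agent_ids

os.environ["ALINE_AGENT_ID"] = "agent-123"
print(scoped_session_ids(["s2", "s3"], lambda _: ["s1", "s2"]))  # ['s2']
print(scoped_session_ids(None, lambda _: []))                    # [] (not None)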
realign/commands/sync_agent.py
ADDED
@@ -0,0 +1,347 @@
#!/usr/bin/env python3
"""
Sync agent command - Bidirectional sync for shared agents.

Pull remote sessions, merge locally (union of sessions, dedup by content_hash),
push merged result back. Uses optimistic locking via sync_version.

Sync works with unencrypted shares only.
"""

import json
import os
import logging
from datetime import datetime, timezone
from typing import Optional, Dict, Any, Callable

try:
    import httpx

    HTTPX_AVAILABLE = True
except ImportError:
    HTTPX_AVAILABLE = False

from ..logging_config import setup_logger

logger = setup_logger("realign.commands.sync_agent", "sync_agent.log")

MAX_SYNC_RETRIES = 3


def sync_agent_command(
    agent_id: str,
    backend_url: Optional[str] = None,
    progress_callback: Optional[Callable[[str], None]] = None,
) -> dict:
    """
    Sync an agent's sessions with the remote share.

    Algorithm:
    1. Load local state (agent_info, sessions, content hashes)
    2. Pull remote state (full download via export endpoint)
    3. Merge: union of sessions deduped by content_hash, last-write-wins for name/desc
    4. Push merged state via PUT with optimistic locking
    5. Update local sync metadata

    Args:
        agent_id: The agent_info ID to sync
        backend_url: Backend server URL (uses config default if None)
        progress_callback: Optional callback for progress updates

    Returns:
        {"success": True, "sessions_pulled": N, "sessions_pushed": N, ...} on success
        {"success": False, "error": str} on failure
    """
    def _progress(msg: str) -> None:
        if progress_callback:
            progress_callback(msg)

    if not HTTPX_AVAILABLE:
        return {"success": False, "error": "httpx package not installed"}

    # Get backend URL
    if backend_url is None:
        from ..config import ReAlignConfig

        config = ReAlignConfig.load()
        backend_url = config.share_backend_url or "https://realign-server.vercel.app"

    # Get database
    from ..db import get_database

    db = get_database()

    # 1. Load local state
    _progress("Loading local agent data...")

    agent_info = db.get_agent_info(agent_id)
    if not agent_info:
        return {"success": False, "error": f"Agent not found: {agent_id}"}

    if not agent_info.share_id or not agent_info.share_url:
        return {"success": False, "error": "Agent has no share metadata (not shared yet)"}

    token = agent_info.share_admin_token or agent_info.share_contributor_token
    if not token:
        return {"success": False, "error": "No token available for sync (need admin or contributor token)"}

    share_id = agent_info.share_id
    local_sync_version = agent_info.sync_version or 0

    local_sessions = db.get_sessions_by_agent_id(agent_id)
    local_content_hashes = db.get_agent_content_hashes(agent_id)

    logger.info(
        f"Sync: agent={agent_id}, share={share_id}, "
        f"local_sessions={len(local_sessions)}, local_hashes={len(local_content_hashes)}"
    )

    # 2. Pull remote state
    _progress("Pulling remote data...")

    remote_data = _pull_remote(backend_url, share_id)
    if not remote_data.get("success"):
        return {"success": False, "error": f"Failed to pull remote: {remote_data.get('error')}"}

    conversation_data = remote_data["data"]
    remote_sync_meta = conversation_data.get("sync_metadata", {})
    remote_sync_version = remote_sync_meta.get("sync_version", 0)

    remote_sessions_data = conversation_data.get("sessions", [])
    remote_event = conversation_data.get("event", {})

    # 3. Merge
    _progress("Merging sessions...")

    # Collect remote content hashes
    remote_content_hashes = set()
    for session_data in remote_sessions_data:
        for turn_data in session_data.get("turns", []):
            h = turn_data.get("content_hash")
            if h:
                remote_content_hashes.add(h)

    # Import new remote sessions/turns locally
    sessions_pulled = 0
    from .import_shares import import_session_with_turns

    for session_data in remote_sessions_data:
        session_id = session_data.get("session_id", "")
        session_turns = session_data.get("turns", [])

        # Check if any turns in this session are new to us
        new_turns = [
            t for t in session_turns
            if t.get("content_hash") and t["content_hash"] not in local_content_hashes
        ]

        if not new_turns:
            continue

        # Import the session (import_session_with_turns handles dedup by content_hash)
        try:
            # Suppress auto-summaries during sync
            os.environ["REALIGN_DISABLE_AUTO_SUMMARIES"] = "1"
            import_result = import_session_with_turns(
                session_data, f"agent-{agent_id}", agent_info.share_url, db, force=False
            )
            if import_result.get("sessions", 0) > 0 or import_result.get("turns", 0) > 0:
                sessions_pulled += 1

            # Link session to agent
            db.update_session_agent_id(session_id, agent_id)
        except Exception as e:
            logger.error(f"Failed to import remote session {session_id}: {e}")

    # Merge name/description: last-write-wins by updated_at
    description_updated = False
    remote_updated_at = remote_event.get("updated_at")
    if remote_updated_at:
        try:
            remote_dt = datetime.fromisoformat(remote_updated_at.replace("Z", "+00:00"))
            local_dt = agent_info.updated_at
            if hasattr(local_dt, "tzinfo") and local_dt.tzinfo is None:
                local_dt = local_dt.replace(tzinfo=timezone.utc)
            if remote_dt > local_dt:
                remote_name = remote_event.get("title")
                remote_desc = remote_event.get("description")
                updates = {}
                if remote_name and remote_name != agent_info.name:
                    updates["name"] = remote_name
                if remote_desc is not None and remote_desc != agent_info.description:
                    updates["description"] = remote_desc
                if updates:
                    db.update_agent_info(agent_id, **updates)
                    description_updated = True
                    agent_info = db.get_agent_info(agent_id)
        except Exception as e:
            logger.warning(f"Failed to compare timestamps for name/desc merge: {e}")

    # 4. Build merged data and push
    _progress("Pushing merged data...")

    # Reload local state after merge
    local_sessions = db.get_sessions_by_agent_id(agent_id)
    local_content_hashes = db.get_agent_content_hashes(agent_id)

    # Count sessions pushed (local sessions with turns not in remote)
    sessions_pushed = 0
    for session in local_sessions:
        turns = db.get_turns_for_session(session.id)
        new_local_turns = [t for t in turns if t.content_hash not in remote_content_hashes]
        if new_local_turns:
            sessions_pushed += 1

    # Build full conversation data for push
    merged_conversation = _build_merged_conversation_data(
        agent_info=agent_info,
        agent_id=agent_id,
        sessions=local_sessions,
        db=db,
        contributor_token=agent_info.share_contributor_token,
    )

    # Push with optimistic locking + retry
    from .export_shares import _update_share_content

    new_version = remote_sync_version
    for attempt in range(MAX_SYNC_RETRIES):
        try:
            push_result = _update_share_content(
                backend_url=backend_url,
                share_id=share_id,
                token=token,
                conversation_data=merged_conversation,
                expected_version=new_version,
            )
            new_version = push_result.get("version", new_version + 1)
            break
        except Exception as e:
            error_str = str(e)
            if "409" in error_str and attempt < MAX_SYNC_RETRIES - 1:
                _progress(f"Version conflict, retrying ({attempt + 2}/{MAX_SYNC_RETRIES})...")
                # Re-pull and retry
                remote_data = _pull_remote(backend_url, share_id)
                if remote_data.get("success"):
                    conv = remote_data["data"]
                    new_version = conv.get("sync_metadata", {}).get("sync_version", 0)
                continue
            else:
                logger.error(f"Push failed after {attempt + 1} attempts: {e}")
                return {"success": False, "error": f"Push failed: {e}"}

    # 5. Update local sync metadata
    now_iso = datetime.now(timezone.utc).isoformat()
    db.update_agent_sync_metadata(
        agent_id,
        last_synced_at=now_iso,
        sync_version=new_version,
    )

    _progress("Sync complete!")

    return {
        "success": True,
        "sessions_pulled": sessions_pulled,
        "sessions_pushed": sessions_pushed,
        "description_updated": description_updated,
        "new_sync_version": new_version,
    }


def _pull_remote(backend_url: str, share_id: str) -> dict:
    """Pull remote share data via the download_share_data helper."""
    try:
        from .import_shares import download_share_data

        share_url = f"{backend_url}/share/{share_id}"
        return download_share_data(share_url, password=None)
    except Exception as e:
        return {"success": False, "error": str(e)}


def _build_merged_conversation_data(
    agent_info,
    agent_id: str,
    sessions,
    db,
    contributor_token: Optional[str] = None,
) -> dict:
    """
    Build a full conversation data dict from local agent state.

    Mirrors the structure of build_enhanced_conversation_data but works
    directly from DB records without ExportableSession wrappers.
    """
    import json as json_module

    event_data = {
        "event_id": f"agent-{agent_id}",
        "title": agent_info.name or "Agent Sessions",
        "description": agent_info.description or "",
        "event_type": "agent",
        "status": "active",
        "created_at": agent_info.created_at.isoformat() if agent_info.created_at else None,
        "updated_at": agent_info.updated_at.isoformat() if agent_info.updated_at else None,
    }

    sessions_data = []
    for session in sessions:
        turns = db.get_turns_for_session(session.id)
        turns_data = []
        for turn in turns:
            turn_content = db.get_turn_content(turn.id)
            messages = []
            if turn_content:
                for line in turn_content.strip().split("\n"):
                    if line.strip():
                        try:
                            messages.append(json_module.loads(line))
                        except Exception:
                            continue

            turns_data.append({
                "turn_id": turn.id,
                "turn_number": turn.turn_number,
                "content_hash": turn.content_hash,
                "timestamp": turn.timestamp.isoformat() if turn.timestamp else None,
                "llm_title": turn.llm_title or "",
                "llm_description": turn.llm_description,
                "user_message": turn.user_message,
                "assistant_summary": turn.assistant_summary,
                "model_name": turn.model_name,
                "git_commit_hash": turn.git_commit_hash,
                "messages": messages,
            })

        sessions_data.append({
            "session_id": session.id,
            "session_type": session.session_type or "unknown",
            "workspace_path": session.workspace_path,
            "session_title": session.session_title,
            "session_summary": session.session_summary,
            "started_at": session.started_at.isoformat() if session.started_at else None,
            "last_activity_at": session.last_activity_at.isoformat() if session.last_activity_at else None,
            "created_by": session.created_by,
            "shared_by": session.shared_by,
            "turns": turns_data,
        })

    username = os.environ.get("USER") or os.environ.get("USERNAME") or "anonymous"

    result = {
        "version": "2.1",
        "username": username,
        "time": datetime.now(timezone.utc).isoformat(),
        "event": event_data,
        "sessions": sessions_data,
        "ui_metadata": {},
    }

    if contributor_token:
        result["sync_metadata"] = {
            "contributor_token": contributor_token,
            "sync_version": agent_info.sync_version or 0,
        }

    return result
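Note the push loop treats an HTTP 409 as a version conflict: it re-pulls to pick up the newer sync_version and retries, up to MAX_SYNC_RETRIES times. For reference, a hypothetical caller of the command above; the function name and return shape come from the listing, while the agent id and surrounding context are made up:

from realign.commands.sync_agent import sync_agent_command

result = sync_agent_command(
    "agent-123",  # hypothetical agent_info ID
    progress_callback=lambda msg: print(f"[sync] {msg}"),
)
if result["success"]:
    print(
        f"pulled={result['sessions_pulled']} pushed={result['sessions_pushed']} "
        f"version={result['new_sync_version']}"
    )
else:
    print(f"sync failed: {result['error']}")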
realign/dashboard/app.py
CHANGED
@@ -16,7 +16,7 @@ from .widgets import (
     WatcherPanel,
     WorkerPanel,
     ConfigPanel,
-    … (removed line not shown in the registry diff)
+    AgentsPanel,
 )
 
 # Environment variable to control terminal mode
@@ -104,9 +104,9 @@ class AlineDashboard(App):
         try:
             yield AlineHeader()
             tab_ids = self._tab_ids()
-            with TabbedContent(initial=tab_ids[0] if tab_ids else "…
-            with TabPane("Agents", id="…
-            yield …
+            with TabbedContent(initial=tab_ids[0] if tab_ids else "agents"):
+                with TabPane("Agents", id="agents"):
+                    yield AgentsPanel()
             if self.dev_mode:
                 with TabPane("Watcher", id="watcher"):
                     yield WatcherPanel()
@@ -122,8 +122,8 @@ class AlineDashboard(App):
 
     def _tab_ids(self) -> list[str]:
         if self.dev_mode:
-            return ["…
-        return ["…
+            return ["agents", "watcher", "worker", "config"]
+        return ["agents", "config"]
 
     def on_mount(self) -> None:
         """Apply theme based on system settings and watch for changes."""
@@ -205,14 +205,14 @@ class AlineDashboard(App):
         tabbed_content = self.query_one(TabbedContent)
         active_tab_id = tabbed_content.active
 
-        if active_tab_id == "…
+        if active_tab_id == "agents":
+            self.query_one(AgentsPanel).refresh_data()
+        elif active_tab_id == "watcher":
             self.query_one(WatcherPanel).refresh_data()
         elif active_tab_id == "worker":
             self.query_one(WorkerPanel).refresh_data()
         elif active_tab_id == "config":
             self.query_one(ConfigPanel).refresh_data()
-        elif active_tab_id == "terminal":
-            await self.query_one(TerminalPanel).refresh_data()
 
     def action_page_next(self) -> None:
         """Go to next page in current panel."""
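The Terminal tab is gone entirely (terminal_panel.py is deleted) and Agents becomes the default tab. For readers unfamiliar with Textual's tab API, a minimal self-contained sketch of the same wiring, with Static placeholders standing in for the real panels:

from textual.app import App, ComposeResult
from textual.widgets import Static, TabbedContent, TabPane

class TabsDemo(App):
    def compose(self) -> ComposeResult:
        # Same shape as AlineDashboard.compose: the initial= id must match
        # one of the TabPane ids.
        with TabbedContent(initial="agents"):
            with TabPane("Agents", id="agents"):
                yield Static("agents panel goes here")
            with TabPane("Config", id="config"):
                yield Static("config panel goes here")

if __name__ == "__main__":
    TabsDemo().run()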
realign/dashboard/clipboard.py
ADDED
@@ -0,0 +1,54 @@
"""Clipboard helpers for the dashboard."""

from __future__ import annotations

import os
import shutil
import subprocess


def _run_copy(command: list[str], text: str) -> bool:
    try:
        return (
            subprocess.run(
                command,
                input=text,
                text=True,
                capture_output=False,
                check=False,
            ).returncode
            == 0
        )
    except Exception:
        return False


def copy_text(app, text: str) -> bool:
    if not text:
        return False

    if shutil.which("pbcopy"):
        if _run_copy(["pbcopy"], text):
            return True

    if os.name == "nt" and shutil.which("clip"):
        if _run_copy(["clip"], text):
            return True

    if shutil.which("wl-copy"):
        if _run_copy(["wl-copy"], text):
            return True

    if shutil.which("xclip"):
        if _run_copy(["xclip", "-selection", "clipboard"], text):
            return True

    if shutil.which("xsel"):
        if _run_copy(["xsel", "--clipboard", "--input"], text):
            return True

    try:
        app.copy_to_clipboard(text)
        return True
    except Exception:
        return False
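copy_text tries native clipboard utilities in order (pbcopy on macOS, clip on Windows, wl-copy/xclip/xsel on Linux) and only then falls back to Textual's App.copy_to_clipboard, which in recent Textual releases relies on the terminal supporting the OSC 52 escape sequence. A hypothetical call site inside a dashboard widget; everything except copy_text is illustrative:

from realign.dashboard.clipboard import copy_text

class SharePanel:  # stand-in for a real Textual widget, where self.app is the running App
    def __init__(self, app):
        self.app = app

    def copy_share_url(self, url: str) -> None:
        if copy_text(self.app, url):
            print("copied")      # a real widget would use self.notify(...)
        else:
            print("copy failed")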
realign/dashboard/screens/__init__.py
CHANGED
@@ -2,16 +2,20 @@
 
 from .session_detail import SessionDetailScreen
 from .event_detail import EventDetailScreen
+from .agent_detail import AgentDetailScreen
 from .create_event import CreateEventScreen
 from .create_agent import CreateAgentScreen
+from .create_agent_info import CreateAgentInfoScreen
 from .share_import import ShareImportScreen
 from .help_screen import HelpScreen
 
 __all__ = [
     "SessionDetailScreen",
     "EventDetailScreen",
+    "AgentDetailScreen",
     "CreateEventScreen",
     "CreateAgentScreen",
+    "CreateAgentInfoScreen",
     "ShareImportScreen",
     "HelpScreen",
 ]