aline-ai 0.6.2__py3-none-any.whl → 0.6.4__py3-none-any.whl
- {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/METADATA +1 -1
- {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/RECORD +38 -37
- realign/__init__.py +1 -1
- realign/adapters/__init__.py +0 -3
- realign/adapters/codex.py +14 -9
- realign/cli.py +42 -236
- realign/codex_detector.py +72 -32
- realign/codex_home.py +85 -0
- realign/codex_terminal_linker.py +172 -0
- realign/commands/__init__.py +2 -2
- realign/commands/add.py +89 -9
- realign/commands/doctor.py +495 -0
- realign/commands/export_shares.py +154 -226
- realign/commands/init.py +66 -4
- realign/commands/watcher.py +30 -80
- realign/config.py +9 -46
- realign/dashboard/app.py +7 -11
- realign/dashboard/screens/event_detail.py +0 -3
- realign/dashboard/screens/session_detail.py +0 -1
- realign/dashboard/tmux_manager.py +129 -4
- realign/dashboard/widgets/config_panel.py +175 -241
- realign/dashboard/widgets/events_table.py +71 -128
- realign/dashboard/widgets/sessions_table.py +77 -136
- realign/dashboard/widgets/terminal_panel.py +349 -27
- realign/dashboard/widgets/watcher_panel.py +0 -2
- realign/db/sqlite_db.py +77 -2
- realign/events/event_summarizer.py +76 -35
- realign/events/session_summarizer.py +73 -32
- realign/hooks.py +334 -647
- realign/llm_client.py +201 -520
- realign/triggers/__init__.py +0 -2
- realign/triggers/next_turn_trigger.py +4 -5
- realign/triggers/registry.py +1 -4
- realign/watcher_core.py +53 -35
- realign/adapters/antigravity.py +0 -159
- realign/triggers/antigravity_trigger.py +0 -140
- {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/WHEEL +0 -0
- {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/entry_points.txt +0 -0
- {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/licenses/LICENSE +0 -0
- {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/top_level.txt +0 -0
realign/events/event_summarizer.py (+76 -35)

@@ -9,7 +9,7 @@ from ..db.sqlite_db import SQLiteDatabase
 from ..db.base import SessionRecord
 from ..db.locks import lease_lock, lock_key_for_event_summary, make_lock_owner
 from .debouncer import Debouncer
-from ..llm_client import extract_json, call_llm
+from ..llm_client import extract_json, call_llm_cloud
 
 logger = logging.getLogger(__name__)
 
@@ -201,41 +201,82 @@ def _generate_event_summary_llm(sessions: List[SessionRecord]) -> Tuple[str, str
             }
         )
 
-
-
-    user_prompt = json.dumps(
-        {
-            "total_sessions": len(sessions),
-            "sessions": sessions_data,
-        },
-        ensure_ascii=False,
-        indent=2,
-    )
-
+    # Try cloud provider first if user is logged in
     try:
-        # Use unified LLM client
-        _, response = call_llm(
-            system_prompt=system_prompt,
-            user_prompt=user_prompt,
-            provider="auto",  # Try Claude first, fallback to OpenAI
-            max_tokens=500,
-            purpose="event_summary",
-        )
-
-        if not response:
-            logger.warning("LLM returned empty response, using fallback")
-            return _fallback_event_summary(sessions)
-
-        result = extract_json(response)
-
-        title = result.get("event_title", "Untitled Event")[:100]
-        description = result.get("event_description", "")
-
-        return title, description
-
-    except Exception as e:
-        logger.warning(f"LLM event summary failed, using fallback: {e}")
-        return _fallback_event_summary(sessions)
+        from ..auth import is_logged_in
+
+        if is_logged_in():
+            logger.debug("Attempting cloud LLM for event summary")
+            # Load user custom prompt if available
+            custom_prompt = None
+            user_prompt_path = Path.home() / ".aline" / "prompts" / "event_summary.md"
+            try:
+                if user_prompt_path.exists():
+                    custom_prompt = user_prompt_path.read_text(encoding="utf-8").strip()
+            except Exception:
+                pass
+
+            _, result = call_llm_cloud(
+                task="event_summary",
+                payload={"sessions": sessions_data},
+                custom_prompt=custom_prompt,
+                silent=True,
+            )
+
+            if result:
+                title = result.get("event_title", "Untitled Event")[:100]
+                description = result.get("event_description", "")
+                logger.info(f"Cloud LLM event summary success: title={title[:50]}...")
+                return title, description
+            else:
+                # Cloud LLM failed, use fallback (local fallback disabled)
+                logger.warning("Cloud LLM event summary failed, using fallback")
+                return _fallback_event_summary(sessions)
+    except ImportError:
+        logger.debug("Auth module not available, skipping cloud LLM")
+
+    # User not logged in, use fallback (local fallback disabled)
+    logger.warning("Not logged in, cannot use cloud LLM for event summary")
+    return _fallback_event_summary(sessions)
+
+    # =========================================================================
+    # LOCAL LLM FALLBACK DISABLED - Code kept for reference
+    # =========================================================================
+    # system_prompt = _get_event_summary_prompt()
+    #
+    # user_prompt = json.dumps(
+    #     {
+    #         "total_sessions": len(sessions),
+    #         "sessions": sessions_data,
+    #     },
+    #     ensure_ascii=False,
+    #     indent=2,
+    # )
+    #
+    # try:
+    #     # Use unified LLM client
+    #     _, response = call_llm(
+    #         system_prompt=system_prompt,
+    #         user_prompt=user_prompt,
+    #         provider="auto",  # Try Claude first, fallback to OpenAI
+    #         max_tokens=500,
+    #         purpose="event_summary",
+    #     )
+    #
+    #     if not response:
+    #         logger.warning("LLM returned empty response, using fallback")
+    #         return _fallback_event_summary(sessions)
+    #
+    #     result = extract_json(response)
+    #
+    #     title = result.get("event_title", "Untitled Event")[:100]
+    #     description = result.get("event_description", "")
+    #
+    #     return title, description
+    #
+    # except Exception as e:
+    #     logger.warning(f"LLM event summary failed, using fallback: {e}")
+    #     return _fallback_event_summary(sessions)
 
 
 def _fallback_event_summary(sessions: List[SessionRecord]) -> Tuple[str, str]:
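The rewrite also gives summaries a per-user prompt override: if ~/.aline/prompts/event_summary.md exists, its text is passed to the cloud endpoint as custom_prompt. Below is a minimal sketch of exercising that override from outside the package, assuming the relative imports above resolve to realign.auth and realign.llm_client; the payload contents are made up for illustration.

from pathlib import Path

from realign.auth import is_logged_in          # assumed top-level path of ..auth
from realign.llm_client import call_llm_cloud  # assumed top-level path of ..llm_client

# Hypothetical override: steer how event titles are written.
override = Path.home() / ".aline" / "prompts" / "event_summary.md"
override.parent.mkdir(parents=True, exist_ok=True)
override.write_text("Title events as one imperative sentence.", encoding="utf-8")

if is_logged_in():
    # Same call shape as the hunk above: result is a parsed dict, or falsy on failure.
    _, result = call_llm_cloud(
        task="event_summary",
        payload={"sessions": [{"title": "Fix flaky watcher test"}]},  # made-up payload
        custom_prompt=override.read_text(encoding="utf-8").strip(),
        silent=True,
    )
    if result:
        print(result.get("event_title"), "-", result.get("event_description"))

session_summarizer.py, next, gets the identical treatment with a session_summary.md override.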
realign/events/session_summarizer.py (+73 -32)

@@ -10,7 +10,7 @@ from typing import List, Tuple, Optional
 from ..db.sqlite_db import SQLiteDatabase
 from ..db.base import TurnRecord
 from ..db.locks import lease_lock, lock_key_for_session_summary, make_lock_owner
-from ..llm_client import extract_json, call_llm
+from ..llm_client import extract_json, call_llm_cloud
 
 logger = logging.getLogger(__name__)
 
@@ -267,41 +267,82 @@ def _generate_session_summary_llm(turns: List[TurnRecord]) -> Tuple[str, str]
             }
         )
 
-
-
-    user_prompt = json.dumps(
-        {
-            "total_turns": len(turns),
-            "turns": turns_data,
-        },
-        ensure_ascii=False,
-        indent=2,
-    )
-
+    # Try cloud provider first if user is logged in
     try:
-
-        _, response = call_llm(
-            system_prompt=system_prompt,
-            user_prompt=user_prompt,
-            provider="auto",  # Try Claude first, fallback to OpenAI
-            max_tokens=500,
-            purpose="session_summary",
-        )
-
-        if not response:
-            logger.warning("LLM returned empty response, using fallback")
-            return _fallback_summary(turns)
-
-        result = extract_json(response)
+        from ..auth import is_logged_in
 
-        title = result.get("session_title", "Untitled Session")[:80]
-        summary = result.get("session_summary", "")
+        if is_logged_in():
+            logger.debug("Attempting cloud LLM for session summary")
+            # Load user custom prompt if available
+            custom_prompt = None
+            user_prompt_path = Path.home() / ".aline" / "prompts" / "session_summary.md"
+            try:
+                if user_prompt_path.exists():
+                    custom_prompt = user_prompt_path.read_text(encoding="utf-8").strip()
+            except Exception:
+                pass
 
-        return title, summary
+            _, result = call_llm_cloud(
+                task="session_summary",
+                payload={"turns": turns_data},
+                custom_prompt=custom_prompt,
+                silent=True,
+            )
 
-    except Exception as e:
-        logger.warning(f"LLM session summary failed, using fallback: {e}")
-        return _fallback_summary(turns)
+            if result:
+                title = result.get("session_title", "Untitled Session")[:80]
+                summary = result.get("session_summary", "")
+                logger.info(f"Cloud LLM session summary success: title={title[:50]}...")
+                return title, summary
+            else:
+                # Cloud LLM failed, use fallback (local fallback disabled)
+                logger.warning("Cloud LLM session summary failed, using fallback")
+                return _fallback_summary(turns)
+    except ImportError:
+        logger.debug("Auth module not available, skipping cloud LLM")
+
+    # User not logged in, use fallback (local fallback disabled)
+    logger.warning("Not logged in, cannot use cloud LLM for session summary")
+    return _fallback_summary(turns)
+
+    # =========================================================================
+    # LOCAL LLM FALLBACK DISABLED - Code kept for reference
+    # =========================================================================
+    # system_prompt = _get_session_summary_prompt()
+    #
+    # user_prompt = json.dumps(
+    #     {
+    #         "total_turns": len(turns),
+    #         "turns": turns_data,
+    #     },
+    #     ensure_ascii=False,
+    #     indent=2,
+    # )
+    #
+    # try:
+    #     # Use unified LLM client
+    #     _, response = call_llm(
+    #         system_prompt=system_prompt,
+    #         user_prompt=user_prompt,
+    #         provider="auto",  # Try Claude first, fallback to OpenAI
+    #         max_tokens=500,
+    #         purpose="session_summary",
+    #     )
+    #
+    #     if not response:
+    #         logger.warning("LLM returned empty response, using fallback")
+    #         return _fallback_summary(turns)
+    #
+    #     result = extract_json(response)
+    #
+    #     title = result.get("session_title", "Untitled Session")[:80]
+    #     summary = result.get("session_summary", "")
+    #
+    #     return title, summary
+    #
+    # except Exception as e:
+    #     logger.warning(f"LLM session summary failed, using fallback: {e}")
+    #     return _fallback_summary(turns)
 
 
 def _fallback_summary(turns: List[TurnRecord]) -> Tuple[str, str]:
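Both summarizers now follow one shape: check login, load an optional prompt override, call the cloud endpoint, and fall back to the heuristic summary on any miss. A condensed reading aid of that shared pattern follows; it is not code from the package, and the helper name, pick callback, and return contract are invented for illustration.

from pathlib import Path
from typing import Callable, Optional, Tuple


def summarize_via_cloud(task: str, payload: dict,
                        fallback: Callable[[], Tuple[str, str]],
                        pick: Callable[[dict], Tuple[str, str]]) -> Tuple[str, str]:
    # Generalized form of the two hunks above; names are illustrative only.
    try:
        from realign.auth import is_logged_in          # assumed import path
        from realign.llm_client import call_llm_cloud  # assumed import path
    except ImportError:
        return fallback()  # auth module unavailable behaves like logged-out

    if is_logged_in():
        custom_prompt: Optional[str] = None
        override = Path.home() / ".aline" / "prompts" / f"{task}.md"
        try:
            if override.exists():
                custom_prompt = override.read_text(encoding="utf-8").strip()
        except Exception:
            pass  # an unreadable override is silently ignored, as in the hunks

        _, result = call_llm_cloud(task=task, payload=payload,
                                   custom_prompt=custom_prompt, silent=True)
        if result:
            return pick(result)  # e.g. (session_title, session_summary)
    return fallback()

Note that in both files everything after the early returns, including the "LOCAL LLM FALLBACK DISABLED" block, is unreachable dead code kept only for reference: the local call_llm path is retired in 0.6.4, consistent with the shrinking of realign/llm_client.py (+201 -520) in the file list above.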