aline-ai 0.6.2__py3-none-any.whl → 0.6.4__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (40)
  1. {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/METADATA +1 -1
  2. {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/RECORD +38 -37
  3. realign/__init__.py +1 -1
  4. realign/adapters/__init__.py +0 -3
  5. realign/adapters/codex.py +14 -9
  6. realign/cli.py +42 -236
  7. realign/codex_detector.py +72 -32
  8. realign/codex_home.py +85 -0
  9. realign/codex_terminal_linker.py +172 -0
  10. realign/commands/__init__.py +2 -2
  11. realign/commands/add.py +89 -9
  12. realign/commands/doctor.py +495 -0
  13. realign/commands/export_shares.py +154 -226
  14. realign/commands/init.py +66 -4
  15. realign/commands/watcher.py +30 -80
  16. realign/config.py +9 -46
  17. realign/dashboard/app.py +7 -11
  18. realign/dashboard/screens/event_detail.py +0 -3
  19. realign/dashboard/screens/session_detail.py +0 -1
  20. realign/dashboard/tmux_manager.py +129 -4
  21. realign/dashboard/widgets/config_panel.py +175 -241
  22. realign/dashboard/widgets/events_table.py +71 -128
  23. realign/dashboard/widgets/sessions_table.py +77 -136
  24. realign/dashboard/widgets/terminal_panel.py +349 -27
  25. realign/dashboard/widgets/watcher_panel.py +0 -2
  26. realign/db/sqlite_db.py +77 -2
  27. realign/events/event_summarizer.py +76 -35
  28. realign/events/session_summarizer.py +73 -32
  29. realign/hooks.py +334 -647
  30. realign/llm_client.py +201 -520
  31. realign/triggers/__init__.py +0 -2
  32. realign/triggers/next_turn_trigger.py +4 -5
  33. realign/triggers/registry.py +1 -4
  34. realign/watcher_core.py +53 -35
  35. realign/adapters/antigravity.py +0 -159
  36. realign/triggers/antigravity_trigger.py +0 -140
  37. {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/WHEEL +0 -0
  38. {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/entry_points.txt +0 -0
  39. {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/licenses/LICENSE +0 -0
  40. {aline_ai-0.6.2.dist-info → aline_ai-0.6.4.dist-info}/top_level.txt +0 -0
realign/events/event_summarizer.py

@@ -9,7 +9,7 @@ from ..db.sqlite_db import SQLiteDatabase
 from ..db.base import SessionRecord
 from ..db.locks import lease_lock, lock_key_for_event_summary, make_lock_owner
 from .debouncer import Debouncer
-from ..llm_client import call_llm, extract_json
+from ..llm_client import extract_json, call_llm_cloud
 
 logger = logging.getLogger(__name__)
 
@@ -201,41 +201,82 @@ def _generate_event_summary_llm(sessions: List[SessionRecord]) -> Tuple[str, str]:
         }
     )
 
-    system_prompt = _get_event_summary_prompt()
-
-    user_prompt = json.dumps(
-        {
-            "total_sessions": len(sessions),
-            "sessions": sessions_data,
-        },
-        ensure_ascii=False,
-        indent=2,
-    )
-
+    # Try cloud provider first if user is logged in
     try:
-        # Use unified LLM client
-        _, response = call_llm(
-            system_prompt=system_prompt,
-            user_prompt=user_prompt,
-            provider="auto",  # Try Claude first, fallback to OpenAI
-            max_tokens=500,
-            purpose="event_summary",
-        )
-
-        if not response:
-            logger.warning("LLM returned empty response, using fallback")
-            return _fallback_event_summary(sessions)
-
-        result = extract_json(response)
-
-        title = result.get("event_title", "Untitled Event")[:100]
-        description = result.get("event_description", "")
-
-        return title, description
-
-    except Exception as e:
-        logger.warning(f"LLM event summary failed, using fallback: {e}")
-        return _fallback_event_summary(sessions)
+        from ..auth import is_logged_in
+
+        if is_logged_in():
+            logger.debug("Attempting cloud LLM for event summary")
+            # Load user custom prompt if available
+            custom_prompt = None
+            user_prompt_path = Path.home() / ".aline" / "prompts" / "event_summary.md"
+            try:
+                if user_prompt_path.exists():
+                    custom_prompt = user_prompt_path.read_text(encoding="utf-8").strip()
+            except Exception:
+                pass
+
+            _, result = call_llm_cloud(
+                task="event_summary",
+                payload={"sessions": sessions_data},
+                custom_prompt=custom_prompt,
+                silent=True,
+            )
+
+            if result:
+                title = result.get("event_title", "Untitled Event")[:100]
+                description = result.get("event_description", "")
+                logger.info(f"Cloud LLM event summary success: title={title[:50]}...")
+                return title, description
+            else:
+                # Cloud LLM failed, use fallback (local fallback disabled)
+                logger.warning("Cloud LLM event summary failed, using fallback")
+                return _fallback_event_summary(sessions)
+    except ImportError:
+        logger.debug("Auth module not available, skipping cloud LLM")
+
+    # User not logged in, use fallback (local fallback disabled)
+    logger.warning("Not logged in, cannot use cloud LLM for event summary")
+    return _fallback_event_summary(sessions)
+
+    # =========================================================================
+    # LOCAL LLM FALLBACK DISABLED - Code kept for reference
+    # =========================================================================
+    # system_prompt = _get_event_summary_prompt()
+    #
+    # user_prompt = json.dumps(
+    #     {
+    #         "total_sessions": len(sessions),
+    #         "sessions": sessions_data,
+    #     },
+    #     ensure_ascii=False,
+    #     indent=2,
+    # )
+    #
+    # try:
+    #     # Use unified LLM client
+    #     _, response = call_llm(
+    #         system_prompt=system_prompt,
+    #         user_prompt=user_prompt,
+    #         provider="auto",  # Try Claude first, fallback to OpenAI
+    #         max_tokens=500,
+    #         purpose="event_summary",
+    #     )
+    #
+    #     if not response:
+    #         logger.warning("LLM returned empty response, using fallback")
+    #         return _fallback_event_summary(sessions)
+    #
+    #     result = extract_json(response)
+    #
+    #     title = result.get("event_title", "Untitled Event")[:100]
+    #     description = result.get("event_description", "")
+    #
+    #     return title, description
+    #
+    # except Exception as e:
+    #     logger.warning(f"LLM event summary failed, using fallback: {e}")
+    #     return _fallback_event_summary(sessions)
 
 
 def _fallback_event_summary(sessions: List[SessionRecord]) -> Tuple[str, str]:
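
The new `call_llm_cloud` helper lives in realign/llm_client.py (item 30 above) and its definition is not part of these hunks, but the call sites pin down the shape the summarizers rely on. Below is a minimal stub of that inferred contract; the type hints, defaults, and the meaning of the discarded first tuple element are assumptions, not the package's actual signature:

    from typing import Any, Dict, Optional, Tuple

    def call_llm_cloud(
        task: str,                            # "event_summary" / "session_summary" above
        payload: Dict[str, Any],              # {"sessions": [...]} or {"turns": [...]}
        custom_prompt: Optional[str] = None,  # user override from ~/.aline/prompts/
        silent: bool = True,                  # both call sites pass silent=True
    ) -> Tuple[Any, Optional[Dict[str, Any]]]:
        # Hypothetical stub matching the call sites in this diff. The second
        # element is the parsed result dict (falsy on failure); callers read
        # event_title/event_description or session_title/session_summary from it.
        raise NotImplementedError

Because both call sites treat a falsy result as failure and drop to the heuristic fallback, a conforming implementation can return (None, None) on error rather than raise.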
realign/events/session_summarizer.py

@@ -10,7 +10,7 @@ from typing import List, Tuple, Optional
 from ..db.sqlite_db import SQLiteDatabase
 from ..db.base import TurnRecord
 from ..db.locks import lease_lock, lock_key_for_session_summary, make_lock_owner
-from ..llm_client import call_llm, extract_json
+from ..llm_client import extract_json, call_llm_cloud
 
 logger = logging.getLogger(__name__)
 
@@ -267,41 +267,82 @@ def _generate_session_summary_llm(turns: List[TurnRecord]) -> Tuple[str, str]:
         }
     )
 
-    system_prompt = _get_session_summary_prompt()
-
-    user_prompt = json.dumps(
-        {
-            "total_turns": len(turns),
-            "turns": turns_data,
-        },
-        ensure_ascii=False,
-        indent=2,
-    )
-
+    # Try cloud provider first if user is logged in
     try:
-        # Use unified LLM client
-        _, response = call_llm(
-            system_prompt=system_prompt,
-            user_prompt=user_prompt,
-            provider="auto",  # Try Claude first, fallback to OpenAI
-            max_tokens=500,
-            purpose="session_summary",
-        )
-
-        if not response:
-            logger.warning("LLM returned empty response, using fallback")
-            return _fallback_summary(turns)
-
-        result = extract_json(response)
+        from ..auth import is_logged_in
 
-        title = result.get("session_title", "Untitled Session")[:80]
-        summary = result.get("session_summary", "")
+        if is_logged_in():
+            logger.debug("Attempting cloud LLM for session summary")
+            # Load user custom prompt if available
+            custom_prompt = None
+            user_prompt_path = Path.home() / ".aline" / "prompts" / "session_summary.md"
+            try:
+                if user_prompt_path.exists():
+                    custom_prompt = user_prompt_path.read_text(encoding="utf-8").strip()
+            except Exception:
+                pass
 
-        return title, summary
+            _, result = call_llm_cloud(
+                task="session_summary",
+                payload={"turns": turns_data},
+                custom_prompt=custom_prompt,
+                silent=True,
+            )
 
-    except Exception as e:
-        logger.warning(f"LLM session summary failed, using fallback: {e}")
-        return _fallback_summary(turns)
+            if result:
+                title = result.get("session_title", "Untitled Session")[:80]
+                summary = result.get("session_summary", "")
+                logger.info(f"Cloud LLM session summary success: title={title[:50]}...")
+                return title, summary
+            else:
+                # Cloud LLM failed, use fallback (local fallback disabled)
+                logger.warning("Cloud LLM session summary failed, using fallback")
+                return _fallback_summary(turns)
+    except ImportError:
+        logger.debug("Auth module not available, skipping cloud LLM")
+
+    # User not logged in, use fallback (local fallback disabled)
+    logger.warning("Not logged in, cannot use cloud LLM for session summary")
+    return _fallback_summary(turns)
+
+    # =========================================================================
+    # LOCAL LLM FALLBACK DISABLED - Code kept for reference
+    # =========================================================================
+    # system_prompt = _get_session_summary_prompt()
+    #
+    # user_prompt = json.dumps(
+    #     {
+    #         "total_turns": len(turns),
+    #         "turns": turns_data,
+    #     },
+    #     ensure_ascii=False,
+    #     indent=2,
+    # )
+    #
+    # try:
+    #     # Use unified LLM client
+    #     _, response = call_llm(
+    #         system_prompt=system_prompt,
+    #         user_prompt=user_prompt,
+    #         provider="auto",  # Try Claude first, fallback to OpenAI
+    #         max_tokens=500,
+    #         purpose="session_summary",
+    #     )
+    #
+    #     if not response:
+    #         logger.warning("LLM returned empty response, using fallback")
+    #         return _fallback_summary(turns)
+    #
+    #     result = extract_json(response)
+    #
+    #     title = result.get("session_title", "Untitled Session")[:80]
+    #     summary = result.get("session_summary", "")
+    #
+    #     return title, summary
+    #
+    # except Exception as e:
+    #     logger.warning(f"LLM session summary failed, using fallback: {e}")
+    #     return _fallback_summary(turns)
 
 
 def _fallback_summary(turns: List[TurnRecord]) -> Tuple[str, str]:
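
Both summarizers now probe ~/.aline/prompts/ for a user-supplied prompt before calling the cloud endpoint, and the loaders swallow read errors, so a missing or unreadable file silently keeps the built-in prompt. A short sketch of exercising that override; the paths match the hunks above, while the prompt text is purely illustrative:

    from pathlib import Path

    # Directory probed by the new loaders in event_summarizer.py and
    # session_summarizer.py (see the hunks above).
    prompts_dir = Path.home() / ".aline" / "prompts"
    prompts_dir.mkdir(parents=True, exist_ok=True)

    # If this file exists, its stripped contents are passed to call_llm_cloud
    # as custom_prompt; deleting the file restores the default prompt.
    (prompts_dir / "session_summary.md").write_text(
        "Keep session titles under 60 characters and summaries to two sentences.\n",
        encoding="utf-8",
    )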