code-lm 0.3.1__tar.gz → 0.3.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {code_lm-0.3.1/src/code_lm.egg-info → code_lm-0.3.2}/PKG-INFO +15 -16
- {code_lm-0.3.1 → code_lm-0.3.2}/README.md +14 -15
- {code_lm-0.3.1 → code_lm-0.3.2}/pyproject.toml +1 -1
- {code_lm-0.3.1 → code_lm-0.3.2/src/code_lm.egg-info}/PKG-INFO +15 -16
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/__init__.py +1 -1
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/main.py +232 -89
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/models/openrouter.py +2 -96
- code_lm-0.3.2/src/lm_code/session.py +283 -0
- code_lm-0.3.1/src/lm_code/session.py +0 -97
- {code_lm-0.3.1 → code_lm-0.3.2}/MANIFEST.in +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/setup.cfg +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/setup.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/code_lm.egg-info/SOURCES.txt +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/code_lm.egg-info/dependency_links.txt +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/code_lm.egg-info/entry_points.txt +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/code_lm.egg-info/requires.txt +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/code_lm.egg-info/top_level.txt +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/config.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/models/__init__.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/tools/__init__.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/tools/base.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/tools/directory_tools.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/tools/file_tools.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/tools/quality_tools.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/tools/summarizer_tool.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/tools/system_tools.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/tools/task_complete_tool.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/tools/test_runner.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/tools/tree_tool.py +0 -0
- {code_lm-0.3.1 → code_lm-0.3.2}/src/lm_code/utils.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: code-lm
|
|
3
|
-
Version: 0.3.
|
|
3
|
+
Version: 0.3.2
|
|
4
4
|
Summary: An AI coding assistant using various LLM models.
|
|
5
5
|
Home-page: https://github.com/Panagiotis897/lm-code
|
|
6
6
|
Author: Panagiotis897
|
|
@@ -39,6 +39,10 @@ LM Code is a powerful AI coding assistant for your terminal supporting 17 free m
|
|
|
39
39
|
- Markdown rendering for improved readability.
|
|
40
40
|
- **17 Free Models via OpenRouter**:
|
|
41
41
|
- NVIDIA Nemotron, Qwen, OpenAI, Meta Llama, Mistral, and more.
|
|
42
|
+
- **Session Persistence**:
|
|
43
|
+
- Conversations are saved per project directory.
|
|
44
|
+
- Resume previous sessions automatically or manually with `/load`.
|
|
45
|
+
- Auto-saves after each exchange.
|
|
42
46
|
- **Automated Tool Usage**:
|
|
43
47
|
- File operations: `view`, `edit`, `grep`, `glob`.
|
|
44
48
|
- Directory operations: `ls`, `tree`, `create_directory`.
|
|
@@ -47,12 +51,6 @@ LM Code is a powerful AI coding assistant for your terminal supporting 17 free m
|
|
|
47
51
|
- Test running: `pytest` and similar tools.
|
|
48
52
|
- **Customizable Configurations**:
|
|
49
53
|
- Easily set default models and API keys.
|
|
50
|
-
- **Session Persistence**:
|
|
51
|
-
- Conversation history is saved per project directory.
|
|
52
|
-
- Resume previous sessions when you restart in the same directory.
|
|
53
|
-
- **Mid-Session Commands**:
|
|
54
|
-
- `/compact` — Summarize conversation history to stay within context limits.
|
|
55
|
-
- `/model` — Switch models mid-session without restarting.
|
|
56
54
|
|
|
57
55
|
---
|
|
58
56
|
|
|
@@ -138,11 +136,12 @@ lmcode list-models
|
|
|
138
136
|
|
|
139
137
|
During an interactive session:
|
|
140
138
|
|
|
141
|
-
- **`/exit`**:
|
|
139
|
+
- **`/exit`**: Save session and exit.
|
|
142
140
|
- **`/help`**: Display help information.
|
|
143
|
-
- **`/
|
|
144
|
-
- **`/
|
|
145
|
-
- **`/
|
|
141
|
+
- **`/sessions`**: List all sessions for the current project.
|
|
142
|
+
- **`/load <id>`**: Load a session by ID.
|
|
143
|
+
- **`/new`**: Start a new session (saves current first).
|
|
144
|
+
- **`/save`**: Manually save the current session.
|
|
146
145
|
|
|
147
146
|
---
|
|
148
147
|
|
|
@@ -162,11 +161,11 @@ LM Code is under active development. Contributions, feature requests, and feedba
|
|
|
162
161
|
|
|
163
162
|
### Changelog
|
|
164
163
|
|
|
165
|
-
#### v0.3.
|
|
166
|
-
- Added
|
|
167
|
-
-
|
|
168
|
-
-
|
|
169
|
-
-
|
|
164
|
+
#### v0.3.2
|
|
165
|
+
- Added session persistence: conversations are saved per project directory.
|
|
166
|
+
- Sessions auto-save after each exchange and on exit.
|
|
167
|
+
- New commands: `/sessions`, `/load <id>`, `/new`, `/save`.
|
|
168
|
+
- On startup, offers to resume previous sessions for the current project.
|
|
170
169
|
|
|
171
170
|
#### v0.3.0
|
|
172
171
|
- Updated default model to NVIDIA Nemotron 3 Super 120B.
|
|
@@ -11,6 +11,10 @@ LM Code is a powerful AI coding assistant for your terminal supporting 17 free m
|
|
|
11
11
|
- Markdown rendering for improved readability.
|
|
12
12
|
- **17 Free Models via OpenRouter**:
|
|
13
13
|
- NVIDIA Nemotron, Qwen, OpenAI, Meta Llama, Mistral, and more.
|
|
14
|
+
- **Session Persistence**:
|
|
15
|
+
- Conversations are saved per project directory.
|
|
16
|
+
- Resume previous sessions automatically or manually with `/load`.
|
|
17
|
+
- Auto-saves after each exchange.
|
|
14
18
|
- **Automated Tool Usage**:
|
|
15
19
|
- File operations: `view`, `edit`, `grep`, `glob`.
|
|
16
20
|
- Directory operations: `ls`, `tree`, `create_directory`.
|
|
@@ -19,12 +23,6 @@ LM Code is a powerful AI coding assistant for your terminal supporting 17 free m
|
|
|
19
23
|
- Test running: `pytest` and similar tools.
|
|
20
24
|
- **Customizable Configurations**:
|
|
21
25
|
- Easily set default models and API keys.
|
|
22
|
-
- **Session Persistence**:
|
|
23
|
-
- Conversation history is saved per project directory.
|
|
24
|
-
- Resume previous sessions when you restart in the same directory.
|
|
25
|
-
- **Mid-Session Commands**:
|
|
26
|
-
- `/compact` — Summarize conversation history to stay within context limits.
|
|
27
|
-
- `/model` — Switch models mid-session without restarting.
|
|
28
26
|
|
|
29
27
|
---
|
|
30
28
|
|
|
@@ -110,11 +108,12 @@ lmcode list-models
|
|
|
110
108
|
|
|
111
109
|
During an interactive session:
|
|
112
110
|
|
|
113
|
-
- **`/exit`**:
|
|
111
|
+
- **`/exit`**: Save session and exit.
|
|
114
112
|
- **`/help`**: Display help information.
|
|
115
|
-
- **`/
|
|
116
|
-
- **`/
|
|
117
|
-
- **`/
|
|
113
|
+
- **`/sessions`**: List all sessions for the current project.
|
|
114
|
+
- **`/load <id>`**: Load a session by ID.
|
|
115
|
+
- **`/new`**: Start a new session (saves current first).
|
|
116
|
+
- **`/save`**: Manually save the current session.
|
|
118
117
|
|
|
119
118
|
---
|
|
120
119
|
|
|
@@ -134,11 +133,11 @@ LM Code is under active development. Contributions, feature requests, and feedba
|
|
|
134
133
|
|
|
135
134
|
### Changelog
|
|
136
135
|
|
|
137
|
-
#### v0.3.
|
|
138
|
-
- Added
|
|
139
|
-
-
|
|
140
|
-
-
|
|
141
|
-
-
|
|
136
|
+
#### v0.3.2
|
|
137
|
+
- Added session persistence: conversations are saved per project directory.
|
|
138
|
+
- Sessions auto-save after each exchange and on exit.
|
|
139
|
+
- New commands: `/sessions`, `/load <id>`, `/new`, `/save`.
|
|
140
|
+
- On startup, offers to resume previous sessions for the current project.
|
|
142
141
|
|
|
143
142
|
#### v0.3.0
|
|
144
143
|
- Updated default model to NVIDIA Nemotron 3 Super 120B.
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: code-lm
|
|
3
|
-
Version: 0.3.
|
|
3
|
+
Version: 0.3.2
|
|
4
4
|
Summary: An AI coding assistant using various LLM models.
|
|
5
5
|
Home-page: https://github.com/Panagiotis897/lm-code
|
|
6
6
|
Author: Panagiotis897
|
|
@@ -39,6 +39,10 @@ LM Code is a powerful AI coding assistant for your terminal supporting 17 free m
|
|
|
39
39
|
- Markdown rendering for improved readability.
|
|
40
40
|
- **17 Free Models via OpenRouter**:
|
|
41
41
|
- NVIDIA Nemotron, Qwen, OpenAI, Meta Llama, Mistral, and more.
|
|
42
|
+
- **Session Persistence**:
|
|
43
|
+
- Conversations are saved per project directory.
|
|
44
|
+
- Resume previous sessions automatically or manually with `/load`.
|
|
45
|
+
- Auto-saves after each exchange.
|
|
42
46
|
- **Automated Tool Usage**:
|
|
43
47
|
- File operations: `view`, `edit`, `grep`, `glob`.
|
|
44
48
|
- Directory operations: `ls`, `tree`, `create_directory`.
|
|
@@ -47,12 +51,6 @@ LM Code is a powerful AI coding assistant for your terminal supporting 17 free m
|
|
|
47
51
|
- Test running: `pytest` and similar tools.
|
|
48
52
|
- **Customizable Configurations**:
|
|
49
53
|
- Easily set default models and API keys.
|
|
50
|
-
- **Session Persistence**:
|
|
51
|
-
- Conversation history is saved per project directory.
|
|
52
|
-
- Resume previous sessions when you restart in the same directory.
|
|
53
|
-
- **Mid-Session Commands**:
|
|
54
|
-
- `/compact` — Summarize conversation history to stay within context limits.
|
|
55
|
-
- `/model` — Switch models mid-session without restarting.
|
|
56
54
|
|
|
57
55
|
---
|
|
58
56
|
|
|
@@ -138,11 +136,12 @@ lmcode list-models
|
|
|
138
136
|
|
|
139
137
|
During an interactive session:
|
|
140
138
|
|
|
141
|
-
- **`/exit`**:
|
|
139
|
+
- **`/exit`**: Save session and exit.
|
|
142
140
|
- **`/help`**: Display help information.
|
|
143
|
-
- **`/
|
|
144
|
-
- **`/
|
|
145
|
-
- **`/
|
|
141
|
+
- **`/sessions`**: List all sessions for the current project.
|
|
142
|
+
- **`/load <id>`**: Load a session by ID.
|
|
143
|
+
- **`/new`**: Start a new session (saves current first).
|
|
144
|
+
- **`/save`**: Manually save the current session.
|
|
146
145
|
|
|
147
146
|
---
|
|
148
147
|
|
|
@@ -162,11 +161,11 @@ LM Code is under active development. Contributions, feature requests, and feedba
|
|
|
162
161
|
|
|
163
162
|
### Changelog
|
|
164
163
|
|
|
165
|
-
#### v0.3.
|
|
166
|
-
- Added
|
|
167
|
-
-
|
|
168
|
-
-
|
|
169
|
-
-
|
|
164
|
+
#### v0.3.2
|
|
165
|
+
- Added session persistence: conversations are saved per project directory.
|
|
166
|
+
- Sessions auto-save after each exchange and on exit.
|
|
167
|
+
- New commands: `/sessions`, `/load <id>`, `/new`, `/save`.
|
|
168
|
+
- On startup, offers to resume previous sessions for the current project.
|
|
170
169
|
|
|
171
170
|
#### v0.3.0
|
|
172
171
|
- Updated default model to NVIDIA Nemotron 3 Super 120B.
|
|
@@ -10,6 +10,7 @@ from rich.console import Console
|
|
|
10
10
|
from rich.markdown import Markdown
|
|
11
11
|
from rich.panel import Panel
|
|
12
12
|
from pathlib import Path
|
|
13
|
+
from typing import Optional, Dict, Any
|
|
13
14
|
import yaml
|
|
14
15
|
import logging
|
|
15
16
|
import time
|
|
@@ -261,51 +262,18 @@ def start_interactive_session(model_name: str, console: Console):
|
|
|
261
262
|
)
|
|
262
263
|
return
|
|
263
264
|
|
|
264
|
-
# --- Session
|
|
265
|
-
|
|
266
|
-
project_dir =
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
if saved_session:
|
|
270
|
-
saved_model = saved_session.get("model_name", "?")
|
|
271
|
-
saved_time = time.strftime(
|
|
272
|
-
"%Y-%m-%d %H:%M",
|
|
273
|
-
time.localtime(saved_session.get("timestamp", 0)),
|
|
274
|
-
)
|
|
275
|
-
msg_count = len(saved_session.get("chat_history", []))
|
|
276
|
-
console.print(
|
|
277
|
-
f"\n[yellow]Found saved session[/yellow] (model: {saved_model}, "
|
|
278
|
-
f"saved: {saved_time}, {msg_count} messages)"
|
|
279
|
-
)
|
|
280
|
-
try:
|
|
281
|
-
resume = questionary.confirm(
|
|
282
|
-
"Resume previous session?",
|
|
283
|
-
default=True,
|
|
284
|
-
auto_enter=False,
|
|
285
|
-
).ask()
|
|
286
|
-
except (KeyboardInterrupt, EOFError):
|
|
287
|
-
resume = None
|
|
288
|
-
|
|
289
|
-
if resume is None:
|
|
290
|
-
console.print("\n[yellow]Exiting.[/yellow]")
|
|
291
|
-
return
|
|
292
|
-
else:
|
|
293
|
-
resume = False
|
|
265
|
+
# --- Session Persistence Setup ---
|
|
266
|
+
session_manager = SessionManager()
|
|
267
|
+
project_dir = os.getcwd()
|
|
268
|
+
current_session_id = None
|
|
269
|
+
current_model_name = model_name
|
|
294
270
|
# ---
|
|
295
271
|
|
|
296
272
|
try:
|
|
297
273
|
console.print(f"\nInitializing model [bold]{model_name}[/bold]...")
|
|
298
274
|
# Pass the console object to OpenRouterModel constructor
|
|
299
275
|
model = OpenRouterModel(api_key=api_key, console=console, model_name=model_name)
|
|
300
|
-
|
|
301
|
-
# Restore session history if resuming
|
|
302
|
-
if resume and saved_session:
|
|
303
|
-
model.chat_history = saved_session.get("chat_history", model.chat_history)
|
|
304
|
-
console.print(
|
|
305
|
-
f"[green]Session restored ({len(model.chat_history)} messages).[/green]\n"
|
|
306
|
-
)
|
|
307
|
-
else:
|
|
308
|
-
console.print("[green]Model initialized successfully.[/green]\n")
|
|
276
|
+
console.print("[green]Model initialized successfully.[/green]\n")
|
|
309
277
|
|
|
310
278
|
except Exception as e:
|
|
311
279
|
console.print(
|
|
@@ -317,21 +285,128 @@ def start_interactive_session(model_name: str, console: Console):
|
|
|
317
285
|
)
|
|
318
286
|
return
|
|
319
287
|
|
|
288
|
+
# --- Check for existing sessions and offer to resume ---
|
|
289
|
+
existing_sessions = session_manager.list_sessions(project_dir)
|
|
290
|
+
if existing_sessions:
|
|
291
|
+
console.print(
|
|
292
|
+
f"\n[yellow]Found {len(existing_sessions)} previous session(s) for this project:[/yellow]"
|
|
293
|
+
)
|
|
294
|
+
for i, session in enumerate(existing_sessions[:5], 1):
|
|
295
|
+
updated = session.get("updated_at", "")[:19].replace("T", " ")
|
|
296
|
+
messages = session.get("message_count", 0)
|
|
297
|
+
model = session.get("model_name", "unknown")
|
|
298
|
+
console.print(f" {i}. [{updated}] {messages} messages (model: {model})")
|
|
299
|
+
|
|
300
|
+
resume_choice = questionary.select(
|
|
301
|
+
"Resume a previous session?",
|
|
302
|
+
choices=["Start new session", "Resume latest", "Choose session"],
|
|
303
|
+
default="Start new session",
|
|
304
|
+
).ask()
|
|
305
|
+
|
|
306
|
+
if resume_choice == "Resume latest":
|
|
307
|
+
latest_session = session_manager.get_latest_session(project_dir)
|
|
308
|
+
if latest_session:
|
|
309
|
+
model.chat_history = latest_session["chat_history"]
|
|
310
|
+
current_session_id = latest_session["id"]
|
|
311
|
+
current_model_name = latest_session.get("model_name", model_name)
|
|
312
|
+
console.print(
|
|
313
|
+
f"[green]Resumed session {current_session_id} ({len(model.chat_history)} messages)[/green]\n"
|
|
314
|
+
)
|
|
315
|
+
else:
|
|
316
|
+
console.print("[yellow]No session to resume. Starting new.[/yellow]\n")
|
|
317
|
+
elif resume_choice == "Choose session":
|
|
318
|
+
session_choices = [
|
|
319
|
+
f"{s['id']} ({s['message_count']} msgs, {s['updated_at'][:16]})"
|
|
320
|
+
for s in existing_sessions[:10]
|
|
321
|
+
]
|
|
322
|
+
session_choices.append("Cancel - start new session")
|
|
323
|
+
chosen = questionary.select(
|
|
324
|
+
"Select a session to resume:",
|
|
325
|
+
choices=session_choices,
|
|
326
|
+
).ask()
|
|
327
|
+
if chosen and chosen != "Cancel - start new session":
|
|
328
|
+
chosen_id = chosen.split(" (")[0]
|
|
329
|
+
loaded_session = session_manager.load_session(project_dir, chosen_id)
|
|
330
|
+
if loaded_session:
|
|
331
|
+
model.chat_history = loaded_session["chat_history"]
|
|
332
|
+
current_session_id = loaded_session["id"]
|
|
333
|
+
current_model_name = loaded_session.get("model_name", model_name)
|
|
334
|
+
console.print(
|
|
335
|
+
f"[green]Resumed session {current_session_id} ({len(model.chat_history)} messages)[/green]\n"
|
|
336
|
+
)
|
|
337
|
+
else:
|
|
338
|
+
console.print("[red]Failed to load session. Starting new.[/red]\n")
|
|
339
|
+
else:
|
|
340
|
+
console.print("[yellow]Starting new session.[/yellow]\n")
|
|
341
|
+
else:
|
|
342
|
+
console.print("[yellow]Starting new session.[/yellow]\n")
|
|
343
|
+
# --- End Session Resume Check ---
|
|
344
|
+
|
|
320
345
|
# --- Session Start Message ---
|
|
321
346
|
console.print("Type '/help' for commands, '/exit' or Ctrl+C to quit.")
|
|
322
|
-
console.print(f"[dim]Current model: {model.current_model_name}[/dim]")
|
|
323
347
|
|
|
324
348
|
while True:
|
|
325
349
|
try:
|
|
326
350
|
user_input = console.input("[bold green]You:[/bold green] ")
|
|
327
351
|
|
|
328
352
|
if user_input.lower() == "/exit":
|
|
353
|
+
# Save session before exiting
|
|
354
|
+
_save_current_session(
|
|
355
|
+
session_manager,
|
|
356
|
+
project_dir,
|
|
357
|
+
model,
|
|
358
|
+
current_model_name,
|
|
359
|
+
current_session_id,
|
|
360
|
+
console,
|
|
361
|
+
)
|
|
329
362
|
break
|
|
330
363
|
elif user_input.lower() == "/help":
|
|
331
364
|
show_help()
|
|
332
365
|
continue
|
|
333
|
-
elif user_input.lower()
|
|
334
|
-
|
|
366
|
+
elif user_input.lower() == "/sessions":
|
|
367
|
+
_show_sessions(session_manager, project_dir, console)
|
|
368
|
+
continue
|
|
369
|
+
elif user_input.lower().startswith("/load "):
|
|
370
|
+
session_id = user_input[6:].strip()
|
|
371
|
+
loaded = _load_session_by_id(
|
|
372
|
+
session_manager, project_dir, session_id, model, console
|
|
373
|
+
)
|
|
374
|
+
if loaded:
|
|
375
|
+
current_session_id = loaded["id"]
|
|
376
|
+
current_model_name = loaded.get("model_name", model_name)
|
|
377
|
+
continue
|
|
378
|
+
elif user_input.lower() == "/new":
|
|
379
|
+
# Save current session first
|
|
380
|
+
_save_current_session(
|
|
381
|
+
session_manager,
|
|
382
|
+
project_dir,
|
|
383
|
+
model,
|
|
384
|
+
current_model_name,
|
|
385
|
+
current_session_id,
|
|
386
|
+
console,
|
|
387
|
+
)
|
|
388
|
+
# Reset chat history
|
|
389
|
+
model.chat_history = [
|
|
390
|
+
{"role": "system", "content": model.system_instruction},
|
|
391
|
+
{
|
|
392
|
+
"role": "assistant",
|
|
393
|
+
"content": "Okay, I'm ready. Provide the directory context and your request.",
|
|
394
|
+
},
|
|
395
|
+
]
|
|
396
|
+
current_session_id = None
|
|
397
|
+
console.print("[green]Started new session.[/green]\n")
|
|
398
|
+
continue
|
|
399
|
+
elif user_input.lower() == "/save":
|
|
400
|
+
session_id = _save_current_session(
|
|
401
|
+
session_manager,
|
|
402
|
+
project_dir,
|
|
403
|
+
model,
|
|
404
|
+
current_model_name,
|
|
405
|
+
current_session_id,
|
|
406
|
+
console,
|
|
407
|
+
)
|
|
408
|
+
if session_id:
|
|
409
|
+
current_session_id = session_id
|
|
335
410
|
continue
|
|
336
411
|
|
|
337
412
|
# Display initial "thinking" status - generate handles intermediate ones
|
|
@@ -345,19 +420,29 @@ def start_interactive_session(model_name: str, console: Console):
|
|
|
345
420
|
log.warning("generate() returned None unexpectedly.")
|
|
346
421
|
continue
|
|
347
422
|
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
423
|
+
console.print("[bold green]Assistant:[/bold green]")
|
|
424
|
+
console.print(Markdown(response_text), highlight=True)
|
|
425
|
+
|
|
426
|
+
# Auto-save after each exchange
|
|
427
|
+
current_session_id = _auto_save_session(
|
|
428
|
+
session_manager,
|
|
429
|
+
project_dir,
|
|
430
|
+
model,
|
|
431
|
+
current_model_name,
|
|
432
|
+
current_session_id,
|
|
433
|
+
console,
|
|
358
434
|
)
|
|
359
435
|
|
|
360
436
|
except KeyboardInterrupt:
|
|
437
|
+
# Save session before exiting
|
|
438
|
+
_save_current_session(
|
|
439
|
+
session_manager,
|
|
440
|
+
project_dir,
|
|
441
|
+
model,
|
|
442
|
+
current_model_name,
|
|
443
|
+
current_session_id,
|
|
444
|
+
console,
|
|
445
|
+
)
|
|
361
446
|
console.print("\n[yellow]Session interrupted. Exiting.[/yellow]")
|
|
362
447
|
break
|
|
363
448
|
except Exception as e:
|
|
@@ -366,42 +451,95 @@ def start_interactive_session(model_name: str, console: Console):
|
|
|
366
451
|
)
|
|
367
452
|
log.error("Error during interactive loop", exc_info=True)
|
|
368
453
|
|
|
369
|
-
# Save session on exit
|
|
370
|
-
session_mgr.save_session(project_dir, model.chat_history, model.current_model_name)
|
|
371
|
-
console.print("[dim]Session saved.[/dim]")
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
def _handle_model_command(
|
|
375
|
-
user_input: str, model, console: Console, supported_models: list
|
|
376
|
-
):
|
|
377
|
-
"""Handle /model command to switch models mid-session."""
|
|
378
|
-
parts = user_input.strip().split(None, 1)
|
|
379
|
-
if len(parts) < 2:
|
|
380
|
-
# No model specified — show available models
|
|
381
|
-
console.print("[cyan]Available models:[/cyan]")
|
|
382
|
-
for m in supported_models:
|
|
383
|
-
current = (
|
|
384
|
-
" [bold green](current)[/bold green]"
|
|
385
|
-
if m["id"] == model.current_model_name
|
|
386
|
-
else ""
|
|
387
|
-
)
|
|
388
|
-
console.print(f" [bold]{m['id']}[/bold]{current} — {m['description']}")
|
|
389
|
-
console.print("\nUsage: /model <model_id>")
|
|
390
|
-
return
|
|
391
454
|
|
|
392
|
-
|
|
455
|
+
def _save_current_session(
|
|
456
|
+
session_manager: SessionManager,
|
|
457
|
+
project_dir: str,
|
|
458
|
+
model: OpenRouterModel,
|
|
459
|
+
model_name: str,
|
|
460
|
+
session_id: Optional[str],
|
|
461
|
+
console: Console,
|
|
462
|
+
) -> Optional[str]:
|
|
463
|
+
"""Save the current session. Returns the session ID if saved."""
|
|
464
|
+
# Don't save if there are no user messages
|
|
465
|
+
user_messages = [msg for msg in model.chat_history if msg.get("role") == "user"]
|
|
466
|
+
if not user_messages:
|
|
467
|
+
return None
|
|
393
468
|
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
469
|
+
try:
|
|
470
|
+
saved_id = session_manager.save_session(
|
|
471
|
+
project_dir=project_dir,
|
|
472
|
+
chat_history=model.chat_history,
|
|
473
|
+
model_name=model_name,
|
|
474
|
+
session_id=session_id,
|
|
475
|
+
)
|
|
476
|
+
console.print(f"[dim]Session saved: {saved_id}[/dim]")
|
|
477
|
+
return saved_id
|
|
478
|
+
except Exception as e:
|
|
479
|
+
log.error(f"Failed to save session: {e}")
|
|
480
|
+
console.print(f"[red]Warning: Failed to save session: {e}[/red]")
|
|
481
|
+
return None
|
|
482
|
+
|
|
483
|
+
|
|
484
|
+
def _auto_save_session(
|
|
485
|
+
session_manager: SessionManager,
|
|
486
|
+
project_dir: str,
|
|
487
|
+
model: OpenRouterModel,
|
|
488
|
+
model_name: str,
|
|
489
|
+
session_id: Optional[str],
|
|
490
|
+
console: Console,
|
|
491
|
+
) -> Optional[str]:
|
|
492
|
+
"""Auto-save session silently. Returns the session ID."""
|
|
493
|
+
try:
|
|
494
|
+
saved_id = session_manager.save_session(
|
|
495
|
+
project_dir=project_dir,
|
|
496
|
+
chat_history=model.chat_history,
|
|
497
|
+
model_name=model_name,
|
|
498
|
+
session_id=session_id,
|
|
499
|
+
)
|
|
500
|
+
return saved_id
|
|
501
|
+
except Exception as e:
|
|
502
|
+
log.error(f"Auto-save failed: {e}")
|
|
503
|
+
return session_id
|
|
504
|
+
|
|
505
|
+
|
|
506
|
+
def _show_sessions(session_manager: SessionManager, project_dir: str, console: Console):
|
|
507
|
+
"""Display sessions for the current project."""
|
|
508
|
+
sessions = session_manager.list_sessions(project_dir)
|
|
509
|
+
if not sessions:
|
|
510
|
+
console.print("[yellow]No sessions found for this project.[/yellow]")
|
|
399
511
|
return
|
|
400
512
|
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
513
|
+
console.print(f"\n[bold cyan]Sessions for current project:[/bold cyan]")
|
|
514
|
+
for i, session in enumerate(sessions, 1):
|
|
515
|
+
updated = session.get("updated_at", "")[:19].replace("T", " ")
|
|
516
|
+
messages = session.get("message_count", 0)
|
|
517
|
+
model = session.get("model_name", "unknown")
|
|
518
|
+
console.print(
|
|
519
|
+
f" {i}. ID: [green]{session['id']}[/green] | {updated} | {messages} msgs | {model}"
|
|
520
|
+
)
|
|
521
|
+
console.print("\nUse '/load <session_id>' to resume a session.\n")
|
|
522
|
+
|
|
523
|
+
|
|
524
|
+
def _load_session_by_id(
|
|
525
|
+
session_manager: SessionManager,
|
|
526
|
+
project_dir: str,
|
|
527
|
+
session_id: str,
|
|
528
|
+
model: OpenRouterModel,
|
|
529
|
+
console: Console,
|
|
530
|
+
) -> Optional[Dict[str, Any]]:
|
|
531
|
+
"""Load a session by ID. Returns session data or None."""
|
|
532
|
+
loaded_session = session_manager.load_session(project_dir, session_id)
|
|
533
|
+
if loaded_session:
|
|
534
|
+
model.chat_history = loaded_session["chat_history"]
|
|
535
|
+
console.print(
|
|
536
|
+
f"[green]Loaded session {session_id} ({len(model.chat_history)} messages)[/green]\n"
|
|
537
|
+
)
|
|
538
|
+
return loaded_session
|
|
539
|
+
else:
|
|
540
|
+
console.print(f"[red]Session '{session_id}' not found.[/red]")
|
|
541
|
+
console.print("[yellow]Use '/sessions' to list available sessions.[/yellow]\n")
|
|
542
|
+
return None
|
|
405
543
|
|
|
406
544
|
|
|
407
545
|
def show_help():
|
|
@@ -419,11 +557,12 @@ def show_help():
|
|
|
419
557
|
help_text = f""" [bold]Help[/bold]
|
|
420
558
|
|
|
421
559
|
[cyan]Interactive Commands:[/cyan]
|
|
422
|
-
/exit
|
|
423
|
-
/help
|
|
424
|
-
/
|
|
425
|
-
/
|
|
426
|
-
/
|
|
560
|
+
/exit - Save session and exit
|
|
561
|
+
/help - Show this help message
|
|
562
|
+
/sessions - List sessions for current project
|
|
563
|
+
/load <id> - Load a session by ID
|
|
564
|
+
/new - Start a new session (saves current first)
|
|
565
|
+
/save - Manually save current session
|
|
427
566
|
|
|
428
567
|
[cyan]CLI Commands:[/cyan]
|
|
429
568
|
lmcode setup KEY
|
|
@@ -433,6 +572,10 @@ def show_help():
|
|
|
433
572
|
|
|
434
573
|
[cyan]Workflow Hint:[/cyan] Analyze -> Plan -> Execute -> Verify -> Summarize
|
|
435
574
|
|
|
575
|
+
[cyan]Session Persistence:[/cyan]
|
|
576
|
+
Sessions are automatically saved per project directory.
|
|
577
|
+
Resume previous sessions with /load or on startup.
|
|
578
|
+
|
|
436
579
|
[cyan]Available Tools:[/cyan]
|
|
437
580
|
{tool_list_formatted}
|
|
438
581
|
"""
|
|
@@ -188,99 +188,6 @@ class OpenRouterModel:
|
|
|
188
188
|
)
|
|
189
189
|
return tools
|
|
190
190
|
|
|
191
|
-
# --- Model switching ---
|
|
192
|
-
def switch_model(self, model_name: str) -> None:
|
|
193
|
-
"""Switch to a different model without resetting conversation history."""
|
|
194
|
-
self.current_model_name = model_name
|
|
195
|
-
log.info(f"Switched model to: {model_name}")
|
|
196
|
-
|
|
197
|
-
# --- Compact history via summarization ---
|
|
198
|
-
def compact_history(self) -> str:
|
|
199
|
-
"""Summarize the conversation history to reduce token usage."""
|
|
200
|
-
if len(self.chat_history) <= 2:
|
|
201
|
-
return "Nothing to compact."
|
|
202
|
-
|
|
203
|
-
self.console.print("[yellow]Compacting conversation history...[/yellow]")
|
|
204
|
-
|
|
205
|
-
# Extract user-facing conversation (skip system message at index 0)
|
|
206
|
-
conversation_text = ""
|
|
207
|
-
for msg in self.chat_history[1:]:
|
|
208
|
-
role = msg.get("role", "?")
|
|
209
|
-
content = msg.get("content", "")
|
|
210
|
-
if role == "user":
|
|
211
|
-
# Strip the orientation context prefix for cleaner summary
|
|
212
|
-
lines = content.split("\n")
|
|
213
|
-
user_lines = []
|
|
214
|
-
capturing = False
|
|
215
|
-
for line in lines:
|
|
216
|
-
if line.startswith("User request:"):
|
|
217
|
-
capturing = True
|
|
218
|
-
if capturing:
|
|
219
|
-
user_lines.append(line)
|
|
220
|
-
if user_lines:
|
|
221
|
-
conversation_text += f"User: {' '.join(user_lines)}\n"
|
|
222
|
-
else:
|
|
223
|
-
conversation_text += f"User: {content[:500]}\n"
|
|
224
|
-
elif role == "assistant":
|
|
225
|
-
conversation_text += f"Assistant: {content[:1000]}\n"
|
|
226
|
-
elif role == "tool":
|
|
227
|
-
name = msg.get("name", "tool")
|
|
228
|
-
conversation_text += f"Tool({name}): {content[:500]}\n"
|
|
229
|
-
|
|
230
|
-
if not conversation_text.strip():
|
|
231
|
-
return "Nothing to compact."
|
|
232
|
-
|
|
233
|
-
summarization_prompt = (
|
|
234
|
-
"You are a summarizer. Summarize the following conversation between a user and an AI coding assistant. "
|
|
235
|
-
"Preserve all key details: what was discussed, what files were viewed/edited, "
|
|
236
|
-
"what decisions were made, and what the current state of work is. "
|
|
237
|
-
"Be concise but complete. This summary will replace the conversation history.\n\n"
|
|
238
|
-
f"Conversation:\n{conversation_text}\n\n"
|
|
239
|
-
"Provide a concise summary:"
|
|
240
|
-
)
|
|
241
|
-
|
|
242
|
-
try:
|
|
243
|
-
payload = {
|
|
244
|
-
"model": self.current_model_name,
|
|
245
|
-
"messages": [
|
|
246
|
-
{
|
|
247
|
-
"role": "system",
|
|
248
|
-
"content": "You are a helpful summarizer. Return only the summary, no preamble.",
|
|
249
|
-
},
|
|
250
|
-
{"role": "user", "content": summarization_prompt},
|
|
251
|
-
],
|
|
252
|
-
"temperature": 0.3,
|
|
253
|
-
"max_tokens": 1000,
|
|
254
|
-
}
|
|
255
|
-
|
|
256
|
-
with self.console.status("[yellow]Generating summary...", spinner="dots"):
|
|
257
|
-
response = requests.post(
|
|
258
|
-
self.base_url, headers=self.headers, json=payload
|
|
259
|
-
)
|
|
260
|
-
response.raise_for_status()
|
|
261
|
-
data = response.json()
|
|
262
|
-
|
|
263
|
-
summary = data["choices"][0]["message"]["content"]
|
|
264
|
-
original_count = len(self.chat_history)
|
|
265
|
-
|
|
266
|
-
# Keep system message, replace everything else with summary
|
|
267
|
-
self.chat_history = [
|
|
268
|
-
self.chat_history[0], # system message
|
|
269
|
-
{
|
|
270
|
-
"role": "assistant",
|
|
271
|
-
"content": f"[Conversation summary]\n{summary}",
|
|
272
|
-
},
|
|
273
|
-
]
|
|
274
|
-
|
|
275
|
-
log.info(
|
|
276
|
-
f"Compacted history from {original_count} to {len(self.chat_history)} messages"
|
|
277
|
-
)
|
|
278
|
-
return f"Compacted {original_count} messages down to a summary."
|
|
279
|
-
|
|
280
|
-
except Exception as e:
|
|
281
|
-
log.error(f"Compaction failed: {e}", exc_info=True)
|
|
282
|
-
return f"Compaction failed: {e}"
|
|
283
|
-
|
|
284
191
|
# --- Native Function Calling Agent Loop ---
|
|
285
192
|
def generate(self, prompt: str) -> str | None:
|
|
286
193
|
logging.info(
|
|
@@ -289,11 +196,10 @@ class OpenRouterModel:
|
|
|
289
196
|
original_user_prompt = prompt
|
|
290
197
|
if prompt.startswith("/"):
|
|
291
198
|
command = prompt.split()[0].lower()
|
|
199
|
+
# Handle commands like /compact here eventually
|
|
292
200
|
if command in ["/exit", "/help"]:
|
|
293
201
|
logging.info(f"Handled command: {command}")
|
|
294
|
-
return None
|
|
295
|
-
if command == "/compact":
|
|
296
|
-
return self.compact_history()
|
|
202
|
+
return None # Or return specific help text
|
|
297
203
|
|
|
298
204
|
# === Step 1: Mandatory Orientation ===
|
|
299
205
|
orientation_context = ""
|
|
@@ -0,0 +1,283 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Session persistence for LM Code CLI.
|
|
3
|
+
Saves and loads conversation history per project directory.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import json
|
|
7
|
+
import hashlib
|
|
8
|
+
import os
|
|
9
|
+
import logging
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import List, Dict, Optional, Any
|
|
13
|
+
|
|
14
|
+
log = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class SessionManager:
    """Manages session persistence for conversation history.

    Sessions are stored as JSON files under ``sessions_dir``.  Each project
    directory gets its own subdirectory, named by a short hash of the
    project's absolute path, so histories from different projects never mix.
    """

    # Class-level logger so the manager is self-contained (same logger name
    # the module would use).
    _log = logging.getLogger(__name__)

    def __init__(self, sessions_dir: Optional[str] = None):
        """Create the manager; defaults to ~/.config/lm-code/sessions."""
        self.sessions_dir = (
            Path(sessions_dir)
            if sessions_dir
            else Path.home() / ".config" / "lm-code" / "sessions"
        )
        self.sessions_dir.mkdir(parents=True, exist_ok=True)
        self.current_session_id: Optional[str] = None
        self.current_project_dir: Optional[str] = None

    @staticmethod
    def _count_messages(chat_history: List[Dict[str, Any]]) -> int:
        """Count conversation turns; tool/system entries are excluded."""
        return sum(
            1 for msg in chat_history if msg.get("role") in ("user", "assistant")
        )

    def _get_project_hash(self, project_dir: str) -> str:
        """Generate a unique, stable hash for a project directory path."""
        normalized = os.path.normpath(os.path.abspath(project_dir))
        return hashlib.sha256(normalized.encode()).hexdigest()[:16]

    def _get_project_sessions_dir(self, project_dir: str) -> Path:
        """Get (creating if needed) the sessions directory for a project."""
        project_sessions_dir = self.sessions_dir / self._get_project_hash(project_dir)
        project_sessions_dir.mkdir(parents=True, exist_ok=True)
        return project_sessions_dir

    def _read_session_meta(
        self, session_file: Path, default_project: str
    ) -> Optional[Dict[str, Any]]:
        """Read one session file and return its metadata, or None on error."""
        try:
            with open(session_file, "r", encoding="utf-8") as f:
                session_data = json.load(f)
        except Exception as e:
            self._log.warning(f"Failed to read session file {session_file}: {e}")
            return None
        return {
            "id": session_data.get("id", session_file.stem),
            "project_dir": session_data.get("project_dir", default_project),
            "model_name": session_data.get("model_name", "unknown"),
            "created_at": session_data.get("created_at", ""),
            "updated_at": session_data.get("updated_at", ""),
            "message_count": session_data.get("message_count", 0),
        }

    def save_session(
        self,
        project_dir: str,
        chat_history: List[Dict[str, Any]],
        model_name: str,
        session_id: Optional[str] = None,
    ) -> str:
        """
        Save a session to disk.

        Args:
            project_dir: The project directory path
            chat_history: The chat history list from OpenRouterModel
            model_name: The model being used
            session_id: Optional existing session ID to overwrite

        Returns:
            The session ID of the saved session

        Raises:
            Re-raises any exception from writing/serializing the session file.
        """
        if session_id is None:
            # Timestamp-based IDs sort chronologically by name.
            session_id = datetime.now().strftime("%Y%m%d_%H%M%S")

        project_sessions_dir = self._get_project_sessions_dir(project_dir)
        session_file = project_sessions_dir / f"{session_id}.json"

        # When overwriting an existing session, preserve its original
        # creation timestamp instead of resetting it to "now".
        created_at = datetime.now().isoformat()
        if session_file.exists():
            try:
                with open(session_file, "r", encoding="utf-8") as f:
                    created_at = json.load(f).get("created_at", created_at)
            except Exception:
                pass  # unreadable previous file: fall back to "now"

        session_data = {
            "id": session_id,
            "project_dir": os.path.abspath(project_dir),
            "model_name": model_name,
            "created_at": created_at,
            "updated_at": datetime.now().isoformat(),
            "chat_history": chat_history,
            "message_count": self._count_messages(chat_history),
        }

        try:
            with open(session_file, "w", encoding="utf-8") as f:
                json.dump(session_data, f, indent=2, ensure_ascii=False)
            self._log.info(f"Session saved: {session_id} for project {project_dir}")
            self.current_session_id = session_id
            self.current_project_dir = project_dir
            return session_id
        except Exception as e:
            self._log.error(f"Failed to save session: {e}", exc_info=True)
            raise

    def load_session(
        self, project_dir: str, session_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Load a session from disk.

        Args:
            project_dir: The project directory path
            session_id: The session ID to load

        Returns:
            Session data dict or None if not found or unreadable
        """
        session_file = (
            self._get_project_sessions_dir(project_dir) / f"{session_id}.json"
        )

        if not session_file.exists():
            self._log.warning(f"Session file not found: {session_file}")
            return None

        try:
            with open(session_file, "r", encoding="utf-8") as f:
                session_data = json.load(f)
            self._log.info(f"Session loaded: {session_id}")
            self.current_session_id = session_id
            self.current_project_dir = project_dir
            return session_data
        except Exception as e:
            self._log.error(f"Failed to load session: {e}", exc_info=True)
            return None

    def list_sessions(self, project_dir: str) -> List[Dict[str, Any]]:
        """
        List all sessions for a project directory.

        Args:
            project_dir: The project directory path

        Returns:
            List of session metadata dicts, sorted by most recent first
        """
        project_sessions_dir = self._get_project_sessions_dir(project_dir)
        sessions = [
            meta
            for session_file in project_sessions_dir.glob("*.json")
            if (meta := self._read_session_meta(session_file, project_dir))
            is not None
        ]
        # Sort by most recent first
        sessions.sort(key=lambda s: s.get("updated_at", ""), reverse=True)
        return sessions

    def list_all_sessions(self) -> List[Dict[str, Any]]:
        """
        List all sessions across all projects.

        Returns:
            List of session metadata dicts, sorted by most recent first
        """
        all_sessions: List[Dict[str, Any]] = []
        for project_hash_dir in self.sessions_dir.iterdir():
            if not project_hash_dir.is_dir():
                continue
            for session_file in project_hash_dir.glob("*.json"):
                meta = self._read_session_meta(session_file, "unknown")
                if meta is not None:
                    all_sessions.append(meta)
        # Sort by most recent first
        all_sessions.sort(key=lambda s: s.get("updated_at", ""), reverse=True)
        return all_sessions

    def get_latest_session(self, project_dir: str) -> Optional[Dict[str, Any]]:
        """
        Get the most recent session for a project directory.

        Args:
            project_dir: The project directory path

        Returns:
            Full session data dict or None if no sessions exist
        """
        sessions = self.list_sessions(project_dir)
        if not sessions:
            return None
        return self.load_session(project_dir, sessions[0]["id"])

    def delete_session(self, project_dir: str, session_id: str) -> bool:
        """
        Delete a session.

        Args:
            project_dir: The project directory path
            session_id: The session ID to delete

        Returns:
            True if deleted successfully, False otherwise
        """
        session_file = (
            self._get_project_sessions_dir(project_dir) / f"{session_id}.json"
        )

        if not session_file.exists():
            self._log.warning(f"Session file not found for deletion: {session_file}")
            return False

        try:
            session_file.unlink()
            self._log.info(f"Session deleted: {session_id}")
            # Drop the cached ID if we just deleted the active session.
            if self.current_session_id == session_id:
                self.current_session_id = None
            return True
        except Exception as e:
            self._log.error(f"Failed to delete session: {e}", exc_info=True)
            return False

    def update_session(
        self, project_dir: str, session_id: str, chat_history: List[Dict[str, Any]]
    ) -> bool:
        """
        Update an existing session with new chat history.

        Args:
            project_dir: The project directory path
            session_id: The session ID to update
            chat_history: The updated chat history

        Returns:
            True if updated successfully, False otherwise
        """
        session_file = (
            self._get_project_sessions_dir(project_dir) / f"{session_id}.json"
        )

        if not session_file.exists():
            self._log.warning(f"Session file not found for update: {session_file}")
            return False

        try:
            with open(session_file, "r", encoding="utf-8") as f:
                session_data = json.load(f)

            session_data["chat_history"] = chat_history
            session_data["updated_at"] = datetime.now().isoformat()
            session_data["message_count"] = self._count_messages(chat_history)

            with open(session_file, "w", encoding="utf-8") as f:
                json.dump(session_data, f, indent=2, ensure_ascii=False)

            self._log.info(f"Session updated: {session_id}")
            return True
        except Exception as e:
            self._log.error(f"Failed to update session: {e}", exc_info=True)
            return False
|
|
@@ -1,97 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Session persistence for LM Code.
|
|
3
|
-
Saves and loads conversation history per project directory.
|
|
4
|
-
"""
|
|
5
|
-
|
|
6
|
-
import hashlib
|
|
7
|
-
import json
|
|
8
|
-
import logging
|
|
9
|
-
import os
|
|
10
|
-
import time
|
|
11
|
-
from pathlib import Path
|
|
12
|
-
from typing import Optional, Dict, List, Any
|
|
13
|
-
|
|
14
|
-
log = logging.getLogger(__name__)
|
|
15
|
-
|
|
16
|
-
# Shared on-disk root for all saved sessions; one JSON file per project hash.
SESSIONS_DIR = Path.home() / ".config" / "lm-code" / "sessions"
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
def _project_hash(project_dir: str) -> str:
|
|
20
|
-
"""Create a stable hash from the absolute project directory path."""
|
|
21
|
-
abs_path = str(Path(project_dir).resolve())
|
|
22
|
-
return hashlib.sha256(abs_path.encode()).hexdigest()[:16]
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
class SessionManager:
    """Manages per-project session persistence."""

    def __init__(self):
        # Ensure the shared storage directory exists before any file access.
        SESSIONS_DIR.mkdir(parents=True, exist_ok=True)

    def _session_path(self, project_dir: str) -> Path:
        """Return the JSON file path backing this project's session."""
        return SESSIONS_DIR / (_project_hash(project_dir) + ".json")

    def save_session(
        self,
        project_dir: str,
        chat_history: List[Dict[str, Any]],
        model_name: str,
    ) -> None:
        """Save chat history and metadata for a project directory."""
        path = self._session_path(project_dir)
        payload = {
            "project_dir": str(Path(project_dir).resolve()),
            "model_name": model_name,
            "timestamp": time.time(),
            "chat_history": chat_history,
        }
        try:
            with open(path, "w", encoding="utf-8") as fh:
                json.dump(payload, fh, ensure_ascii=False, indent=2)
            log.debug(f"Session saved to {path}")
        except Exception as e:
            # Best-effort persistence: log the failure and carry on.
            log.error(f"Failed to save session: {e}")

    def load_session(self, project_dir: str) -> Optional[Dict[str, Any]]:
        """Load a saved session for a project directory, or None."""
        path = self._session_path(project_dir)
        if not path.exists():
            return None
        try:
            with open(path, "r", encoding="utf-8") as fh:
                loaded = json.load(fh)
            log.info(f"Session loaded from {path}")
            return loaded
        except Exception as e:
            log.error(f"Failed to load session: {e}")
            return None

    def delete_session(self, project_dir: str) -> bool:
        """Delete a saved session. Returns True if one was deleted."""
        path = self._session_path(project_dir)
        if not path.exists():
            return False
        path.unlink()
        log.info(f"Session deleted: {path}")
        return True

    def list_sessions(self) -> List[Dict[str, Any]]:
        """List all saved sessions with metadata."""
        found: List[Dict[str, Any]] = []
        for candidate in SESSIONS_DIR.glob("*.json"):
            try:
                with open(candidate, "r", encoding="utf-8") as fh:
                    data = json.load(fh)
            except Exception:
                # Skip unreadable/corrupt session files.
                continue
            found.append(
                {
                    "project_dir": data.get("project_dir", "?"),
                    "model_name": data.get("model_name", "?"),
                    "timestamp": data.get("timestamp", 0),
                    "message_count": len(data.get("chat_history", [])),
                }
            )
        found.sort(key=lambda s: s["timestamp"], reverse=True)
        return found
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|