npcpy-1.0.26-py3-none-any.whl → npcpy-1.2.32-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
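If you are reading this diff against a local environment, the installed release can be confirmed with the standard library. A minimal sketch (not part of the diff itself):

```python
# Check which npcpy release is installed locally (Python 3.8+ standard library).
from importlib.metadata import version

print(version("npcpy"))  # e.g. "1.0.26" or "1.2.32"
```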
- npcpy/__init__.py +0 -7
- npcpy/data/audio.py +16 -99
- npcpy/data/image.py +43 -42
- npcpy/data/load.py +83 -124
- npcpy/data/text.py +28 -28
- npcpy/data/video.py +8 -32
- npcpy/data/web.py +51 -23
- npcpy/ft/diff.py +110 -0
- npcpy/ft/ge.py +115 -0
- npcpy/ft/memory_trainer.py +171 -0
- npcpy/ft/model_ensembler.py +357 -0
- npcpy/ft/rl.py +360 -0
- npcpy/ft/sft.py +248 -0
- npcpy/ft/usft.py +128 -0
- npcpy/gen/audio_gen.py +24 -0
- npcpy/gen/embeddings.py +13 -13
- npcpy/gen/image_gen.py +262 -117
- npcpy/gen/response.py +615 -415
- npcpy/gen/video_gen.py +53 -7
- npcpy/llm_funcs.py +1869 -437
- npcpy/main.py +1 -1
- npcpy/memory/command_history.py +844 -510
- npcpy/memory/kg_vis.py +833 -0
- npcpy/memory/knowledge_graph.py +892 -1845
- npcpy/memory/memory_processor.py +81 -0
- npcpy/memory/search.py +188 -90
- npcpy/mix/debate.py +192 -3
- npcpy/npc_compiler.py +1672 -801
- npcpy/npc_sysenv.py +593 -1266
- npcpy/serve.py +3120 -0
- npcpy/sql/ai_function_tools.py +257 -0
- npcpy/sql/database_ai_adapters.py +186 -0
- npcpy/sql/database_ai_functions.py +163 -0
- npcpy/sql/model_runner.py +19 -19
- npcpy/sql/npcsql.py +706 -507
- npcpy/sql/sql_model_compiler.py +156 -0
- npcpy/tools.py +183 -0
- npcpy/work/plan.py +13 -279
- npcpy/work/trigger.py +3 -3
- npcpy-1.2.32.dist-info/METADATA +803 -0
- npcpy-1.2.32.dist-info/RECORD +54 -0
- npcpy/data/dataframes.py +0 -171
- npcpy/memory/deep_research.py +0 -125
- npcpy/memory/sleep.py +0 -557
- npcpy/modes/_state.py +0 -78
- npcpy/modes/alicanto.py +0 -1075
- npcpy/modes/guac.py +0 -785
- npcpy/modes/mcp_npcsh.py +0 -822
- npcpy/modes/npc.py +0 -213
- npcpy/modes/npcsh.py +0 -1158
- npcpy/modes/plonk.py +0 -409
- npcpy/modes/pti.py +0 -234
- npcpy/modes/serve.py +0 -1637
- npcpy/modes/spool.py +0 -312
- npcpy/modes/wander.py +0 -549
- npcpy/modes/yap.py +0 -572
- npcpy/npc_team/alicanto.npc +0 -2
- npcpy/npc_team/alicanto.png +0 -0
- npcpy/npc_team/assembly_lines/test_pipeline.py +0 -181
- npcpy/npc_team/corca.npc +0 -13
- npcpy/npc_team/foreman.npc +0 -7
- npcpy/npc_team/frederic.npc +0 -6
- npcpy/npc_team/frederic4.png +0 -0
- npcpy/npc_team/guac.png +0 -0
- npcpy/npc_team/jinxs/automator.jinx +0 -18
- npcpy/npc_team/jinxs/bash_executer.jinx +0 -31
- npcpy/npc_team/jinxs/calculator.jinx +0 -11
- npcpy/npc_team/jinxs/edit_file.jinx +0 -96
- npcpy/npc_team/jinxs/file_chat.jinx +0 -14
- npcpy/npc_team/jinxs/gui_controller.jinx +0 -28
- npcpy/npc_team/jinxs/image_generation.jinx +0 -29
- npcpy/npc_team/jinxs/internet_search.jinx +0 -30
- npcpy/npc_team/jinxs/local_search.jinx +0 -152
- npcpy/npc_team/jinxs/npcsh_executor.jinx +0 -31
- npcpy/npc_team/jinxs/python_executor.jinx +0 -8
- npcpy/npc_team/jinxs/screen_cap.jinx +0 -25
- npcpy/npc_team/jinxs/sql_executor.jinx +0 -33
- npcpy/npc_team/kadiefa.npc +0 -3
- npcpy/npc_team/kadiefa.png +0 -0
- npcpy/npc_team/npcsh.ctx +0 -9
- npcpy/npc_team/npcsh_sibiji.png +0 -0
- npcpy/npc_team/plonk.npc +0 -2
- npcpy/npc_team/plonk.png +0 -0
- npcpy/npc_team/plonkjr.npc +0 -2
- npcpy/npc_team/plonkjr.png +0 -0
- npcpy/npc_team/sibiji.npc +0 -5
- npcpy/npc_team/sibiji.png +0 -0
- npcpy/npc_team/spool.png +0 -0
- npcpy/npc_team/templates/analytics/celona.npc +0 -0
- npcpy/npc_team/templates/hr_support/raone.npc +0 -0
- npcpy/npc_team/templates/humanities/eriane.npc +0 -4
- npcpy/npc_team/templates/it_support/lineru.npc +0 -0
- npcpy/npc_team/templates/marketing/slean.npc +0 -4
- npcpy/npc_team/templates/philosophy/maurawa.npc +0 -0
- npcpy/npc_team/templates/sales/turnic.npc +0 -4
- npcpy/npc_team/templates/software/welxor.npc +0 -0
- npcpy/npc_team/yap.png +0 -0
- npcpy/routes.py +0 -958
- npcpy/work/mcp_helpers.py +0 -357
- npcpy/work/mcp_server.py +0 -194
- npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/automator.jinx +0 -18
- npcpy-1.0.26.data/data/npcpy/npc_team/bash_executer.jinx +0 -31
- npcpy-1.0.26.data/data/npcpy/npc_team/calculator.jinx +0 -11
- npcpy-1.0.26.data/data/npcpy/npc_team/celona.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/corca.npc +0 -13
- npcpy-1.0.26.data/data/npcpy/npc_team/edit_file.jinx +0 -96
- npcpy-1.0.26.data/data/npcpy/npc_team/eriane.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/file_chat.jinx +0 -14
- npcpy-1.0.26.data/data/npcpy/npc_team/foreman.npc +0 -7
- npcpy-1.0.26.data/data/npcpy/npc_team/frederic.npc +0 -6
- npcpy-1.0.26.data/data/npcpy/npc_team/frederic4.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/guac.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/gui_controller.jinx +0 -28
- npcpy-1.0.26.data/data/npcpy/npc_team/image_generation.jinx +0 -29
- npcpy-1.0.26.data/data/npcpy/npc_team/internet_search.jinx +0 -30
- npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.npc +0 -3
- npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/lineru.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/local_search.jinx +0 -152
- npcpy-1.0.26.data/data/npcpy/npc_team/maurawa.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh.ctx +0 -9
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_executor.jinx +0 -31
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_sibiji.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/plonk.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/plonk.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/python_executor.jinx +0 -8
- npcpy-1.0.26.data/data/npcpy/npc_team/raone.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/screen_cap.jinx +0 -25
- npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.npc +0 -5
- npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/slean.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/spool.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/sql_executor.jinx +0 -33
- npcpy-1.0.26.data/data/npcpy/npc_team/test_pipeline.py +0 -181
- npcpy-1.0.26.data/data/npcpy/npc_team/turnic.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/welxor.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/yap.png +0 -0
- npcpy-1.0.26.dist-info/METADATA +0 -827
- npcpy-1.0.26.dist-info/RECORD +0 -139
- npcpy-1.0.26.dist-info/entry_points.txt +0 -11
- /npcpy/{modes → ft}/__init__.py +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/WHEEL +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/licenses/LICENSE +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/top_level.txt +0 -0
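A comparison like the one above can be reproduced locally from the two wheels. The sketch below is not part of npcpy and assumes both wheel files have already been downloaded into the working directory; it unpacks each wheel (a zip archive) and diffs the Python sources with the standard library:

```python
# Minimal sketch: diff the Python sources of two locally downloaded wheels.
import difflib
import zipfile

OLD_WHEEL = "npcpy-1.0.26-py3-none-any.whl"  # assumed local path
NEW_WHEEL = "npcpy-1.2.32-py3-none-any.whl"  # assumed local path

def wheel_sources(path):
    # A wheel is a zip archive; map member name -> decoded text for .py files.
    with zipfile.ZipFile(path) as zf:
        return {name: zf.read(name).decode("utf-8", errors="replace")
                for name in zf.namelist() if name.endswith(".py")}

old, new = wheel_sources(OLD_WHEEL), wheel_sources(NEW_WHEEL)
for name in sorted(set(old) | set(new)):
    diff = list(difflib.unified_diff(
        old.get(name, "").splitlines(),
        new.get(name, "").splitlines(),
        fromfile=f"1.0.26/{name}", tofile=f"1.2.32/{name}", lineterm=""))
    if diff:
        print("\n".join(diff))
```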
npcpy/modes/guac.py
DELETED
@@ -1,785 +0,0 @@

import re
import os
import sys
import code
import yaml
from pathlib import Path
import atexit
import traceback
from typing import Optional, List, Dict, Any, Tuple
from dataclasses import dataclass, field
import json
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import datetime
import argparse
import io
import importlib.util

from npcpy.memory.command_history import CommandHistory
from npcpy.npc_compiler import Team, NPC
from npcpy.llm_funcs import get_llm_response, check_llm_command, execute_llm_command
from npcpy.modes._state import initial_state as npcsh_initial_state
from npcpy.npc_sysenv import render_markdown, print_and_process_stream_with_markdown

try:
    import readline
except ImportError:
    readline = None

GUAC_REFRESH_PERIOD = os.environ.get('GUAC_REFRESH_PERIOD', 100)
READLINE_HISTORY_FILE = os.path.expanduser("~/.guac_readline_history")
try:
    npcsh_initial_state.GUAC_REFRESH_PERIOD = int(GUAC_REFRESH_PERIOD)
except ValueError:
    npcsh_initial_state.GUAC_REFRESH_PERIOD = 100

@dataclass
class GuacState:
    current_mode: str = "cmd"
    current_path: str = field(default_factory=os.getcwd)
    npc: Optional[NPC] = None
    team: Optional[Team] = None
    messages: List[Dict[str, str]] = field(default_factory=list)
    locals: Dict[str, Any] = field(default_factory=dict)
    command_history: Optional[CommandHistory] = None
    chat_model: Optional[str] = npcsh_initial_state.chat_model
    chat_provider: Optional[str] = npcsh_initial_state.chat_provider
    stream_output: bool = True
    config_dir: Optional[Path] = None
    src_dir: Optional[Path] = None
    command_count: int = 0
    compile_buffer: List[str] = field(default_factory=list)

def get_multiline_input_guac(prompt_str: str, state: GuacState) -> str:
    lines = list(state.compile_buffer)
    current_prompt = prompt_str if not lines else "... "
    while True:
        try:
            line = input(current_prompt)
            lines.append(line)
            current_prompt = "... "
            if not line and len(lines) > 1 and not lines[-2].strip():
                lines.pop()
                lines.pop()
                break
            if not line and len(lines) == 1:
                lines.pop()
                break
            if len(lines) == 1 and line.strip():
                temp_line = line.strip()
                is_block_starter = re.match(r"^\s*(def|class|for|while|if|try|with|@)", temp_line)
                ends_with_colon_for_block = temp_line.endswith(":") and is_block_starter
                if not is_block_starter and not ends_with_colon_for_block:
                    open_brackets = (temp_line.count('(') - temp_line.count(')') +
                                     temp_line.count('[') - temp_line.count(']') +
                                     temp_line.count('{') - temp_line.count('}'))
                    if open_brackets <= 0:
                        break
        except EOFError:
            print("\nGoodbye!")
            sys.exit(0)
        except KeyboardInterrupt:
            print("\nKeyboardInterrupt")
            state.compile_buffer.clear()
            return ""
    full_input = "\n".join(lines)
    state.compile_buffer.clear()
    return full_input

def is_python_code(text: str) -> bool:
    text = text.strip()
    if not text:
        return False
    try:
        compile(text, "<input>", "eval")
        return True
    except SyntaxError:
        try:
            compile(text, "<input>", "exec")
            return True
        except SyntaxError:
            return False
    except (OverflowError, ValueError):  # Other potential compile errors
        return False


def setup_guac_readline(history_file: str):
    if not readline:
        return
    try:
        readline.read_history_file(history_file)
    except FileNotFoundError:
        pass
    except OSError:
        pass

    try:
        if sys.stdin.isatty():
            readline.set_history_length(1000)
            try:
                readline.parse_and_bind("set enable-bracketed-paste on")
            except Exception:
                pass
    except Exception:
        pass

def save_guac_readline_history(history_file: str):
    if not readline:
        return
    try:
        readline.write_history_file(history_file)
    except OSError:
        pass
    except Exception:
        pass

def _load_guac_helpers_into_state(state: GuacState):
    if state.src_dir:
        main_module_path = state.src_dir / "main.py"
        if main_module_path.exists():
            try:
                p_path = str(state.src_dir.parent)
                s_path = str(state.src_dir)
                if p_path not in sys.path:
                    sys.path.insert(0, p_path)
                if s_path not in sys.path:
                    sys.path.insert(0, s_path)

                spec = importlib.util.spec_from_file_location("guac_main_helpers", main_module_path)
                if spec and spec.loader:
                    guac_main = importlib.util.module_from_spec(spec)
                    spec.loader.exec_module(guac_main)
                    for name in dir(guac_main):
                        if not name.startswith('__'):
                            state.locals[name] = getattr(guac_main, name)

                core_imports = {
                    'pd': pd, 'np': np, 'plt': plt, 'datetime': datetime,
                    'Path': Path, 'os': os, 'sys': sys, 'json': json,
                    'yaml': yaml, 're': re, 'traceback': traceback
                }
                state.locals.update(core_imports)
            except Exception as e:
                print(f"Warning: Could not load helpers from {main_module_path}: {e}", file=sys.stderr)

def setup_guac_mode(config_dir=None,
                    plots_dir=None,
                    npc_team_dir=None,
                    lang='python',
                    ):
    home_dir = Path.home()
    config_dir = Path(config_dir) if config_dir else home_dir / ".npcsh" / "guac"
    plots_dir = Path(plots_dir) if plots_dir else config_dir / "plots"
    npc_team_dir = Path(npc_team_dir) if npc_team_dir else config_dir / "npc_team"
    src_dir = config_dir / "src"

    for p in [src_dir, plots_dir, npc_team_dir]:
        p.mkdir(parents=True, exist_ok=True)

    team_ctx = {
        "team_name": "guac_team",
        "description": f"A team of NPCs specialized in {lang} analysis",
        "forenpc": "guac",
        "model": os.environ.get("NPCSH_CHAT_MODEL", "llama3.2"),
        "provider": os.environ.get("NPCSH_CHAT_PROVIDER", "ollama")
    }

    with open(npc_team_dir / "team.ctx", "w") as f:
        yaml.dump(team_ctx, f, default_flow_style=False)

    if not (config_dir / "__init__.py").exists():
        (config_dir / "__init__.py").touch()

    config_file = config_dir / "config.json"
    default_mode_val = "cmd"
    current_config = {}

    if config_file.exists():
        try:
            with open(config_file, "r") as f:
                current_config = json.load(f)
            default_mode_val = current_config.get("default_mode", "cmd")
        except json.JSONDecodeError:
            pass

    if not current_config or \
       current_config.get("preferred_language") != lang or \
       current_config.get("default_mode") is None:
        current_config = {
            "preferred_language": lang,
            "plots_directory": str(plots_dir),
            "npc_team_directory": str(npc_team_dir),
            "default_mode": default_mode_val
        }
        with open(config_file, "w") as f:
            json.dump(current_config, f, indent=2)

    os.environ["NPCSH_GUAC_LANG"] = lang
    os.environ["NPCSH_GUAC_PLOTS"] = str(plots_dir)
    os.environ["NPCSH_GUAC_TEAM"] = str(npc_team_dir)
    npcsh_initial_state.GUAC_DEFAULT_MODE = default_mode_val

    if not (src_dir / "__init__.py").exists():
        with open(src_dir / "__init__.py", "w") as f:
            f.write("# Guac source directory\n")

    main_py_content = """import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import os
import datetime
from pathlib import Path

def save_plot(name=None, plots_dir=None):
    if plots_dir is None:
        plots_dir = os.environ.get("NPCSH_GUAC_PLOTS", Path.home() / ".npcsh" / "guac" / "plots")
    plots_dir = Path(plots_dir)
    plots_dir.mkdir(parents=True, exist_ok=True)
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    filename = f"{timestamp}_{name}.png" if name else f"{timestamp}_plot.png"
    filepath = plots_dir / filename
    try:
        if plt.get_fignums():
            plt.savefig(filepath)
            print(f"Plot saved to {filepath}")
        else:
            print("No active matplotlib plot to save.")
            return None
    except Exception as e:
        print(f"Error saving plot: {e}")
        return None
    return filepath

def read_img(img_path):
    try:
        from PIL import Image
        img = Image.open(img_path)
        img.show()
    except ImportError:
        print("PIL (Pillow) not available. Please install it: pip install Pillow")
    except FileNotFoundError:
        print(f"Image file not found: {img_path}")
    except Exception as e:
        print(f"Error reading image {img_path}: {e}")
    return img_path
"""
    if not (src_dir / "main.py").exists():
        with open(src_dir / "main.py", "w") as f:
            f.write(main_py_content)

    if str(config_dir) not in sys.path:
        sys.path.insert(0, str(config_dir))
    if str(config_dir.parent) not in sys.path:
        sys.path.insert(0, str(config_dir.parent))

    setup_npc_team(npc_team_dir, lang)
    return {
        "language": lang, "src_dir": src_dir, "config_path": config_file,
        "plots_dir": plots_dir, "npc_team_dir": npc_team_dir,
        "config_dir": config_dir, "default_mode": default_mode_val
    }

def setup_npc_team(npc_team_dir, lang):
    guac_npc = {
        "name": "guac",
        "primary_directive": (
            f"You are guac, an AI assistant operating in a Python environment. "
            f"When asked to perform actions or generate code, prioritize Python. "
            f"For general queries, provide concise answers. "
            f"When routing tasks (agent mode), consider Python-based tools or direct Python code generation if appropriate. "
            f"If generating code directly (cmd mode), ensure it's Python."
        )
    }
    caug_npc = {
        "name": "caug",
        "primary_directive": f"You are caug, a specialist in big data statistical methods in {lang}."
    }

    parsely_npc = {
        "name": "parsely",
        "primary_directive": f"You are parsely, a specialist in mathematical methods in {lang}."
    }

    toon_npc = {
        "name": "toon",
        "primary_directive": f"You are toon, a specialist in brute force methods in {lang}."
    }

    for npc_data in [guac_npc, caug_npc, parsely_npc, toon_npc]:
        npc_file = npc_team_dir / f"{npc_data['name']}.npc"
        with open(npc_file, "w") as f:
            yaml.dump(npc_data, f, default_flow_style=False)

    team_ctx_model = os.environ.get("NPCSH_CHAT_MODEL", npcsh_initial_state.chat_model or "llama3.2")
    team_ctx_provider = os.environ.get("NPCSH_CHAT_PROVIDER", npcsh_initial_state.chat_provider or "ollama")
    team_ctx = {
        "team_name": "guac_team", "description": f"A team for {lang} analysis", "foreman": "guac",
        "model": team_ctx_model, "provider": team_ctx_provider
    }
    npcsh_initial_state.chat_model = team_ctx_model
    npcsh_initial_state.chat_provider = team_ctx_provider
    with open(npc_team_dir / "team.ctx", "w") as f:
        yaml.dump(team_ctx, f, default_flow_style=False)

def print_guac_bowl():
    bowl_art = """
🟢🟢🟢🟢🟢
🟢 🟢
🟢
🟢
🟢
🟢 🟢🟢🟢 🟢 🟢 🟢🟢🟢 🟢🟢🟢
🟢 🟢 🟢 🟢 ⚫⚫🟢 🟢
🟢 🟢 🟢 🟢 ⚫🥑🧅⚫ 🟢
🟢 🟢 🟢 🟢 ⚫🥑🍅⚫ 🟢
🟢🟢🟢🟢🟢🟢 🟢🟢🟢🟢 ⚫⚫🟢 🟢🟢🟢
"""
    print(bowl_art)

def get_guac_prompt_char(command_count: int) -> str:
    period = int(npcsh_initial_state.GUAC_REFRESH_PERIOD)
    period = max(1, period)
    stages = ["\U0001F951", "\U0001F951🔪", "\U0001F951🥣", "\U0001F951🥣🧂", "\U0001F958 REFRESH?"]
    divisor = max(1, period // (len(stages)-1) if len(stages) > 1 else period)
    stage_index = min(command_count // divisor, len(stages) - 1)
    return stages[stage_index]

def _handle_guac_refresh(state: GuacState):
    if not state.command_history or not state.npc:
        print("Cannot refresh: command history or NPC not available.")
        return
    history_entries = state.command_history.get_all()
    if not history_entries:
        print("No command history to analyze for refresh.")
        return

    py_commands = []
    for entry in history_entries:
        if len(entry) > 2 and isinstance(entry[2], str) and entry[2].strip() and not entry[2].startswith('/'):
            py_commands.append(entry[2])

    if not py_commands:
        print("No relevant commands in history to analyze for refresh.")
        return

    prompt_parts = [
        "Analyze the following Python commands or natural language queries that led to Python code execution by a user:",
        "\n```python",
        "\n".join(py_commands[-20:]),
        "```\n",
        "Based on these, suggest 1-3 useful Python helper functions that the user might find valuable.",
        "Provide only the Python code for these functions, wrapped in ```python ... ``` blocks.",
        "Do not include any other text or explanation outside the code blocks."
    ]
    prompt = "\n".join(prompt_parts)

    try:
        response = get_llm_response(prompt, model=state.chat_model, provider=state.chat_provider, npc=state.npc, stream=False)
        suggested_code_raw = response.get("response", "").strip()
        code_blocks = re.findall(r'```python\s*(.*?)\s*```', suggested_code_raw, re.DOTALL)

        if not code_blocks:
            if "def " in suggested_code_raw:
                code_blocks = [suggested_code_raw]
            else:
                print("\nNo functions suggested by LLM or format not recognized.")
                return

        suggested_functions_code = "\n\n".join(block.strip() for block in code_blocks)
        if not suggested_functions_code.strip():
            print("\nLLM did not suggest any functions.")
            return

        print("\n=== Suggested Helper Functions ===\n")
        render_markdown(f"```python\n{suggested_functions_code}\n```")
        print("\n===============================\n")

        user_choice = input("Add these functions to your main.py? (y/n): ").strip().lower()
        if user_choice == 'y':
            main_py_path = state.src_dir / "main.py"
            with open(main_py_path, "a") as f:
                f.write("\n\n# --- Functions suggested by /refresh ---\n")
                f.write(suggested_functions_code)
                f.write("\n# --- End of suggested functions ---\n")
            print(f"Functions appended to {main_py_path}.")
            print("To use them in the current session: import importlib; importlib.reload(guac.src.main); from guac.src.main import *")
        else:
            print("Suggested functions not added.")
    except Exception as e:
        print(f"Error during /refresh: {e}")
        traceback.print_exc()

def execute_python_code(code_str: str, state: GuacState) -> Tuple[GuacState, Any]:
    output_capture = io.StringIO()
    original_stdout = sys.stdout
    original_stderr = sys.stderr
    final_output_str = None
    is_expression = False

    try:
        sys.stdout = output_capture
        sys.stderr = output_capture

        if '\n' not in code_str.strip() and not re.match(r"^\s*(def|class|for|while|if|try|with|import|from|@)", code_str.strip()):
            try:
                compiled_expr = compile(code_str, "<input>", "eval")
                exec_result = eval(compiled_expr, state.locals)
                if exec_result is not None and not output_capture.getvalue().strip():
                    print(repr(exec_result), file=sys.stdout)
                is_expression = True
            except SyntaxError:
                is_expression = False
            except Exception:
                is_expression = False
                raise

        if not is_expression:
            compiled_code = compile(code_str, "<input>", "exec")
            exec(compiled_code, state.locals)

    except SyntaxError:
        exc_type, exc_value, _ = sys.exc_info()
        error_lines = traceback.format_exception_only(exc_type, exc_value)
        adjusted_error_lines = [line.replace('File "<input>"', 'Syntax error in input') for line in error_lines]
        print("".join(adjusted_error_lines), file=output_capture, end="")
    except Exception:
        exc_type, exc_value, exc_tb = sys.exc_info()
        traceback.print_exception(exc_type, exc_value, exc_tb, file=output_capture)
    finally:
        sys.stdout = original_stdout
        sys.stderr = original_stderr
        final_output_str = output_capture.getvalue().strip()
        output_capture.close()

    if state.command_history:
        state.command_history.add_command(code_str, [final_output_str if final_output_str else ""], "", state.current_path)
    return state, final_output_str

def execute_guac_command(command: str, state: GuacState) -> Tuple[GuacState, Any]:
    stripped_command = command.strip()
    output = None

    if not stripped_command:
        return state, None
    if stripped_command.lower() in ["exit", "quit", "exit()", "quit()"]:
        raise SystemExit("Exiting Guac Mode.")

    # Check for shell-like commands first, before Python code detection
    parts = stripped_command.split(maxsplit=1)
    cmd_name = parts[0].lower()
    args = parts[1] if len(parts) > 1 else ""

    # Handle shell-like commands without / prefix
    if cmd_name == "ls":
        try:
            ls_path = args.strip() if args.strip() else state.current_path
            output = "\n".join(os.listdir(ls_path))
        except Exception as e:
            output = f"Error listing directory: {e}"
        if state.command_history:
            state.command_history.add_command(command, [str(output)], "", state.current_path)
        return state, output
    elif cmd_name == "pwd":
        output = state.current_path
        if state.command_history:
            state.command_history.add_command(command, [str(output)], "", state.current_path)
        return state, output
    elif cmd_name == "cd":
        target_dir = args.strip() if args.strip() else str(Path.home())
        try:
            os.chdir(target_dir)
            state.current_path = os.getcwd()
            output = f"Changed directory to {state.current_path}"
        except FileNotFoundError:
            output = f"Error: Directory not found: {target_dir}"
        except Exception as e:
            output = f"Error changing directory: {e}"
        if state.command_history:
            state.command_history.add_command(command, [str(output)], "", state.current_path)
        return state, output
    elif cmd_name == "run" and args.strip().endswith(".py"):
        script_path = Path(args.strip())
        if script_path.exists():
            try:
                with open(script_path, "r") as f:
                    script_code = f.read()
                _, script_exec_output = execute_python_code(script_code, state)
                output = (f"Executed script '{script_path}'.\n"
                          f"Output from script:\n{script_exec_output if script_exec_output else '(No direct output)'}")
            except Exception as e:
                output = f"Error running script {script_path}: {e}"
        else:
            output = f"Error: Script not found: {script_path}"
        if state.command_history:
            state.command_history.add_command(command, [str(output)], "", state.current_path)
        return state, output

    # Now check if it's Python code
    if is_python_code(stripped_command):
        state, output = execute_python_code(stripped_command, state)
        return state, output

    # Handle / prefixed commands
    if stripped_command.startswith("/"):
        parts = stripped_command.split(maxsplit=1)
        cmd_name = parts[0].lower()
        args = parts[1] if len(parts) > 1 else ""
        is_core_cmd = True

        if cmd_name == "/agent":
            state.current_mode = "agent"
            output = "Switched to AGENT mode."
        elif cmd_name == "/chat":
            state.current_mode = "chat"
            output = "Switched to CHAT mode."
        elif cmd_name == "/cmd":
            state.current_mode = "cmd"
            output = "Switched to CMD mode."
        elif cmd_name == "/ride":
            state.current_mode = "ride"
            output = "Switched to RIDE mode (placeholder)."
        elif cmd_name == "/refresh":
            _handle_guac_refresh(state)
            output = "Refresh process initiated."
        elif cmd_name == "/mode":
            output = f"Current mode: {state.current_mode.upper()}"
        elif cmd_name == "/show_vars":
            temp_output_list = ["Current Python Environment Variables:"]
            if state.locals:
                for k, v_obj in state.locals.items():
                    if not k.startswith("__"):
                        try:
                            v_repr = repr(v_obj)
                            temp_output_list.append(f"  {k}: {v_repr[:100]}{'...' if len(v_repr) > 100 else ''}")
                        except Exception:
                            temp_output_list.append(f"  {k}: <Error representing value>")
            else:
                temp_output_list.append("  (empty)")
            output = "\n".join(temp_output_list)

        else:
            is_core_cmd = False

        if is_core_cmd:
            if state.command_history:
                state.command_history.add_command(command, [str(output if output else "")], "", state.current_path)
            return state, output

    nl_input_for_llm = stripped_command

    if state.current_mode == "agent":
        llm_result_dict = check_llm_command(
            command=nl_input_for_llm,
            model=state.chat_model,
            provider=state.chat_provider,
            npc=state.npc,
            team=state.team,
            messages=state.messages,  # Pass current messages for context
            stream=state.stream_output,
            # tools and jinxs would be sourced from state.npc or state.team if check_llm_command uses them
        )
        output = llm_result_dict.get("output")
        state.messages = llm_result_dict.get("messages", state.messages)  # Update messages from check_llm_command

        history_output = str(output) if not (state.stream_output and hasattr(output, '__iter__') and not isinstance(output, (str,bytes))) else "[Streamed Agent Response]"
        if state.command_history:
            state.command_history.add_command(nl_input_for_llm, [history_output], "", state.current_path)

    elif state.current_mode == "chat":
        llm_response_dict = get_llm_response(
            nl_input_for_llm,
            model=state.chat_model,
            provider=state.chat_provider,
            npc=state.npc,
            messages=state.messages,  # Pass current messages
            stream=state.stream_output
        )
        output = llm_response_dict.get("response")
        state.messages = llm_response_dict.get("messages", state.messages)  # Update messages

        history_output = str(output) if not (state.stream_output and hasattr(output, '__iter__') and not isinstance(output, (str,bytes))) else "[Streamed Chat Response]"
        if state.command_history:
            state.command_history.add_command(nl_input_for_llm, [history_output], "", state.current_path)

    elif state.current_mode == "cmd":
        prompt_cmd = (
            f"User input for Python CMD mode: '{nl_input_for_llm}'.\n"
            f"Generate ONLY executable Python code required to fulfill this.\n"
            f"Do not include any explanations, leading markdown like ```python, or any text other than the Python code itself.\n"
        )
        llm_response = get_llm_response(
            prompt_cmd,
            model=state.chat_model,
            provider=state.chat_provider,
            npc=state.npc,
            stream=False,
            messages=state.messages  # Pass messages for context if LLM uses them
        )
        if llm_response.get('response').startswith('```python'):
            generated_code = llm_response.get("response", "").strip()[len('```python'):].strip()
            generated_code = generated_code.rsplit('```', 1)[0].strip()
        else:
            generated_code = llm_response.get("response", "").strip()
        state.messages = llm_response.get("messages", state.messages)

        if generated_code and not generated_code.startswith("# Error:"):
            print(f"\n# LLM Generated Code (Cmd Mode):\n---\n{generated_code}\n---\n")
            _, exec_output = execute_python_code(generated_code, state)
            output = f"# Code executed.\n# Output:\n{exec_output if exec_output else '(No direct output)'}"
        else:
            output = generated_code if generated_code else "# Error: LLM did not generate Python code."

        if state.command_history:
            state.command_history.add_command(nl_input_for_llm, [str(output if output else "")], "", state.current_path)

    elif state.current_mode == "ride":
        output = "RIDE mode is not yet implemented. Your input was: " + nl_input_for_llm
        if state.command_history:
            state.command_history.add_command(nl_input_for_llm, [str(output)], "", state.current_path)

    return state, output

def run_guac_repl(initial_guac_state: GuacState):
    state = initial_guac_state
    _load_guac_helpers_into_state(state)
    print_guac_bowl()
    print(f"Welcome to Guac Mode! Current mode: {state.current_mode.upper()}. Type /agent, /chat, or /cmd to switch modes.")

    while True:
        try:
            state.current_path = os.getcwd()
            path_display = Path(state.current_path).name
            prompt_char = get_guac_prompt_char(state.command_count)
            mode_display = state.current_mode.upper()
            npc_display = f":{state.npc.name}" if state.npc and state.npc.name else ""
            prompt_str = f"[{path_display}|{mode_display}{npc_display}] {prompt_char} > "

            user_input = get_multiline_input_guac(prompt_str, state)
            if not user_input.strip() and not state.compile_buffer:
                if state.compile_buffer:
                    state.compile_buffer.clear()
                continue

            state.command_count += 1
            new_state, result = execute_guac_command(user_input, state)
            state = new_state

            if result is not None:
                if state.stream_output and hasattr(result, '__iter__') and not isinstance(result, (str, bytes, dict)):
                    full_streamed_output_for_history = print_and_process_stream_with_markdown(result, state.chat_model, state.chat_provider)
                    if (state.current_mode == "chat" or state.current_mode == "agent") and \
                       state.messages and state.messages[-1].get("role") == "assistant":
                        state.messages[-1]["content"] = full_streamed_output_for_history

                    if state.command_history:
                        try:
                            last_entry_id = state.command_history.get_last_entry_id()
                            if last_entry_id:
                                state.command_history.update_command_output(last_entry_id, [full_streamed_output_for_history])
                        except AttributeError:
                            pass
                elif isinstance(result, str):
                    if result.strip():
                        render_markdown(result)
                elif not (state.stream_output and hasattr(result, '__iter__')):
                    if result:
                        print(str(result))
                print()

        except (KeyboardInterrupt, EOFError):
            print("\nExiting Guac Mode...")
            break
        except SystemExit as e:
            print(f"\n{e}")
            break
        except Exception:
            print("An unexpected error occurred in the REPL:")
            traceback.print_exc()

def enter_guac_mode(npc=None,
                    team=None,
                    config_dir=None,
                    plots_dir=None,
                    npc_team_dir=None,
                    refresh_period=None,
                    lang=None,
                    default_mode_choice=None):

    if refresh_period is not None:
        try:
            npcsh_initial_state.GUAC_REFRESH_PERIOD = int(refresh_period)
        except ValueError:
            pass

    setup_result = setup_guac_mode(
        config_dir=config_dir,
        plots_dir=plots_dir,
        npc_team_dir=npc_team_dir
    )
    guac_config_dir = setup_result["config_dir"]
    guac_src_dir = setup_result["src_dir"]
    guac_npc_team_dir = setup_result["npc_team_dir"]
    guac_default_mode = default_mode_choice or setup_result.get("default_mode", "cmd")

    cmd_history = CommandHistory()
    current_npc = npc
    current_team = team

    if current_npc is None and current_team is None:
        try:
            current_team = Team(team_path=str(guac_npc_team_dir), db_conn=None)
            if current_team and current_team.npcs:
                current_npc = current_team.get_npc("guac")
                if not current_npc:
                    current_npc = current_team.get_foreman() or next(iter(current_team.npcs.values()), None)
        except Exception as e:
            print(f"Warning: Could not load Guac NPC team from {guac_npc_team_dir}: {e}", file=sys.stderr)

    initial_guac_state = GuacState(
        current_mode=guac_default_mode,
        npc=current_npc,
        team=current_team,
        command_history=cmd_history,
        chat_model=npcsh_initial_state.chat_model,
        chat_provider=npcsh_initial_state.chat_provider,
        config_dir=guac_config_dir,
        src_dir=guac_src_dir,
        locals={}
    )

    try:
        setup_guac_readline(READLINE_HISTORY_FILE)
        atexit.register(save_guac_readline_history, READLINE_HISTORY_FILE)
    except Exception as e:
        print(f'Could not set up readline: {e}', file=sys.stderr)

    atexit.register(cmd_history.close)
    run_guac_repl(initial_guac_state)

def main():
    parser = argparse.ArgumentParser(description="Enter Guac Mode - Interactive Python with LLM assistance.")
    parser.add_argument("--config_dir", type=str, help="Guac configuration directory.")
    parser.add_argument("--plots_dir", type=str, help="Directory to save plots.")
    parser.add_argument("--npc_team_dir", type=str, default=os.path.expanduser('~/.npcsh/guac/npc_team/'),
                        help="NPC team directory for Guac.")
    parser.add_argument("--refresh_period", type=int, help="Number of commands before suggesting /refresh.")
    parser.add_argument("--default_mode", type=str, choices=["agent", "chat", "cmd", "ride"],
                        help="Default mode to start in.")

    args = parser.parse_args()

    enter_guac_mode(
        config_dir=args.config_dir,
        plots_dir=args.plots_dir,
        npc_team_dir=args.npc_team_dir,
        refresh_period=args.refresh_period,
        default_mode_choice=args.default_mode
    )

if __name__ == "__main__":
    main()
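The deleted module above exposed a programmatic entry point, `enter_guac_mode()`, alongside the argparse-based `main()`. For reference, a minimal sketch of how the removed REPL was started under 1.0.26, assuming that version is installed; this import path no longer exists in 1.2.32:

```python
# Start the guac REPL programmatically (npcpy 1.0.26 only; removed in 1.2.32).
from npcpy.modes.guac import enter_guac_mode

enter_guac_mode(default_mode_choice="agent")  # or "chat", "cmd", "ride"
```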