npcsh 1.0.28__py3-none-any.whl → 1.0.30__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npcsh/_state.py +18 -15
- npcsh/corca.py +130 -84
- npcsh/guac.py +157 -141
- npcsh/npcsh.py +15 -22
- {npcsh-1.0.28.dist-info → npcsh-1.0.30.dist-info}/METADATA +1 -1
- {npcsh-1.0.28.dist-info → npcsh-1.0.30.dist-info}/RECORD +36 -36
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/alicanto.npc +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/alicanto.png +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/bash_executer.jinx +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/corca.npc +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/corca.png +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/edit_file.jinx +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/foreman.npc +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/frederic.npc +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/frederic4.png +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/guac.png +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/image_generation.jinx +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/internet_search.jinx +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/kadiefa.png +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/plonk.npc +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/plonk.png +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/plonkjr.npc +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/plonkjr.png +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/python_executor.jinx +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/screen_cap.jinx +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/sibiji.npc +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/sibiji.png +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/spool.png +0 -0
- {npcsh-1.0.28.data → npcsh-1.0.30.data}/data/npcsh/npc_team/yap.png +0 -0
- {npcsh-1.0.28.dist-info → npcsh-1.0.30.dist-info}/WHEEL +0 -0
- {npcsh-1.0.28.dist-info → npcsh-1.0.30.dist-info}/entry_points.txt +0 -0
- {npcsh-1.0.28.dist-info → npcsh-1.0.30.dist-info}/licenses/LICENSE +0 -0
- {npcsh-1.0.28.dist-info → npcsh-1.0.30.dist-info}/top_level.txt +0 -0
npcsh/guac.py CHANGED
@@ -1,4 +1,3 @@
-from chroptiks.plotting_utils import *
 from datetime import datetime
 import json
 import numpy as np
@@ -7,12 +6,24 @@ import pandas as pd
 import sys
 import argparse
 import importlib.metadata
-import matplotlib
+import matplotlib
+import platform
+import queue
+plot_queue = queue.Queue()
+
+if platform.system() == 'Darwin':
+    try:
+        matplotlib.use('TkAgg')
+    except ImportError:
+        matplotlib.use('Agg')
+else:
+    matplotlib.use('TkAgg')
+
+import matplotlib.pyplot as plt
+from chroptiks.plotting_utils import *
 
 import logging
-plt.ioff()
 import shlex
-import platform
 import yaml
 import re
 from pathlib import Path
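Note on the header change above: 1.0.30 drops the unconditional plt.ioff() and instead selects a GUI backend before pyplot is imported, with macOS falling back to the non-interactive Agg backend when Tk cannot be loaded. A quick, standalone way to check what a given environment ended up with (a diagnostic sketch only, not part of the package):

import matplotlib

# Standard matplotlib calls; purely diagnostic.
print("backend:", matplotlib.get_backend())         # e.g. 'TkAgg' on a desktop, 'Agg' headless
print("interactive:", matplotlib.is_interactive())  # False unless plt.ion() has been called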
@@ -30,7 +41,7 @@ from npcpy.memory.command_history import CommandHistory, start_new_conversation
 from npcpy.npc_compiler import Team, NPC
 from npcpy.llm_funcs import get_llm_response
 from npcpy.npc_sysenv import render_markdown,print_and_process_stream
-
+from npcpy.data.load import load_file_contents
 
 from npcsh._state import (
     ShellState,
@@ -40,7 +51,8 @@ from npcsh._state import (
     readline_safe_prompt,
     setup_shell,
     get_multiline_input,
-    orange
+    orange,
+    get_team_ctx_path,
 )
 import threading
 import time
@@ -97,9 +109,6 @@ def _clear_readline_buffer():
     return False
 
 def _file_drop_monitor(npc_team_dir: Path, state: ShellState, locals_dict: Dict[str, Any], poll_interval: float = 0.2):
-    """
-    Background thread: poll readline.get_line_buffer() and process file drops immediately.
-    """
     processed_bufs = set()
     stop_event = _guac_monitor_stop_event
     while stop_event is None or not stop_event.is_set():
@@ -113,48 +122,35 @@ def _file_drop_monitor(npc_team_dir: Path, state: ShellState, locals_dict: Dict[
                 time.sleep(poll_interval)
                 continue
 
-
             candidate = buf.strip()
-
             if (candidate.startswith("'") and candidate.endswith("'")) or (candidate.startswith('"') and candidate.endswith('"')):
                 inner = candidate[1:-1]
             else:
                 inner = candidate
 
-
             if " " not in inner and Path(inner.replace('~', str(Path.home()))).expanduser().exists() and Path(inner.replace('~', str(Path.home()))).expanduser().is_file():
-
                 if buf in processed_bufs:
                     time.sleep(poll_interval)
                     continue
                 processed_bufs.add(buf)
 
-
                 try:
-
-
                     modified_input, processed_files = _handle_file_drop(buf, npc_team_dir)
                     if processed_files:
                         target_path = processed_files[0]
-
                         loading_code = _generate_file_analysis_code(inner, target_path)
-
-
-
-                        _state, exec_output = execute_python_code(loading_code, state, locals_dict)
-
-                        if exec_output:
-                            print(exec_output)
-
+
+                        plot_queue.put(('execute_code', loading_code, state, locals_dict))
+                        print("\n[guac] Detected file drop — queued for processing...")
                     _clear_readline_buffer()
                 except Exception as e:
                     print(f"[guac][ERROR] file drop processing failed: {e}")
         except Exception:
-
             pass
         time.sleep(poll_interval)
 
 
+
 def is_python_code(text: str) -> bool:
     text = text.strip()
     if not text:
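The hunk above changes what the background file-drop monitor does with the generated analysis code: instead of calling execute_python_code itself, it now puts the work on the module-level plot_queue, and the main REPL loop drains that queue with get_nowait (see the run_guac_repl hunk further down), which keeps execution, and any plotting it triggers, on the main thread. A minimal standalone sketch of that hand-off pattern, with illustrative names (worker, work_queue) that are not part of npcsh:

import queue
import threading
import time

work_queue = queue.Queue()  # thread-safe FIFO, same role as guac's plot_queue

def worker():
    # Background thread: detect work and enqueue it rather than running it here.
    for i in range(3):
        work_queue.put(('execute_code', f"print('job {i}')"))
        time.sleep(0.1)

threading.Thread(target=worker, daemon=True).start()

# Main loop: drain without blocking, mirroring the get_nowait() loop added to run_guac_repl.
for _ in range(10):
    try:
        while True:
            operation, code = work_queue.get_nowait()
            if operation == 'execute_code':
                exec(code)  # stand-in for execute_python_code(...)
    except queue.Empty:
        pass
    time.sleep(0.1)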
@@ -476,13 +472,43 @@ def ensure_global_guac_team():
     print(f"✅ Created global guac team.ctx at {ctx_path}")
 
     return team_dir
-
-
-
+
+
+def setup_guac_mode(config_dir=None,
+                    plots_dir=None,
+                    npc_team_dir=None,
+                    lang='python',
+                    default_mode_choice=None):
+    base_dir = Path.cwd()
 
-
-    if
-
+    local_npc_team = base_dir / "npc_team"
+    if local_npc_team.exists():
+        npc_team_dir = local_npc_team
+        workspace_dirs = _get_workspace_dirs(npc_team_dir)
+        _ensure_workspace_dirs(workspace_dirs)
+
+        team_ctx_path = npc_team_dir / "team.ctx"
+        existing_ctx = {}
+        if team_ctx_path.exists():
+            try:
+                with open(team_ctx_path, "r") as f:
+                    existing_ctx = yaml.safe_load(f) or {}
+            except Exception as e:
+                print(f"Warning: Could not read team.ctx: {e}")
+
+        package_root = existing_ctx.get("GUAC_PACKAGE_ROOT", str(base_dir))
+        package_name = existing_ctx.get("GUAC_PACKAGE_NAME", "project")
+        project_description = existing_ctx.get("GUAC_PROJECT_DESCRIPTION", "Local guac team")
+
+        return {
+            "language": lang, "package_root": Path(package_root), "plots_dir": plots_dir,
+            "npc_team_dir": npc_team_dir, "config_dir": config_dir, "default_mode": default_mode_choice or "agent",
+            "project_description": project_description, "package_name": package_name
+        }
+
+    global_flag_file = base_dir / ".npcsh_global"
+    if global_flag_file.exists() or os.environ.get("GUAC_USE_GLOBAL") == "1":
+        print("Using global Guac team")
         team_dir = ensure_global_guac_team()
         return {
             "language": lang, "package_root": team_dir, "plots_dir": plots_dir,
@@ -490,7 +516,6 @@ def setup_guac_mode(config_dir=None, plots_dir=None, npc_team_dir=None,
             "project_description": "Global guac team for analysis.", "package_name": "guac"
         }
 
-
     if npc_team_dir is None:
         npc_team_dir = base_dir / "npc_team"
     else:
@@ -518,8 +543,9 @@ def setup_guac_mode(config_dir=None, plots_dir=None, npc_team_dir=None,
             response = input("Enter package name (Enter for 'project'): ").strip()
             package_name = response if response else "project"
         except (KeyboardInterrupt, EOFError):
-            print("
-
+            print("Project setup interrupted. Falling back to global guac team...")
+            global_flag_file.touch()
+            os.environ["GUAC_USE_GLOBAL"] = "1"
             team_dir = ensure_global_guac_team()
             return {
                 "language": lang, "package_root": team_dir, "plots_dir": plots_dir,
@@ -573,6 +599,7 @@ setup(name="{package_name}", version="0.0.1", description="{desc}", packages=fin
         "npc_team_dir": npc_team_dir, "config_dir": config_dir, "default_mode": default_mode_val,
         "project_description": project_description, "package_name": package_name
     }
+
 def setup_npc_team(npc_team_dir, lang, is_subteam=False):
 
     guac_npc = {
@@ -694,50 +721,6 @@ class FileAnalysisState(Base):
     variable_names = Column(Text)
     timestamp = Column(DateTime, default=func.now())
 
-def _capture_plot_state(session_id: str, db_path: str, npc_team_dir: Path):
-    """Capture plot state if significant change"""
-    if not plt.get_fignums():
-        return
-
-    engine = create_engine(f'sqlite:///{db_path}')
-    Base.metadata.create_all(engine)
-    Session = sessionmaker(bind=engine)
-    session = Session()
-
-
-    fig = plt.gcf()
-    axes = fig.get_axes()
-    data_points = sum(len(line.get_xdata()) for ax in axes for line in ax.get_lines())
-
-
-    plot_hash = hashlib.md5(f"{len(axes)}{data_points}".encode()).hexdigest()
-
-    last = session.query(PlotState).filter(PlotState.session_id == session_id).order_by(PlotState.timestamp.desc()).first()
-    if last and last.plot_hash == plot_hash:
-        session.close()
-        return
-
-
-    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
-    workspace_dirs = _get_workspace_dirs(npc_team_dir)
-    plot_path = workspace_dirs["plots"] / f"state_{timestamp}.png"
-    plt.savefig(plot_path, dpi=150, bbox_inches='tight')
-
-
-    plot_state = PlotState(
-        session_id=session_id,
-        plot_hash=plot_hash,
-        plot_description=f"Plot with {len(axes)} axes, {data_points} points",
-        figure_path=str(plot_path),
-        data_summary=f"{data_points} data points",
-        change_significance=1.0 if not last else 0.5
-    )
-
-    session.add(plot_state)
-    session.commit()
-    session.close()
-    print(f"📊 Plot state captured -> {plot_path.name}")
-
 def _capture_file_state(session_id: str, db_path: str, file_path: str, analysis_code: str, locals_dict: Dict):
     """Capture file analysis state"""
     engine = create_engine(f'sqlite:///{db_path}')
@@ -986,24 +969,23 @@ def _handle_file_drop(input_text: str, npc_team_dir: Path) -> Tuple[str, List[st
 
     return modified_input, processed_files, file_paths
 
-
 def _capture_plot_state(session_id: str, db_path: str, npc_team_dir: Path):
-    """Capture plot state if significant change"""
     if not plt.get_fignums():
         return
 
     try:
+        workspace_dirs = _get_workspace_dirs(npc_team_dir)
+        workspace_dirs["plots"].mkdir(parents=True, exist_ok=True)
+
         engine = create_engine(f'sqlite:///{db_path}')
         Base.metadata.create_all(engine)
         Session = sessionmaker(bind=engine)
         session = Session()
 
-
         fig = plt.gcf()
         axes = fig.get_axes()
         data_points = sum(len(line.get_xdata()) for ax in axes for line in ax.get_lines())
 
-
         plot_hash = hashlib.md5(f"{len(axes)}{data_points}".encode()).hexdigest()
 
         last = session.query(PlotState).filter(PlotState.session_id == session_id).order_by(PlotState.timestamp.desc()).first()
@@ -1011,13 +993,10 @@ def _capture_plot_state(session_id: str, db_path: str, npc_team_dir: Path):
             session.close()
             return
 
-
         timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
-        workspace_dirs = _get_workspace_dirs(npc_team_dir)
         plot_path = workspace_dirs["plots"] / f"state_{timestamp}.png"
         plt.savefig(plot_path, dpi=150, bbox_inches='tight')
 
-
         plot_state = PlotState(
             session_id=session_id,
             plot_hash=plot_hash,
@@ -1026,15 +1005,14 @@ def _capture_plot_state(session_id: str, db_path: str, npc_team_dir: Path):
             data_summary=f"{data_points} data points",
             change_significance=1.0 if not last else 0.5
         )
-
+
         session.add(plot_state)
         session.commit()
         session.close()
-        print(f"📊 Plot state captured -> {plot_path.name}")
+        print(f"Plot state captured -> {plot_path.name}")
 
     except Exception as e:
         print(f"Error capturing plot state: {e}")
-
 def _capture_file_state(session_id: str, db_path: str, file_path: str, analysis_code: str, locals_dict: Dict):
     """Capture file analysis state"""
     try:
@@ -1165,14 +1143,12 @@ def _get_guac_agent_emoji(failures: int, max_fail: int = 3) -> str:
     return "🥑❓"
 
 
-GUAC_GLOBAL_FLAG_FILE = Path.home() / ".npcsh" / ".guac_use_global"
 
 
 def _run_agentic_mode(command: str,
                       state: ShellState,
                       locals_dict: Dict[str, Any],
                       npc_team_dir: Path) -> Tuple[ShellState, Any]:
-    """Run agentic mode with continuous iteration based on progress"""
     max_iterations = 5
     iteration = 0
     full_output = []
@@ -1180,6 +1156,37 @@ def _run_agentic_mode(command: str,
     consecutive_failures = 0
     max_consecutive_failures = 3
 
+    if len(state.messages) > 15:
+        planning_state = {
+            "goal": "ongoing guac session",
+            "facts": [f"Working in {state.current_path}", f"Variables: {list(locals_dict.keys())[:10]}"],
+            "successes": [],
+            "mistakes": [],
+            "todos": [],
+            "constraints": ["Focus on Python code execution", "Use existing variables when possible"]
+        }
+        compressed_state = state.npc.compress_planning_state(planning_state)
+        state.messages = [{"role": "system", "content": f"Session context: {compressed_state}"}]
+
+    existing_vars_context = "EXISTING VARIABLES IN ENVIRONMENT:\n"
+    for var_name, var_value in locals_dict.items():
+        if not var_name.startswith('_') and var_name not in ['In', 'Out', 'exit', 'quit', 'get_ipython']:
+            try:
+                var_type = type(var_value).__name__
+                var_repr = repr(var_value)
+                if len(var_repr) > 100:
+                    var_repr = var_repr[:97] + "..."
+                existing_vars_context += f"- {var_name} ({var_type}): {var_repr}\n"
+            except:
+                existing_vars_context += f"- {var_name} ({type(var_value).__name__}): <unrepresentable>\n"
+    previous_code = ''
+    next_step = ''
+    steps = []
+    while iteration < max_iterations and consecutive_failures < max_consecutive_failures:
+        iteration += 1
+        print(f"\n{_get_guac_agent_emoji(consecutive_failures, max_consecutive_failures)} Agentic iteration {iteration} ")
+
+
 
         existing_vars_context = "EXISTING VARIABLES IN ENVIRONMENT:\n"
         for var_name, var_value in locals_dict.items():
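This hunk (and the matching one in npcsh.py further below, with a threshold of 20 rather than 15) adds the same guard: once the message history grows past the threshold, a small planning-state dict is summarized via the NPC's compress_planning_state and the whole history is replaced by one system message. A minimal sketch of the idea with a stand-in summarizer; only the call site is visible in this diff, so the real signature and output of compress_planning_state live in npcpy, not here:

def compress_history(messages, threshold, summarize):
    # Illustrative only: collapse a long history into a single system message.
    # `summarize` stands in for npc.compress_planning_state.
    if len(messages) <= threshold:
        return messages
    summary = summarize({
        "goal": "ongoing session",
        "facts": [m["content"][:80] for m in messages if m.get("role") == "user"][-5:],
    })
    return [{"role": "system", "content": f"Session context: {summary}"}]

msgs = [{"role": "user", "content": f"message {i}"} for i in range(30)]
msgs = compress_history(msgs, threshold=20,
                        summarize=lambda s: f"{len(s['facts'])} recent user messages")
print(msgs)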
@@ -1211,8 +1218,9 @@ def _run_agentic_mode(command: str,
 
 DO NOT SIMPLY COPY A PREVIOUS ATTEMPT.
 
-Your goal is to generate Python code that BUILDS ON EXISTING VARIABLES to
+Your goal is to generate Python code that BUILDS ON EXISTING VARIABLES to respond to this task: USER TASK: "{current_command}", with this next step planned: `{next_step} `
 
+If there is no relevant code to build on or the user is simply asking a question, generate new code as needed to respond to their questions.
 
 You will notice in the local envs that there are functions for reading, editing, and loading files.
 You should use these to your advantage as they will help you to clearly understand the user's system best.
@@ -1250,21 +1258,26 @@ def _run_agentic_mode(command: str,
 
 Do not over- complicate the code.
 
-
+DO NOT include any '__name__'=='__main__' block.
 """
 
         npc_model = state.npc.model if state.npc and state.npc.model else state.chat_model
         npc_provider = state.npc.provider if state.npc and state.npc.provider else state.chat_provider
 
+        print(state.npc.model)
+        print(state.chat_model)
         llm_response = get_llm_response(prompt,
                                         npc=state.npc,
                                         stream=True,
                                         messages=state.messages,
                                         thinking=False)
 
+        print(llm_response.get('response'))
+        print(npc_model, npc_provider)
+
         generated_code = print_and_process_stream(llm_response.get('response'),
                                                   npc_model,
-                                                  npc_provider
+                                                  npc_provider,
         )
 
         state.messages.append({'role':'user', 'content':current_command })
@@ -1560,8 +1573,6 @@ def execute_guac_command(command: str, state: ShellState, locals_dict: Dict[str,
 def run_guac_repl(state: ShellState, project_name: str, package_root: Path, package_name: str):
     from npcsh.routes import router
 
-
-
     npc_team_dir = Path.cwd() / "npc_team"
     workspace_dirs = _get_workspace_dirs(npc_team_dir)
     _ensure_workspace_dirs(workspace_dirs)
@@ -1593,15 +1604,8 @@ def run_guac_repl(state: ShellState, project_name: str, package_root: Path, pack
 
     except Exception as e:
         print(f"Warning: Could not load package {package_name}: {e}", file=sys.stderr)
-
-    from npcpy.data.load import load_file_contents
 
     def read_file(file_path, max_lines=10000, encoding='utf-8'):
-        """
-        Read and print file contents up to max_lines.
-        Uses npcpy.data.load for specialized file types, falls back to text reading.
-        Returns the content as a string for further processing.
-        """
         path = Path(file_path).expanduser().resolve()
 
         if not path.exists():
@@ -1613,7 +1617,6 @@ def run_guac_repl(state: ShellState, project_name: str, package_root: Path, pack
             return None
 
         try:
-
             file_ext = path.suffix.upper().lstrip('.')
             if file_ext in ['PDF', 'DOCX', 'PPTX', 'HTML', 'HTM', 'CSV', 'XLS', 'XLSX', 'JSON']:
                 chunks = load_file_contents(str(path), chunk_size=10000)
@@ -1635,7 +1638,6 @@ def run_guac_repl(state: ShellState, project_name: str, package_root: Path, pack
                 print(f"End of {path.name}")
                 return content
 
-
             with open(path, 'r', encoding=encoding) as f:
                 lines = []
                 for i, line in enumerate(f, 1):
@@ -1674,19 +1676,10 @@ def run_guac_repl(state: ShellState, project_name: str, package_root: Path, pack
             return None
 
     def edit_file(file_path, content=None, line_number=None, new_line=None, insert_at=None, append=False, backup=True):
-        """
-        Edit file contents in various ways:
-        - edit_file(path, content="new content") - replace entire file
-        - edit_file(path, line_number=5, new_line="new text") - replace specific line
-        - edit_file(path, insert_at=5, new_line="inserted text") - insert at line
-        - edit_file(path, append=True, content="appended") - append to file
-        """
         path = Path(file_path).expanduser().resolve()
 
-
        path.parent.mkdir(parents=True, exist_ok=True)
 
-
        if backup and path.exists():
            backup_path = path.with_suffix(path.suffix + '.backup')
            import shutil
@@ -1694,7 +1687,6 @@ def run_guac_repl(state: ShellState, project_name: str, package_root: Path, pack
             print(f"Backup saved: {backup_path.name}")
 
         try:
-
             existing_lines = []
             if path.exists():
                 with open(path, 'r', encoding='utf-8') as f:
@@ -1754,10 +1746,6 @@ def run_guac_repl(state: ShellState, project_name: str, package_root: Path, pack
             return False
 
     def load_file(file_path):
-        """
-        Simple wrapper around npcpy's load_file_contents for direct data loading.
-        Returns the loaded data in appropriate format.
-        """
         path = Path(file_path).expanduser().resolve()
 
         if not path.exists():
@@ -1782,7 +1770,6 @@ def run_guac_repl(state: ShellState, project_name: str, package_root: Path, pack
         'load_file':load_file,
     }
 
-
     locals_dict.update(core_imports)
     locals_dict.update({f"guac_{k}": v for k, v in workspace_dirs.items()})
 
@@ -1801,10 +1788,20 @@ def run_guac_repl(state: ShellState, project_name: str, package_root: Path, pack
 
     while True:
         try:
+            try:
+                while True:
+                    operation, code, exec_state, exec_locals = plot_queue.get_nowait()
+                    if operation == 'execute_code':
+                        print("\n[guac] Processing queued file drop...")
+                        exec_state, exec_output = execute_python_code(code, exec_state, exec_locals)
+                        if exec_output:
+                            print(exec_output)
+            except queue.Empty:
+                pass
+
             state.current_path = os.getcwd()
 
             display_model = state.chat_model
-
             if isinstance(state.npc, NPC) and state.npc.model:
                 display_model = state.npc.model
 
@@ -1865,7 +1862,6 @@ def run_guac_repl(state: ShellState, project_name: str, package_root: Path, pack
 
 
 
-
 def enter_guac_mode(npc=None,
                     team=None,
                     config_dir=None,
@@ -1897,25 +1893,6 @@ def enter_guac_mode(npc=None,
 
     command_history, default_team, default_npc = setup_shell()
 
-
-    if npc is None and default_npc is None:
-
-        guac_npc_path = Path(npc_team_dir) / "guac.npc"
-        if guac_npc_path.exists():
-            npc = NPC(file=str(guac_npc_path), db_conn=command_history.engine)
-
-            team_ctx_path = Path(npc_team_dir) / "team.ctx"
-            if team_ctx_path.exists():
-                with open(team_ctx_path, "r") as f:
-                    team_ctx = yaml.safe_load(f) or {}
-            team = Team(team_path=str(npc_team_dir), forenpc=npc, jinxs={})
-            team.name = team_ctx.get("team_name", "guac_global_team")
-        else:
-            raise RuntimeError(f"No NPC loaded and {guac_npc_path} not found!")
-    elif default_npc and npc is None:
-
-        npc = default_npc
-
 
     state = ShellState(
         conversation_id=start_new_conversation(),
@@ -1930,6 +1907,45 @@ def enter_guac_mode(npc=None,
 
     state.command_history = command_history
 
+    if npc is None and default_npc is None:
+        guac_npc_path = Path(npc_team_dir) / "guac.npc"
+        if guac_npc_path.exists():
+            npc = NPC(file=str(guac_npc_path),
+                      db_conn=command_history.engine)
+            print(guac_npc_path, npc)
+
+            team_ctx_path = get_team_ctx_path(str(npc_team_dir))
+            team_ctx = {}
+            if team_ctx_path and Path(team_ctx_path).exists():
+                with open(team_ctx_path, "r") as f:
+                    team_ctx = yaml.safe_load(f) or {}
+            print(team_ctx, team_ctx_path)
+            team = Team(team_path=str(npc_team_dir),
+                        forenpc=npc,
+                        jinxs={})
+            team.name = team_ctx.get("team_name", "guac_global_team")
+            team.team_ctx = team_ctx
+            print(team)
+            if npc.model is None:
+                npc.model = team_ctx.get("model", state.chat_model)
+            if npc.provider is None:
+                npc.provider = team_ctx.get("provider", state.chat_provider)
+
+            for npc_name, npc_obj in team.npcs.items():
+                if not npc_obj.model:
+                    npc_obj.model = team_ctx.get("model", state.chat_model)
+                if not npc_obj.provider:
+                    npc_obj.provider = team_ctx.get("provider", state.chat_provider)
+        else:
+            raise RuntimeError(f"No NPC loaded and {guac_npc_path} not found!")
+    elif default_npc and npc is None:
+        npc = default_npc
+    state.npc = npc or default_npc
+    state.team = team or default_team
+
+    state.plots_dir = setup_result.get("plots_dir")
+    state.config_dir = setup_result.get("config_dir")
+
     try:
         readline.read_history_file(READLINE_HISTORY_FILE)
         readline.set_history_length(1000)
npcsh/npcsh.py CHANGED
@@ -68,22 +68,14 @@ Begin by asking a question, issuing a bash command, or typing '/help' for more i
 )
 
 
-
 def run_repl(command_history: CommandHistory, initial_state: ShellState):
-
-
-    '''
-    Func for running the npcsh repl
-    '''
     state = initial_state
     print_welcome_message()
 
-
     render_markdown(f'- Using {state.current_mode} mode. Use /agent, /cmd, or /chat to switch to other modes')
     render_markdown(f'- To switch to a different NPC, type /npc <npc_name> or /n <npc_name> to switch to that NPC.')
     render_markdown('\n- Here are the current NPCs available in your team: ' + ', '.join([npc_name for npc_name in state.team.npcs.keys()]))
 
-
     is_windows = platform.system().lower().startswith("win")
     try:
         completer = make_completer(state)
@@ -92,23 +84,16 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState):
         pass
     session_scopes = set()
 
-
     def exit_shell(current_state: ShellState):
-        """
-        On exit, iterates through all active scopes from the session and
-        creates/updates the specific knowledge graph for each one.
-        """
         print("\nGoodbye!")
         print(colored("Processing and archiving all session knowledge...", "cyan"))
 
         engine = command_history.engine
 
-
         for team_name, npc_name, path in session_scopes:
             try:
                 print(f" -> Archiving knowledge for: T='{team_name}', N='{npc_name}', P='{path}'")
 
-
                 convo_id = current_state.conversation_id
                 all_messages = command_history.get_conversations_by_id(convo_id)
 
@@ -123,10 +108,8 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState):
                     print(" ...No content for this scope, skipping.")
                     continue
 
-
                 current_kg = load_kg_from_db(engine, team_name, npc_name, path)
 
-
                 evolved_kg, _ = kg_evolve_incremental(
                     existing_kg=current_kg,
                     new_content_text=full_text,
@@ -137,10 +120,8 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState):
                     link_concepts_facts = True,
                     link_concepts_concepts = True,
                     link_facts_facts = True,
-
                 )
 
-
                 save_kg_to_db(engine,
                               evolved_kg,
                               team_name,
@@ -154,10 +135,20 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState):
 
         sys.exit(0)
 
-
-
     while True:
         try:
+            if len(state.messages) > 20:
+                planning_state = {
+                    "goal": "ongoing npcsh session",
+                    "facts": [f"Working in {state.current_path}", f"Current mode: {state.current_mode}"],
+                    "successes": [],
+                    "mistakes": [],
+                    "todos": [],
+                    "constraints": ["Follow user requests", "Use appropriate mode for tasks"]
+                }
+                compressed_state = state.npc.compress_planning_state(planning_state)
+                state.messages = [{"role": "system", "content": f"Session context: {compressed_state}"}]
+
             try:
                 completer = make_completer(state)
                 readline.set_completer(completer)
@@ -198,6 +189,7 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState):
                 continue
             else:
                 exit_shell(state)
+
             team_name = state.team.name if state.team else "__none__"
             npc_name = state.npc.name if isinstance(state.npc, NPC) else "__none__"
             session_scopes.add((team_name, npc_name, state.current_path))
@@ -224,7 +216,8 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState):
             if is_windows and "EOF" in str(e).lower():
                 print("\nHint: On Windows, use Ctrl+Z then Enter for EOF, or type 'exit'")
                 continue
-            raise
+            raise
+
 
 def main() -> None:
     parser = argparse.ArgumentParser(description="npcsh - An NPC-powered shell.")