npcpy 1.0.26__py3-none-any.whl → 1.2.32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npcpy/__init__.py +0 -7
- npcpy/data/audio.py +16 -99
- npcpy/data/image.py +43 -42
- npcpy/data/load.py +83 -124
- npcpy/data/text.py +28 -28
- npcpy/data/video.py +8 -32
- npcpy/data/web.py +51 -23
- npcpy/ft/diff.py +110 -0
- npcpy/ft/ge.py +115 -0
- npcpy/ft/memory_trainer.py +171 -0
- npcpy/ft/model_ensembler.py +357 -0
- npcpy/ft/rl.py +360 -0
- npcpy/ft/sft.py +248 -0
- npcpy/ft/usft.py +128 -0
- npcpy/gen/audio_gen.py +24 -0
- npcpy/gen/embeddings.py +13 -13
- npcpy/gen/image_gen.py +262 -117
- npcpy/gen/response.py +615 -415
- npcpy/gen/video_gen.py +53 -7
- npcpy/llm_funcs.py +1869 -437
- npcpy/main.py +1 -1
- npcpy/memory/command_history.py +844 -510
- npcpy/memory/kg_vis.py +833 -0
- npcpy/memory/knowledge_graph.py +892 -1845
- npcpy/memory/memory_processor.py +81 -0
- npcpy/memory/search.py +188 -90
- npcpy/mix/debate.py +192 -3
- npcpy/npc_compiler.py +1672 -801
- npcpy/npc_sysenv.py +593 -1266
- npcpy/serve.py +3120 -0
- npcpy/sql/ai_function_tools.py +257 -0
- npcpy/sql/database_ai_adapters.py +186 -0
- npcpy/sql/database_ai_functions.py +163 -0
- npcpy/sql/model_runner.py +19 -19
- npcpy/sql/npcsql.py +706 -507
- npcpy/sql/sql_model_compiler.py +156 -0
- npcpy/tools.py +183 -0
- npcpy/work/plan.py +13 -279
- npcpy/work/trigger.py +3 -3
- npcpy-1.2.32.dist-info/METADATA +803 -0
- npcpy-1.2.32.dist-info/RECORD +54 -0
- npcpy/data/dataframes.py +0 -171
- npcpy/memory/deep_research.py +0 -125
- npcpy/memory/sleep.py +0 -557
- npcpy/modes/_state.py +0 -78
- npcpy/modes/alicanto.py +0 -1075
- npcpy/modes/guac.py +0 -785
- npcpy/modes/mcp_npcsh.py +0 -822
- npcpy/modes/npc.py +0 -213
- npcpy/modes/npcsh.py +0 -1158
- npcpy/modes/plonk.py +0 -409
- npcpy/modes/pti.py +0 -234
- npcpy/modes/serve.py +0 -1637
- npcpy/modes/spool.py +0 -312
- npcpy/modes/wander.py +0 -549
- npcpy/modes/yap.py +0 -572
- npcpy/npc_team/alicanto.npc +0 -2
- npcpy/npc_team/alicanto.png +0 -0
- npcpy/npc_team/assembly_lines/test_pipeline.py +0 -181
- npcpy/npc_team/corca.npc +0 -13
- npcpy/npc_team/foreman.npc +0 -7
- npcpy/npc_team/frederic.npc +0 -6
- npcpy/npc_team/frederic4.png +0 -0
- npcpy/npc_team/guac.png +0 -0
- npcpy/npc_team/jinxs/automator.jinx +0 -18
- npcpy/npc_team/jinxs/bash_executer.jinx +0 -31
- npcpy/npc_team/jinxs/calculator.jinx +0 -11
- npcpy/npc_team/jinxs/edit_file.jinx +0 -96
- npcpy/npc_team/jinxs/file_chat.jinx +0 -14
- npcpy/npc_team/jinxs/gui_controller.jinx +0 -28
- npcpy/npc_team/jinxs/image_generation.jinx +0 -29
- npcpy/npc_team/jinxs/internet_search.jinx +0 -30
- npcpy/npc_team/jinxs/local_search.jinx +0 -152
- npcpy/npc_team/jinxs/npcsh_executor.jinx +0 -31
- npcpy/npc_team/jinxs/python_executor.jinx +0 -8
- npcpy/npc_team/jinxs/screen_cap.jinx +0 -25
- npcpy/npc_team/jinxs/sql_executor.jinx +0 -33
- npcpy/npc_team/kadiefa.npc +0 -3
- npcpy/npc_team/kadiefa.png +0 -0
- npcpy/npc_team/npcsh.ctx +0 -9
- npcpy/npc_team/npcsh_sibiji.png +0 -0
- npcpy/npc_team/plonk.npc +0 -2
- npcpy/npc_team/plonk.png +0 -0
- npcpy/npc_team/plonkjr.npc +0 -2
- npcpy/npc_team/plonkjr.png +0 -0
- npcpy/npc_team/sibiji.npc +0 -5
- npcpy/npc_team/sibiji.png +0 -0
- npcpy/npc_team/spool.png +0 -0
- npcpy/npc_team/templates/analytics/celona.npc +0 -0
- npcpy/npc_team/templates/hr_support/raone.npc +0 -0
- npcpy/npc_team/templates/humanities/eriane.npc +0 -4
- npcpy/npc_team/templates/it_support/lineru.npc +0 -0
- npcpy/npc_team/templates/marketing/slean.npc +0 -4
- npcpy/npc_team/templates/philosophy/maurawa.npc +0 -0
- npcpy/npc_team/templates/sales/turnic.npc +0 -4
- npcpy/npc_team/templates/software/welxor.npc +0 -0
- npcpy/npc_team/yap.png +0 -0
- npcpy/routes.py +0 -958
- npcpy/work/mcp_helpers.py +0 -357
- npcpy/work/mcp_server.py +0 -194
- npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/automator.jinx +0 -18
- npcpy-1.0.26.data/data/npcpy/npc_team/bash_executer.jinx +0 -31
- npcpy-1.0.26.data/data/npcpy/npc_team/calculator.jinx +0 -11
- npcpy-1.0.26.data/data/npcpy/npc_team/celona.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/corca.npc +0 -13
- npcpy-1.0.26.data/data/npcpy/npc_team/edit_file.jinx +0 -96
- npcpy-1.0.26.data/data/npcpy/npc_team/eriane.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/file_chat.jinx +0 -14
- npcpy-1.0.26.data/data/npcpy/npc_team/foreman.npc +0 -7
- npcpy-1.0.26.data/data/npcpy/npc_team/frederic.npc +0 -6
- npcpy-1.0.26.data/data/npcpy/npc_team/frederic4.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/guac.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/gui_controller.jinx +0 -28
- npcpy-1.0.26.data/data/npcpy/npc_team/image_generation.jinx +0 -29
- npcpy-1.0.26.data/data/npcpy/npc_team/internet_search.jinx +0 -30
- npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.npc +0 -3
- npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/lineru.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/local_search.jinx +0 -152
- npcpy-1.0.26.data/data/npcpy/npc_team/maurawa.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh.ctx +0 -9
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_executor.jinx +0 -31
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_sibiji.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/plonk.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/plonk.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/python_executor.jinx +0 -8
- npcpy-1.0.26.data/data/npcpy/npc_team/raone.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/screen_cap.jinx +0 -25
- npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.npc +0 -5
- npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/slean.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/spool.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/sql_executor.jinx +0 -33
- npcpy-1.0.26.data/data/npcpy/npc_team/test_pipeline.py +0 -181
- npcpy-1.0.26.data/data/npcpy/npc_team/turnic.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/welxor.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/yap.png +0 -0
- npcpy-1.0.26.dist-info/METADATA +0 -827
- npcpy-1.0.26.dist-info/RECORD +0 -139
- npcpy-1.0.26.dist-info/entry_points.txt +0 -11
- /npcpy/{modes → ft}/__init__.py +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/WHEEL +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/licenses/LICENSE +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/top_level.txt +0 -0
npcpy/modes/serve.py
DELETED
|
@@ -1,1637 +0,0 @@
|
|
|
1
|
-
from flask import Flask, request, jsonify, Response
|
|
2
|
-
import configparser # Add this with your other imports
|
|
3
|
-
from flask_sse import sse
|
|
4
|
-
import redis
|
|
5
|
-
|
|
6
|
-
from flask_cors import CORS
|
|
7
|
-
import os
|
|
8
|
-
import sqlite3
|
|
9
|
-
from datetime import datetime
|
|
10
|
-
import json
|
|
11
|
-
from pathlib import Path
|
|
12
|
-
import yaml
|
|
13
|
-
from dotenv import load_dotenv
|
|
14
|
-
|
|
15
|
-
from PIL import Image
|
|
16
|
-
from PIL import ImageFile
|
|
17
|
-
from io import BytesIO
|
|
18
|
-
|
|
19
|
-
from npcpy.npc_sysenv import get_locally_available_models
|
|
20
|
-
from npcpy.memory.command_history import (
|
|
21
|
-
CommandHistory,
|
|
22
|
-
save_conversation_message,
|
|
23
|
-
)
|
|
24
|
-
from npcpy.npc_compiler import Jinx, NPC
|
|
25
|
-
|
|
26
|
-
from npcpy.llm_funcs import (
|
|
27
|
-
get_llm_response, check_llm_command
|
|
28
|
-
)
|
|
29
|
-
from npcpy.npc_compiler import NPC
|
|
30
|
-
import base64
|
|
31
|
-
|
|
32
|
-
import json
|
|
33
|
-
import os
|
|
34
|
-
from pathlib import Path
|
|
35
|
-
from flask_cors import CORS
|
|
36
|
-
|
|
37
|
-
# Path for storing settings
|
|
38
|
-
SETTINGS_FILE = Path(os.path.expanduser("~/.npcshrc"))
|
|
39
|
-
|
|
40
|
-
# Configuration
|
|
41
|
-
db_path = os.path.expanduser("~/npcsh_history.db")
|
|
42
|
-
user_npc_directory = os.path.expanduser("~/.npcsh/npc_team")
|
|
43
|
-
# Make project_npc_directory a function that updates based on current path
|
|
44
|
-
# instead of a static path relative to server launch directory
|
|
45
|
-
def get_project_npc_directory(current_path=None):
|
|
46
|
-
"""
|
|
47
|
-
Get the project NPC directory based on the current path
|
|
48
|
-
|
|
49
|
-
Args:
|
|
50
|
-
current_path: The current path where project NPCs should be looked for
|
|
51
|
-
|
|
52
|
-
Returns:
|
|
53
|
-
Path to the project's npc_team directory
|
|
54
|
-
"""
|
|
55
|
-
if current_path:
|
|
56
|
-
return os.path.join(current_path, "npc_team")
|
|
57
|
-
else:
|
|
58
|
-
# Fallback to the old behavior if no path provided
|
|
59
|
-
return os.path.abspath("./npc_team")
|
|
60
|
-
|
|
61
|
-
def load_project_env(current_path):
|
|
62
|
-
"""
|
|
63
|
-
Load environment variables from a project's .env file
|
|
64
|
-
|
|
65
|
-
Args:
|
|
66
|
-
current_path: The current project directory path
|
|
67
|
-
|
|
68
|
-
Returns:
|
|
69
|
-
Dictionary of environment variables that were loaded
|
|
70
|
-
"""
|
|
71
|
-
if not current_path:
|
|
72
|
-
return {}
|
|
73
|
-
|
|
74
|
-
env_path = os.path.join(current_path, ".env")
|
|
75
|
-
loaded_vars = {}
|
|
76
|
-
|
|
77
|
-
if os.path.exists(env_path):
|
|
78
|
-
print(f"Loading project environment from {env_path}")
|
|
79
|
-
# Load the environment variables into the current process
|
|
80
|
-
# Note: load_dotenv returns a boolean, not a dictionary
|
|
81
|
-
success = load_dotenv(env_path, override=True)
|
|
82
|
-
|
|
83
|
-
if success:
|
|
84
|
-
# Manually build a dictionary of loaded variables
|
|
85
|
-
with open(env_path, "r") as f:
|
|
86
|
-
for line in f:
|
|
87
|
-
line = line.strip()
|
|
88
|
-
if line and not line.startswith("#"):
|
|
89
|
-
if "=" in line:
|
|
90
|
-
key, value = line.split("=", 1)
|
|
91
|
-
loaded_vars[key.strip()] = value.strip().strip("\"'")
|
|
92
|
-
|
|
93
|
-
print(f"Loaded {len(loaded_vars)} variables from project .env file")
|
|
94
|
-
else:
|
|
95
|
-
print(f"Failed to load environment variables from {env_path}")
|
|
96
|
-
else:
|
|
97
|
-
print(f"No .env file found at {env_path}")
|
|
98
|
-
|
|
99
|
-
return loaded_vars
|
|
100
|
-
|
|
101
|
-
# Initialize components
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
app = Flask(__name__)
|
|
105
|
-
app.config["REDIS_URL"] = "redis://localhost:6379"
|
|
106
|
-
app.register_blueprint(sse, url_prefix="/stream")
|
|
107
|
-
|
|
108
|
-
redis_client = redis.Redis(host="localhost", port=6379, decode_responses=True)
|
|
109
|
-
|
|
110
|
-
available_models = {}
|
|
111
|
-
CORS(
|
|
112
|
-
app,
|
|
113
|
-
origins=["http://localhost:5173"],
|
|
114
|
-
allow_headers=["Content-Type", "Authorization"],
|
|
115
|
-
methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
|
|
116
|
-
supports_credentials=True,
|
|
117
|
-
)
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
def get_db_connection():
|
|
121
|
-
conn = sqlite3.connect(db_path)
|
|
122
|
-
conn.row_factory = sqlite3.Row
|
|
123
|
-
return conn
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
extension_map = {
|
|
127
|
-
"PNG": "images",
|
|
128
|
-
"JPG": "images",
|
|
129
|
-
"JPEG": "images",
|
|
130
|
-
"GIF": "images",
|
|
131
|
-
"SVG": "images",
|
|
132
|
-
"MP4": "videos",
|
|
133
|
-
"AVI": "videos",
|
|
134
|
-
"MOV": "videos",
|
|
135
|
-
"WMV": "videos",
|
|
136
|
-
"MPG": "videos",
|
|
137
|
-
"MPEG": "videos",
|
|
138
|
-
"DOC": "documents",
|
|
139
|
-
"DOCX": "documents",
|
|
140
|
-
"PDF": "documents",
|
|
141
|
-
"PPT": "documents",
|
|
142
|
-
"PPTX": "documents",
|
|
143
|
-
"XLS": "documents",
|
|
144
|
-
"XLSX": "documents",
|
|
145
|
-
"TXT": "documents",
|
|
146
|
-
"CSV": "documents",
|
|
147
|
-
"ZIP": "archives",
|
|
148
|
-
"RAR": "archives",
|
|
149
|
-
"7Z": "archives",
|
|
150
|
-
"TAR": "archives",
|
|
151
|
-
"GZ": "archives",
|
|
152
|
-
"BZ2": "archives",
|
|
153
|
-
"ISO": "archives",
|
|
154
|
-
}
|
|
155
|
-
def load_npc_by_name_and_source(name, source, db_conn=None, current_path=None):
|
|
156
|
-
"""
|
|
157
|
-
Loads an NPC from either project or global directory based on source
|
|
158
|
-
|
|
159
|
-
Args:
|
|
160
|
-
name: The name of the NPC to load
|
|
161
|
-
source: Either 'project' or 'global' indicating where to look for the NPC
|
|
162
|
-
db_conn: Optional database connection
|
|
163
|
-
current_path: The current path where project NPCs should be looked for
|
|
164
|
-
|
|
165
|
-
Returns:
|
|
166
|
-
NPC object or None if not found
|
|
167
|
-
"""
|
|
168
|
-
if not db_conn:
|
|
169
|
-
db_conn = get_db_connection()
|
|
170
|
-
|
|
171
|
-
# Determine which directory to search
|
|
172
|
-
if source == 'project':
|
|
173
|
-
npc_directory = get_project_npc_directory(current_path)
|
|
174
|
-
print(f"Looking for project NPC in: {npc_directory}")
|
|
175
|
-
else: # Default to global if not specified or unknown
|
|
176
|
-
npc_directory = user_npc_directory
|
|
177
|
-
print(f"Looking for global NPC in: {npc_directory}")
|
|
178
|
-
|
|
179
|
-
# Look for the NPC file in the appropriate directory
|
|
180
|
-
npc_path = os.path.join(npc_directory, f"{name}.npc")
|
|
181
|
-
|
|
182
|
-
if os.path.exists(npc_path):
|
|
183
|
-
try:
|
|
184
|
-
npc = NPC(file=npc_path, db_conn=db_conn)
|
|
185
|
-
return npc
|
|
186
|
-
except Exception as e:
|
|
187
|
-
print(f"Error loading NPC {name} from {source}: {str(e)}")
|
|
188
|
-
return None
|
|
189
|
-
else:
|
|
190
|
-
print(f"NPC file not found: {npc_path}")
|
|
191
|
-
return None
|
|
192
|
-
|
|
193
|
-
def fetch_messages_for_conversation(conversation_id):
|
|
194
|
-
conn = get_db_connection()
|
|
195
|
-
cursor = conn.cursor()
|
|
196
|
-
|
|
197
|
-
query = """
|
|
198
|
-
SELECT role, content, timestamp
|
|
199
|
-
FROM conversation_history
|
|
200
|
-
WHERE conversation_id = ?
|
|
201
|
-
ORDER BY timestamp ASC
|
|
202
|
-
"""
|
|
203
|
-
cursor.execute(query, (conversation_id,))
|
|
204
|
-
messages = cursor.fetchall()
|
|
205
|
-
conn.close()
|
|
206
|
-
|
|
207
|
-
return [
|
|
208
|
-
{
|
|
209
|
-
"role": message["role"],
|
|
210
|
-
"content": message["content"],
|
|
211
|
-
"timestamp": message["timestamp"],
|
|
212
|
-
}
|
|
213
|
-
for message in messages
|
|
214
|
-
]
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
@app.route("/api/attachments/<message_id>", methods=["GET"])
|
|
218
|
-
def get_message_attachments(message_id):
|
|
219
|
-
"""Get all attachments for a message"""
|
|
220
|
-
try:
|
|
221
|
-
command_history = CommandHistory(db_path)
|
|
222
|
-
attachments = command_history.get_message_attachments(message_id)
|
|
223
|
-
return jsonify({"attachments": attachments, "error": None})
|
|
224
|
-
except Exception as e:
|
|
225
|
-
return jsonify({"error": str(e)}), 500
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
@app.route("/api/attachment/<attachment_id>", methods=["GET"])
|
|
229
|
-
def get_attachment(attachment_id):
|
|
230
|
-
"""Get specific attachment data"""
|
|
231
|
-
try:
|
|
232
|
-
command_history = CommandHistory(db_path)
|
|
233
|
-
data, name, type = command_history.get_attachment_data(attachment_id)
|
|
234
|
-
|
|
235
|
-
if data:
|
|
236
|
-
# Convert binary data to base64 for sending
|
|
237
|
-
base64_data = base64.b64encode(data).decode("utf-8")
|
|
238
|
-
return jsonify(
|
|
239
|
-
{"data": base64_data, "name": name, "type": type, "error": None}
|
|
240
|
-
)
|
|
241
|
-
return jsonify({"error": "Attachment not found"}), 404
|
|
242
|
-
except Exception as e:
|
|
243
|
-
return jsonify({"error": str(e)}), 500
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
@app.route("/api/capture_screenshot", methods=["GET"])
|
|
247
|
-
def capture():
|
|
248
|
-
# Capture screenshot using NPC-based method
|
|
249
|
-
screenshot = capture_screenshot(None, full=True)
|
|
250
|
-
|
|
251
|
-
# Ensure screenshot was captured successfully
|
|
252
|
-
if not screenshot:
|
|
253
|
-
print("Screenshot capture failed")
|
|
254
|
-
return None
|
|
255
|
-
|
|
256
|
-
return jsonify({"screenshot": screenshot})
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
@app.route("/api/settings/global", methods=["GET", "OPTIONS"])
|
|
260
|
-
def get_global_settings():
|
|
261
|
-
if request.method == "OPTIONS":
|
|
262
|
-
return "", 200
|
|
263
|
-
|
|
264
|
-
try:
|
|
265
|
-
npcshrc_path = os.path.expanduser("~/.npcshrc")
|
|
266
|
-
|
|
267
|
-
# Default settings
|
|
268
|
-
global_settings = {
|
|
269
|
-
"model": "llama3.2",
|
|
270
|
-
"provider": "ollama",
|
|
271
|
-
"embedding_model": "nomic-embed-text",
|
|
272
|
-
"embedding_provider": "ollama",
|
|
273
|
-
"search_provider": "perplexity",
|
|
274
|
-
"NPCSH_LICENSE_KEY": "",
|
|
275
|
-
"default_folder": os.path.expanduser("~/.npcsh/"),
|
|
276
|
-
}
|
|
277
|
-
global_vars = {}
|
|
278
|
-
|
|
279
|
-
if os.path.exists(npcshrc_path):
|
|
280
|
-
with open(npcshrc_path, "r") as f:
|
|
281
|
-
for line in f:
|
|
282
|
-
# Skip comments and empty lines
|
|
283
|
-
line = line.split("#")[0].strip()
|
|
284
|
-
if not line:
|
|
285
|
-
continue
|
|
286
|
-
|
|
287
|
-
if "=" not in line:
|
|
288
|
-
continue
|
|
289
|
-
|
|
290
|
-
# Split on first = only
|
|
291
|
-
key, value = line.split("=", 1)
|
|
292
|
-
key = key.strip()
|
|
293
|
-
if key.startswith("export "):
|
|
294
|
-
key = key[7:]
|
|
295
|
-
|
|
296
|
-
# Clean up the value - handle quoted strings properly
|
|
297
|
-
value = value.strip()
|
|
298
|
-
if value.startswith('"') and value.endswith('"'):
|
|
299
|
-
value = value[1:-1]
|
|
300
|
-
elif value.startswith("'") and value.endswith("'"):
|
|
301
|
-
value = value[1:-1]
|
|
302
|
-
|
|
303
|
-
# Map environment variables to settings
|
|
304
|
-
key_mapping = {
|
|
305
|
-
"NPCSH_MODEL": "model",
|
|
306
|
-
"NPCSH_PROVIDER": "provider",
|
|
307
|
-
"NPCSH_EMBEDDING_MODEL": "embedding_model",
|
|
308
|
-
"NPCSH_EMBEDDING_PROVIDER": "embedding_provider",
|
|
309
|
-
"NPCSH_SEARCH_PROVIDER": "search_provider",
|
|
310
|
-
"NPCSH_LICENSE_KEY": "NPCSH_LICENSE_KEY",
|
|
311
|
-
"NPCSH_STREAM_OUTPUT": "NPCSH_STREAM_OUTPUT",
|
|
312
|
-
"NPC_STUDIO_DEFAULT_FOLDER": "default_folder",
|
|
313
|
-
}
|
|
314
|
-
|
|
315
|
-
if key in key_mapping:
|
|
316
|
-
global_settings[key_mapping[key]] = value
|
|
317
|
-
else:
|
|
318
|
-
global_vars[key] = value
|
|
319
|
-
|
|
320
|
-
print("Global settings loaded from .npcshrc")
|
|
321
|
-
print(global_settings)
|
|
322
|
-
return jsonify(
|
|
323
|
-
{
|
|
324
|
-
"global_settings": global_settings,
|
|
325
|
-
"global_vars": global_vars,
|
|
326
|
-
"error": None,
|
|
327
|
-
}
|
|
328
|
-
)
|
|
329
|
-
|
|
330
|
-
except Exception as e:
|
|
331
|
-
print(f"Error in get_global_settings: {str(e)}")
|
|
332
|
-
return jsonify({"error": str(e)}), 500
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
@app.route("/api/settings/global", methods=["POST", "OPTIONS"])
|
|
336
|
-
def save_global_settings():
|
|
337
|
-
if request.method == "OPTIONS":
|
|
338
|
-
return "", 200
|
|
339
|
-
|
|
340
|
-
try:
|
|
341
|
-
data = request.json
|
|
342
|
-
npcshrc_path = os.path.expanduser("~/.npcshrc")
|
|
343
|
-
|
|
344
|
-
key_mapping = {
|
|
345
|
-
"model": "NPCSH_CHAT_MODEL",
|
|
346
|
-
"provider": "NPCSH_CHAT_PROVIDER",
|
|
347
|
-
"embedding_model": "NPCSH_EMBEDDING_MODEL",
|
|
348
|
-
"embedding_provider": "NPCSH_EMBEDDING_PROVIDER",
|
|
349
|
-
"search_provider": "NPCSH_SEARCH_PROVIDER",
|
|
350
|
-
"NPCSH_LICENSE_KEY": "NPCSH_LICENSE_KEY",
|
|
351
|
-
"NPCSH_STREAM_OUTPUT": "NPCSH_STREAM_OUTPUT",
|
|
352
|
-
"default_folder": "NPC_STUDIO_DEFAULT_FOLDER",
|
|
353
|
-
}
|
|
354
|
-
|
|
355
|
-
os.makedirs(os.path.dirname(npcshrc_path), exist_ok=True)
|
|
356
|
-
print(data)
|
|
357
|
-
with open(npcshrc_path, "w") as f:
|
|
358
|
-
# Write settings as environment variables
|
|
359
|
-
for key, value in data.get("global_settings", {}).items():
|
|
360
|
-
if key in key_mapping and value:
|
|
361
|
-
# Quote value if it contains spaces
|
|
362
|
-
if " " in str(value):
|
|
363
|
-
value = f'"{value}"'
|
|
364
|
-
f.write(f"export {key_mapping[key]}={value}\n")
|
|
365
|
-
|
|
366
|
-
# Write custom variables
|
|
367
|
-
for key, value in data.get("global_vars", {}).items():
|
|
368
|
-
if key and value:
|
|
369
|
-
if " " in str(value):
|
|
370
|
-
value = f'"{value}"'
|
|
371
|
-
f.write(f"export {key}={value}\n")
|
|
372
|
-
|
|
373
|
-
return jsonify({"message": "Global settings saved successfully", "error": None})
|
|
374
|
-
|
|
375
|
-
except Exception as e:
|
|
376
|
-
print(f"Error in save_global_settings: {str(e)}")
|
|
377
|
-
return jsonify({"error": str(e)}), 500
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
@app.route("/api/settings/project", methods=["GET", "OPTIONS"]) # Add OPTIONS
|
|
381
|
-
def get_project_settings():
|
|
382
|
-
if request.method == "OPTIONS":
|
|
383
|
-
return "", 200
|
|
384
|
-
|
|
385
|
-
try:
|
|
386
|
-
current_dir = request.args.get("path")
|
|
387
|
-
if not current_dir:
|
|
388
|
-
return jsonify({"error": "No path provided"}), 400
|
|
389
|
-
|
|
390
|
-
env_path = os.path.join(current_dir, ".env")
|
|
391
|
-
env_vars = {}
|
|
392
|
-
|
|
393
|
-
if os.path.exists(env_path):
|
|
394
|
-
with open(env_path, "r") as f:
|
|
395
|
-
for line in f:
|
|
396
|
-
line = line.strip()
|
|
397
|
-
if line and not line.startswith("#"):
|
|
398
|
-
if "=" in line:
|
|
399
|
-
key, value = line.split("=", 1)
|
|
400
|
-
env_vars[key.strip()] = value.strip().strip("\"'")
|
|
401
|
-
|
|
402
|
-
return jsonify({"env_vars": env_vars, "error": None})
|
|
403
|
-
|
|
404
|
-
except Exception as e:
|
|
405
|
-
print(f"Error in get_project_settings: {str(e)}")
|
|
406
|
-
return jsonify({"error": str(e)}), 500
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
@app.route("/api/settings/project", methods=["POST", "OPTIONS"]) # Add OPTIONS
|
|
410
|
-
def save_project_settings():
|
|
411
|
-
if request.method == "OPTIONS":
|
|
412
|
-
return "", 200
|
|
413
|
-
|
|
414
|
-
try:
|
|
415
|
-
current_dir = request.args.get("path")
|
|
416
|
-
if not current_dir:
|
|
417
|
-
return jsonify({"error": "No path provided"}), 400
|
|
418
|
-
|
|
419
|
-
data = request.json
|
|
420
|
-
env_path = os.path.join(current_dir, ".env")
|
|
421
|
-
|
|
422
|
-
with open(env_path, "w") as f:
|
|
423
|
-
for key, value in data.get("env_vars", {}).items():
|
|
424
|
-
f.write(f"{key}={value}\n")
|
|
425
|
-
|
|
426
|
-
return jsonify(
|
|
427
|
-
{"message": "Project settings saved successfully", "error": None}
|
|
428
|
-
)
|
|
429
|
-
|
|
430
|
-
except Exception as e:
|
|
431
|
-
print(f"Error in save_project_settings: {str(e)}")
|
|
432
|
-
return jsonify({"error": str(e)}), 500
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
@app.route("/api/models", methods=["GET"])
|
|
436
|
-
def get_models():
|
|
437
|
-
"""
|
|
438
|
-
Endpoint to retrieve available models based on the current project path.
|
|
439
|
-
Checks for local configurations (.env) and Ollama.
|
|
440
|
-
"""
|
|
441
|
-
global available_models
|
|
442
|
-
current_path = request.args.get("currentPath")
|
|
443
|
-
if not current_path:
|
|
444
|
-
# Fallback to a default path or user home if needed,
|
|
445
|
-
# but ideally the frontend should always provide it.
|
|
446
|
-
current_path = os.path.expanduser("~/.npcsh") # Or handle error
|
|
447
|
-
print("Warning: No currentPath provided for /api/models, using default.")
|
|
448
|
-
# return jsonify({"error": "currentPath parameter is required"}), 400
|
|
449
|
-
|
|
450
|
-
try:
|
|
451
|
-
# Reuse the existing function to detect models
|
|
452
|
-
available_models = get_locally_available_models(current_path)
|
|
453
|
-
|
|
454
|
-
# Optionally, add more details or format the response if needed
|
|
455
|
-
# Example: Add a display name
|
|
456
|
-
formatted_models = []
|
|
457
|
-
for m, p in available_models.items():
|
|
458
|
-
# Basic formatting, customize as needed
|
|
459
|
-
text_only = (
|
|
460
|
-
"(text only)"
|
|
461
|
-
if p == "ollama"
|
|
462
|
-
and m in ["llama3.2", "deepseek-v3", "phi4"]
|
|
463
|
-
else ""
|
|
464
|
-
)
|
|
465
|
-
# Handle specific known model names for display
|
|
466
|
-
display_model = m
|
|
467
|
-
if "claude-3-5-haiku-latest" in m:
|
|
468
|
-
display_model = "claude-3.5-haiku"
|
|
469
|
-
elif "claude-3-5-sonnet-latest" in m:
|
|
470
|
-
display_model = "claude-3.5-sonnet"
|
|
471
|
-
elif "gemini-1.5-flash" in m:
|
|
472
|
-
display_model = "gemini-1.5-flash" # Handle multiple versions if neede
|
|
473
|
-
elif "gemini-2.0-flash-lite-preview-02-05" in m:
|
|
474
|
-
display_model = "gemini-2.0-flash-lite-preview"
|
|
475
|
-
|
|
476
|
-
display_name = f"{display_model} | {p} {text_only}".strip()
|
|
477
|
-
|
|
478
|
-
formatted_models.append(
|
|
479
|
-
{
|
|
480
|
-
"value": m, # Use the actual model ID as the value
|
|
481
|
-
"provider": p,
|
|
482
|
-
"display_name": display_name,
|
|
483
|
-
}
|
|
484
|
-
)
|
|
485
|
-
print(m, p)
|
|
486
|
-
return jsonify({"models": formatted_models, "error": None})
|
|
487
|
-
|
|
488
|
-
except Exception as e:
|
|
489
|
-
print(f"Error getting available models: {str(e)}")
|
|
490
|
-
|
|
491
|
-
traceback.print_exc()
|
|
492
|
-
# Return an empty list or a specific error structure
|
|
493
|
-
return jsonify({"models": [], "error": str(e)}), 500
|
|
494
|
-
|
|
495
|
-
@app.route('/api/<command>', methods=['POST'])
|
|
496
|
-
def api_command(command):
|
|
497
|
-
data = request.json or {}
|
|
498
|
-
|
|
499
|
-
# Check if command exists
|
|
500
|
-
handler = router.get_route(command)
|
|
501
|
-
if not handler:
|
|
502
|
-
return jsonify({"error": f"Unknown command: {command}"})
|
|
503
|
-
|
|
504
|
-
# Check if it's shell-only
|
|
505
|
-
if router.shell_only.get(command, False):
|
|
506
|
-
return jsonify({"error": f"Command {command} is only available in shell mode"})
|
|
507
|
-
|
|
508
|
-
# Execute the command handler
|
|
509
|
-
try:
|
|
510
|
-
# Convert positional args from JSON
|
|
511
|
-
args = data.get('args', [])
|
|
512
|
-
kwargs = data.get('kwargs', {})
|
|
513
|
-
|
|
514
|
-
# Add command name back to the command string
|
|
515
|
-
command_str = command
|
|
516
|
-
if args:
|
|
517
|
-
command_str += " " + " ".join(str(arg) for arg in args)
|
|
518
|
-
|
|
519
|
-
result = handler(command_str, **kwargs)
|
|
520
|
-
return jsonify(result)
|
|
521
|
-
except Exception as e:
|
|
522
|
-
return jsonify({"error": str(e)})
|
|
523
|
-
@app.route("/api/stream", methods=["POST"])
|
|
524
|
-
def stream():
|
|
525
|
-
data = request.json
|
|
526
|
-
print(data)
|
|
527
|
-
commandstr = data.get("commandstr")
|
|
528
|
-
conversation_id = data.get("conversationId")
|
|
529
|
-
model = data.get("model", None)
|
|
530
|
-
provider = data.get("provider", None)
|
|
531
|
-
if provider is None:
|
|
532
|
-
provider = available_models.get(model)
|
|
533
|
-
|
|
534
|
-
npc_name = data.get("npc", None)
|
|
535
|
-
npc_source = data.get("npcSource", "global") # Default to global if not specified
|
|
536
|
-
team = data.get("team", None)
|
|
537
|
-
current_path = data.get("currentPath")
|
|
538
|
-
|
|
539
|
-
# Load project-specific environment variables if currentPath is provided
|
|
540
|
-
if current_path:
|
|
541
|
-
loaded_vars = load_project_env(current_path)
|
|
542
|
-
print(f"Loaded project env variables for stream request: {list(loaded_vars.keys())}")
|
|
543
|
-
|
|
544
|
-
# Load the NPC if a name was provided
|
|
545
|
-
npc_object = None
|
|
546
|
-
if npc_name:
|
|
547
|
-
db_conn = get_db_connection()
|
|
548
|
-
# Pass the current_path parameter when loading project NPCs
|
|
549
|
-
npc_object = load_npc_by_name_and_source(npc_name, npc_source, db_conn, current_path)
|
|
550
|
-
|
|
551
|
-
if not npc_object and npc_source == 'project':
|
|
552
|
-
# Try global as fallback
|
|
553
|
-
print(f"NPC {npc_name} not found in project directory, trying global...")
|
|
554
|
-
npc_object = load_npc_by_name_and_source(npc_name, 'global', db_conn)
|
|
555
|
-
|
|
556
|
-
if npc_object:
|
|
557
|
-
print(f"Successfully loaded NPC {npc_name} from {npc_source} directory")
|
|
558
|
-
else:
|
|
559
|
-
print(f"Warning: Could not load NPC {npc_name}")
|
|
560
|
-
print(npc_object, type(npc_object))
|
|
561
|
-
attachments = data.get("attachments", [])
|
|
562
|
-
|
|
563
|
-
command_history = CommandHistory(db_path)
|
|
564
|
-
|
|
565
|
-
# Process attachments and save them properly
|
|
566
|
-
images = []
|
|
567
|
-
print(attachments)
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
attachments_loaded = []
|
|
572
|
-
|
|
573
|
-
if attachments:
|
|
574
|
-
for attachment in attachments:
|
|
575
|
-
extension = attachment["name"].split(".")[-1]
|
|
576
|
-
extension_mapped = extension_map.get(extension.upper(), "others")
|
|
577
|
-
file_path = os.path.expanduser(
|
|
578
|
-
"~/.npcsh/" + extension_mapped + "/" + attachment["name"]
|
|
579
|
-
)
|
|
580
|
-
|
|
581
|
-
if extension_mapped == "images":
|
|
582
|
-
# Open the image file and save it to the file path
|
|
583
|
-
ImageFile.LOAD_TRUNCATED_IMAGES = True
|
|
584
|
-
img = Image.open(attachment["path"])
|
|
585
|
-
|
|
586
|
-
# Save the image to a BytesIO buffer (to extract binary data)
|
|
587
|
-
img_byte_arr = BytesIO()
|
|
588
|
-
img.save(img_byte_arr, format="PNG") # or the appropriate format
|
|
589
|
-
img_byte_arr.seek(0) # Rewind the buffer to the beginning
|
|
590
|
-
|
|
591
|
-
# Save the image to a file
|
|
592
|
-
img.save(file_path, optimize=True, quality=50)
|
|
593
|
-
|
|
594
|
-
# Add to images list for LLM processing
|
|
595
|
-
images.append({"filename": attachment["name"], "file_path": file_path})
|
|
596
|
-
|
|
597
|
-
# Add the image data (in binary form) to attachments_loaded
|
|
598
|
-
attachments_loaded.append(
|
|
599
|
-
{
|
|
600
|
-
"name": attachment["name"],
|
|
601
|
-
"type": extension_mapped,
|
|
602
|
-
"data": img_byte_arr.read(), # Read binary data from the buffer
|
|
603
|
-
"size": os.path.getsize(file_path),
|
|
604
|
-
}
|
|
605
|
-
)
|
|
606
|
-
|
|
607
|
-
messages = fetch_messages_for_conversation(conversation_id)
|
|
608
|
-
if len(messages) == 0 and npc_object is not None:
|
|
609
|
-
messages = [{'role': 'system', 'content': npc_object.get_system_prompt()}]
|
|
610
|
-
elif len(messages)>0 and messages[0]['role'] != 'system' and npc_object is not None:
|
|
611
|
-
# If the first message is not a system prompt, we need to add it
|
|
612
|
-
messages.insert(0, {'role': 'system', 'content': npc_object.get_system_prompt()})
|
|
613
|
-
elif len(messages) > 0 and npc_object is not None:
|
|
614
|
-
messages[0]['content'] = npc_object.get_system_prompt()
|
|
615
|
-
# if we switch between npcs mid conversation, need to change the system prompt
|
|
616
|
-
if npc_object is not None and messages and messages[0]['role'] == 'system':
|
|
617
|
-
messages[0]['content'] = npc_object.get_system_prompt()
|
|
618
|
-
print("messages ", messages)
|
|
619
|
-
print("commandstr ", commandstr)
|
|
620
|
-
message_id = command_history.generate_message_id()
|
|
621
|
-
|
|
622
|
-
save_conversation_message(
|
|
623
|
-
command_history,
|
|
624
|
-
conversation_id,
|
|
625
|
-
"user",
|
|
626
|
-
commandstr,
|
|
627
|
-
wd=current_path,
|
|
628
|
-
model=model,
|
|
629
|
-
provider=provider,
|
|
630
|
-
npc=npc_name,
|
|
631
|
-
team=team,
|
|
632
|
-
attachments=attachments_loaded,
|
|
633
|
-
message_id=message_id,
|
|
634
|
-
)
|
|
635
|
-
message_id = command_history.generate_message_id()
|
|
636
|
-
|
|
637
|
-
stream_response = get_llm_response(
|
|
638
|
-
commandstr,
|
|
639
|
-
messages=messages,
|
|
640
|
-
images=images,
|
|
641
|
-
model=model,
|
|
642
|
-
provider=provider,
|
|
643
|
-
npc=npc_object, # Pass the NPC object instead of just the name
|
|
644
|
-
stream=True,
|
|
645
|
-
)
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
def event_stream():
|
|
649
|
-
complete_response = []
|
|
650
|
-
dot_count = 0
|
|
651
|
-
tool_call_data = {"id": None, "function_name": None, "arguments": ""}
|
|
652
|
-
|
|
653
|
-
for response_chunk in stream_response['response']:
|
|
654
|
-
# Print progress dots for terminal feedback
|
|
655
|
-
print('.', end="", flush=True)
|
|
656
|
-
dot_count += 1
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
if "hf.co" in model or provider == 'ollama':
|
|
660
|
-
#print("streaming from hf model through ollama")
|
|
661
|
-
chunk_content = response_chunk["message"]["content"] if "message" in response_chunk and "content" in response_chunk["message"] else ""
|
|
662
|
-
|
|
663
|
-
# Extract tool call info for Ollama
|
|
664
|
-
if "message" in response_chunk and "tool_calls" in response_chunk["message"]:
|
|
665
|
-
for tool_call in response_chunk["message"]["tool_calls"]:
|
|
666
|
-
if "id" in tool_call:
|
|
667
|
-
tool_call_data["id"] = tool_call["id"]
|
|
668
|
-
if "function" in tool_call:
|
|
669
|
-
if "name" in tool_call["function"]:
|
|
670
|
-
tool_call_data["function_name"] = tool_call["function"]["name"]
|
|
671
|
-
if "arguments" in tool_call["function"]:
|
|
672
|
-
tool_call_data["arguments"] += tool_call["function"]["arguments"]
|
|
673
|
-
|
|
674
|
-
if chunk_content:
|
|
675
|
-
complete_response.append(chunk_content)
|
|
676
|
-
|
|
677
|
-
# Keep original structure but add tool calls data
|
|
678
|
-
chunk_data = {
|
|
679
|
-
"id": None,
|
|
680
|
-
"object": None,
|
|
681
|
-
"created": response_chunk["created_at"],
|
|
682
|
-
"model": response_chunk["model"],
|
|
683
|
-
"choices": [
|
|
684
|
-
{
|
|
685
|
-
"index": 0,
|
|
686
|
-
"delta": {
|
|
687
|
-
"content": chunk_content,
|
|
688
|
-
"role": response_chunk["message"]["role"],
|
|
689
|
-
},
|
|
690
|
-
"finish_reason": response_chunk.get("done_reason"),
|
|
691
|
-
}
|
|
692
|
-
],
|
|
693
|
-
}
|
|
694
|
-
yield f"data: {json.dumps(chunk_data)}\n\n"
|
|
695
|
-
|
|
696
|
-
else:
|
|
697
|
-
# For LiteLLM format
|
|
698
|
-
chunk_content = ""
|
|
699
|
-
reasoning_content = ""
|
|
700
|
-
|
|
701
|
-
# Extract tool call info for LiteLLM
|
|
702
|
-
for choice in response_chunk.choices:
|
|
703
|
-
if hasattr(choice.delta, "tool_calls") and choice.delta.tool_calls:
|
|
704
|
-
for tool_call in choice.delta.tool_calls:
|
|
705
|
-
if tool_call.id:
|
|
706
|
-
tool_call_data["id"] = tool_call.id
|
|
707
|
-
if tool_call.function:
|
|
708
|
-
if hasattr(tool_call.function, "name") and tool_call.function.name:
|
|
709
|
-
tool_call_data["function_name"] = tool_call.function.name
|
|
710
|
-
if hasattr(tool_call.function, "arguments") and tool_call.function.arguments:
|
|
711
|
-
tool_call_data["arguments"] += tool_call.function.arguments
|
|
712
|
-
|
|
713
|
-
# Check for reasoning content (thoughts)
|
|
714
|
-
for choice in response_chunk.choices:
|
|
715
|
-
if hasattr(choice.delta, "reasoning_content"):
|
|
716
|
-
reasoning_content += choice.delta.reasoning_content
|
|
717
|
-
|
|
718
|
-
# Get regular content
|
|
719
|
-
chunk_content = "".join(
|
|
720
|
-
choice.delta.content
|
|
721
|
-
for choice in response_chunk.choices
|
|
722
|
-
if choice.delta.content is not None
|
|
723
|
-
)
|
|
724
|
-
|
|
725
|
-
if chunk_content:
|
|
726
|
-
complete_response.append(chunk_content)
|
|
727
|
-
|
|
728
|
-
# Keep original structure but add reasoning content
|
|
729
|
-
chunk_data = {
|
|
730
|
-
"id": response_chunk.id,
|
|
731
|
-
"object": response_chunk.object,
|
|
732
|
-
"created": response_chunk.created,
|
|
733
|
-
"model": response_chunk.model,
|
|
734
|
-
"choices": [
|
|
735
|
-
{
|
|
736
|
-
"index": choice.index,
|
|
737
|
-
"delta": {
|
|
738
|
-
"content": choice.delta.content,
|
|
739
|
-
"role": choice.delta.role,
|
|
740
|
-
"reasoning_content": reasoning_content if hasattr(choice.delta, "reasoning_content") else None,
|
|
741
|
-
},
|
|
742
|
-
"finish_reason": choice.finish_reason,
|
|
743
|
-
}
|
|
744
|
-
for choice in response_chunk.choices
|
|
745
|
-
],
|
|
746
|
-
}
|
|
747
|
-
yield f"data: {json.dumps(chunk_data)}\n\n"
|
|
748
|
-
save_conversation_message(
|
|
749
|
-
command_history,
|
|
750
|
-
conversation_id,
|
|
751
|
-
"assistant",
|
|
752
|
-
''.join(complete_response),
|
|
753
|
-
wd=current_path,
|
|
754
|
-
model=model,
|
|
755
|
-
provider=provider,
|
|
756
|
-
npc = npc_object.name or '',
|
|
757
|
-
team=team,
|
|
758
|
-
message_id=message_id, # Save with the same message_id
|
|
759
|
-
)
|
|
760
|
-
|
|
761
|
-
# Clear the dots by returning to the start of line and printing spaces
|
|
762
|
-
print('\r' + ' ' * dot_count*2 + '\r', end="", flush=True)
|
|
763
|
-
print('\n')
|
|
764
|
-
|
|
765
|
-
# Send completion message
|
|
766
|
-
yield f"data: {json.dumps({'type': 'message_stop'})}\n\n"
|
|
767
|
-
save_conversation_message(
|
|
768
|
-
command_history,
|
|
769
|
-
conversation_id,
|
|
770
|
-
"assistant",
|
|
771
|
-
''.join(complete_response),
|
|
772
|
-
wd=current_path,
|
|
773
|
-
model=model,
|
|
774
|
-
provider=provider,
|
|
775
|
-
npc=npc_object.name or '',
|
|
776
|
-
team=team,
|
|
777
|
-
message_id=message_id,
|
|
778
|
-
)
|
|
779
|
-
|
|
780
|
-
response = Response(event_stream(), mimetype="text/event-stream")
|
|
781
|
-
|
|
782
|
-
return response
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
@app.route("/api/npc_team_global")
|
|
786
|
-
def get_npc_team_global():
|
|
787
|
-
try:
|
|
788
|
-
db_conn = get_db_connection()
|
|
789
|
-
global_npc_directory = os.path.expanduser("~/.npcsh/npc_team")
|
|
790
|
-
|
|
791
|
-
npc_data = []
|
|
792
|
-
|
|
793
|
-
# Use existing helper to get NPCs from the global directory
|
|
794
|
-
for file in os.listdir(global_npc_directory):
|
|
795
|
-
if file.endswith(".npc"):
|
|
796
|
-
npc_path = os.path.join(global_npc_directory, file)
|
|
797
|
-
npc = NPC(file=npc_path, db_conn=db_conn)
|
|
798
|
-
|
|
799
|
-
# Serialize the NPC data - updated for the new Jinx structure
|
|
800
|
-
serialized_npc = {
|
|
801
|
-
"name": npc.name,
|
|
802
|
-
"primary_directive": npc.primary_directive,
|
|
803
|
-
"model": npc.model,
|
|
804
|
-
"provider": npc.provider,
|
|
805
|
-
"api_url": npc.api_url,
|
|
806
|
-
"use_global_jinxs": npc.use_global_jinxs,
|
|
807
|
-
"jinxs": [
|
|
808
|
-
{
|
|
809
|
-
"jinx_name": jinx.jinx_name,
|
|
810
|
-
"inputs": jinx.inputs,
|
|
811
|
-
"steps": [
|
|
812
|
-
{
|
|
813
|
-
"name": step.get("name", f"step_{i}"),
|
|
814
|
-
"engine": step.get("engine", "natural"),
|
|
815
|
-
"code": step.get("code", "")
|
|
816
|
-
}
|
|
817
|
-
for i, step in enumerate(jinx.steps)
|
|
818
|
-
]
|
|
819
|
-
}
|
|
820
|
-
for jinx in npc.jinxs
|
|
821
|
-
],
|
|
822
|
-
}
|
|
823
|
-
npc_data.append(serialized_npc)
|
|
824
|
-
|
|
825
|
-
return jsonify({"npcs": npc_data, "error": None})
|
|
826
|
-
|
|
827
|
-
except Exception as e:
|
|
828
|
-
print(f"Error loading global NPCs: {str(e)}")
|
|
829
|
-
return jsonify({"npcs": [], "error": str(e)})
|
|
830
|
-
|
|
831
|
-
|
|
832
|
-
@app.route("/api/jinxs/global", methods=["GET"])
|
|
833
|
-
def get_global_jinxs():
|
|
834
|
-
# try:
|
|
835
|
-
user_home = os.path.expanduser("~")
|
|
836
|
-
jinxs_dir = os.path.join(user_home, ".npcsh", "npc_team", "jinxs")
|
|
837
|
-
jinxs = []
|
|
838
|
-
if os.path.exists(jinxs_dir):
|
|
839
|
-
for file in os.listdir(jinxs_dir):
|
|
840
|
-
if file.endswith(".jinx"):
|
|
841
|
-
with open(os.path.join(jinxs_dir, file), "r") as f:
|
|
842
|
-
jinx_data = yaml.safe_load(f)
|
|
843
|
-
jinxs.append(jinx_data)
|
|
844
|
-
print("file", file)
|
|
845
|
-
|
|
846
|
-
return jsonify({"jinxs": jinxs})
|
|
847
|
-
|
|
848
|
-
|
|
849
|
-
# except Exception as e:
|
|
850
|
-
# return jsonify({"error": str(e)}), 500
|
|
851
|
-
|
|
852
|
-
|
|
853
|
-
@app.route("/api/jinxs/project", methods=["GET"])
|
|
854
|
-
def get_project_jinxs():
|
|
855
|
-
current_path = request.args.get(
|
|
856
|
-
"currentPath"
|
|
857
|
-
) # Correctly retrieves `currentPath` from query params
|
|
858
|
-
if not current_path:
|
|
859
|
-
return jsonify({"jinxs": []})
|
|
860
|
-
|
|
861
|
-
if not current_path.endswith("npc_team"):
|
|
862
|
-
current_path = os.path.join(current_path, "npc_team")
|
|
863
|
-
|
|
864
|
-
jinxs_dir = os.path.join(current_path, "jinxs")
|
|
865
|
-
jinxs = []
|
|
866
|
-
if os.path.exists(jinxs_dir):
|
|
867
|
-
for file in os.listdir(jinxs_dir):
|
|
868
|
-
if file.endswith(".jinx"):
|
|
869
|
-
with open(os.path.join(jinxs_dir, file), "r") as f:
|
|
870
|
-
jinx_data = yaml.safe_load(f)
|
|
871
|
-
jinxs.append(jinx_data)
|
|
872
|
-
return jsonify({"jinxs": jinxs})
|
|
873
|
-
|
|
874
|
-
|
|
875
|
-
@app.route("/api/jinxs/save", methods=["POST"])
|
|
876
|
-
def save_jinx():
|
|
877
|
-
try:
|
|
878
|
-
data = request.json
|
|
879
|
-
jinx_data = data.get("jinx")
|
|
880
|
-
is_global = data.get("isGlobal")
|
|
881
|
-
current_path = data.get("currentPath")
|
|
882
|
-
jinx_name = jinx_data.get("jinx_name")
|
|
883
|
-
|
|
884
|
-
if not jinx_name:
|
|
885
|
-
return jsonify({"error": "Jinx name is required"}), 400
|
|
886
|
-
|
|
887
|
-
if is_global:
|
|
888
|
-
jinxs_dir = os.path.join(
|
|
889
|
-
os.path.expanduser("~"), ".npcsh", "npc_team", "jinxs"
|
|
890
|
-
)
|
|
891
|
-
else:
|
|
892
|
-
if not current_path.endswith("npc_team"):
|
|
893
|
-
current_path = os.path.join(current_path, "npc_team")
|
|
894
|
-
jinxs_dir = os.path.join(current_path, "jinxs")
|
|
895
|
-
|
|
896
|
-
os.makedirs(jinxs_dir, exist_ok=True)
|
|
897
|
-
|
|
898
|
-
# Full jinx structure
|
|
899
|
-
jinx_yaml = {
|
|
900
|
-
"description": jinx_data.get("description", ""),
|
|
901
|
-
"inputs": jinx_data.get("inputs", []),
|
|
902
|
-
"steps": jinx_data.get("steps", []),
|
|
903
|
-
}
|
|
904
|
-
|
|
905
|
-
file_path = os.path.join(jinxs_dir, f"{jinx_name}.jinx")
|
|
906
|
-
with open(file_path, "w") as f:
|
|
907
|
-
yaml.safe_dump(jinx_yaml, f, sort_keys=False)
|
|
908
|
-
|
|
909
|
-
return jsonify({"status": "success"})
|
|
910
|
-
except Exception as e:
|
|
911
|
-
return jsonify({"error": str(e)}), 500
|
|
912
|
-
|
|
913
|
-
|
|
914
|
-
@app.route("/api/save_npc", methods=["POST"])
|
|
915
|
-
def save_npc():
|
|
916
|
-
try:
|
|
917
|
-
data = request.json
|
|
918
|
-
npc_data = data.get("npc")
|
|
919
|
-
is_global = data.get("isGlobal")
|
|
920
|
-
current_path = data.get("currentPath")
|
|
921
|
-
|
|
922
|
-
if not npc_data or "name" not in npc_data:
|
|
923
|
-
return jsonify({"error": "Invalid NPC data"}), 400
|
|
924
|
-
|
|
925
|
-
# Determine the directory based on whether it's global or project
|
|
926
|
-
if is_global:
|
|
927
|
-
npc_directory = os.path.expanduser("~/.npcsh/npc_team")
|
|
928
|
-
else:
|
|
929
|
-
npc_directory = os.path.join(current_path, "npc_team")
|
|
930
|
-
|
|
931
|
-
# Ensure the directory exists
|
|
932
|
-
os.makedirs(npc_directory, exist_ok=True)
|
|
933
|
-
|
|
934
|
-
# Create the YAML content
|
|
935
|
-
yaml_content = f"""name: {npc_data['name']}
|
|
936
|
-
primary_directive: "{npc_data['primary_directive']}"
|
|
937
|
-
model: {npc_data['model']}
|
|
938
|
-
provider: {npc_data['provider']}
|
|
939
|
-
api_url: {npc_data.get('api_url', '')}
|
|
940
|
-
use_global_jinxs: {str(npc_data.get('use_global_jinxs', True)).lower()}
|
|
941
|
-
"""
|
|
942
|
-
|
|
943
|
-
# Save the file
|
|
944
|
-
file_path = os.path.join(npc_directory, f"{npc_data['name']}.npc")
|
|
945
|
-
with open(file_path, "w") as f:
|
|
946
|
-
f.write(yaml_content)
|
|
947
|
-
|
|
948
|
-
return jsonify({"message": "NPC saved successfully", "error": None})
|
|
949
|
-
|
|
950
|
-
except Exception as e:
|
|
951
|
-
print(f"Error saving NPC: {str(e)}")
|
|
952
|
-
return jsonify({"error": str(e)}), 500
|
|
953
|
-
|
|
954
|
-
|
|
955
|
-
@app.route("/api/npc_team_project", methods=["GET"])
|
|
956
|
-
def get_npc_team_project():
|
|
957
|
-
try:
|
|
958
|
-
db_conn = get_db_connection()
|
|
959
|
-
|
|
960
|
-
project_npc_directory = request.args.get("currentPath")
|
|
961
|
-
if not project_npc_directory.endswith("npc_team"):
|
|
962
|
-
project_npc_directory = os.path.join(project_npc_directory, "npc_team")
|
|
963
|
-
|
|
964
|
-
npc_data = []
|
|
965
|
-
|
|
966
|
-
for file in os.listdir(project_npc_directory):
|
|
967
|
-
print(file)
|
|
968
|
-
if file.endswith(".npc"):
|
|
969
|
-
npc_path = os.path.join(project_npc_directory, file)
|
|
970
|
-
npc = NPC(file=npc_path, db_conn=db_conn)
|
|
971
|
-
|
|
972
|
-
# Serialize the NPC data, updated for new Jinx structure
|
|
973
|
-
serialized_npc = {
|
|
974
|
-
"name": npc.name,
|
|
975
|
-
"primary_directive": npc.primary_directive,
|
|
976
|
-
"model": npc.model,
|
|
977
|
-
"provider": npc.provider,
|
|
978
|
-
"api_url": npc.api_url,
|
|
979
|
-
"use_global_jinxs": npc.use_global_jinxs,
|
|
980
|
-
"jinxs": [
|
|
981
|
-
{
|
|
982
|
-
"jinx_name": jinx.jinx_name,
|
|
983
|
-
"inputs": jinx.inputs,
|
|
984
|
-
"steps": [
|
|
985
|
-
{
|
|
986
|
-
"name": step.get("name", f"step_{i}"),
|
|
987
|
-
"engine": step.get("engine", "natural"),
|
|
988
|
-
"code": step.get("code", "")
|
|
989
|
-
}
|
|
990
|
-
for i, step in enumerate(jinx.steps)
|
|
991
|
-
]
|
|
992
|
-
}
|
|
993
|
-
for jinx in npc.jinxs
|
|
994
|
-
],
|
|
995
|
-
}
|
|
996
|
-
npc_data.append(serialized_npc)
|
|
997
|
-
|
|
998
|
-
print(npc_data)
|
|
999
|
-
return jsonify({"npcs": npc_data, "error": None})
|
|
1000
|
-
|
|
1001
|
-
except Exception as e:
|
|
1002
|
-
print(f"Error fetching NPC team: {str(e)}")
|
|
1003
|
-
return jsonify({"npcs": [], "error": str(e)})
|
|
1004
|
-
|
|
1005
|
-
|
|
1006
|
-
@app.route("/api/get_attachment_response", methods=["POST"])
|
|
1007
|
-
def get_attachment_response():
|
|
1008
|
-
data = request.json
|
|
1009
|
-
attachments = data.get("attachments", [])
|
|
1010
|
-
messages = data.get("messages") # Get conversation ID
|
|
1011
|
-
conversation_id = data.get("conversationId")
|
|
1012
|
-
current_path = data.get("currentPath")
|
|
1013
|
-
command_history = CommandHistory(db_path)
|
|
1014
|
-
model = data.get("model")
|
|
1015
|
-
npc_name = data.get("npc")
|
|
1016
|
-
npc_source = data.get("npcSource", "global")
|
|
1017
|
-
team = data.get("team")
|
|
1018
|
-
provider = data.get("provider")
|
|
1019
|
-
message_id = data.get("messageId")
|
|
1020
|
-
|
|
1021
|
-
# Load project-specific environment variables if currentPath is provided
|
|
1022
|
-
if current_path:
|
|
1023
|
-
loaded_vars = load_project_env(current_path)
|
|
1024
|
-
print(f"Loaded project env variables for attachment response: {list(loaded_vars.keys())}")
|
|
1025
|
-
|
|
1026
|
-
# Load the NPC if a name was provided
|
|
1027
|
-
npc_object = None
|
|
1028
|
-
if npc_name:
|
|
1029
|
-
db_conn = get_db_connection()
|
|
1030
|
-
# Pass the current_path parameter when loading project NPCs
|
|
1031
|
-
npc_object = load_npc_by_name_and_source(npc_name, npc_source, db_conn, current_path)
|
|
1032
|
-
|
|
1033
|
-
if not npc_object and npc_source == 'project':
|
|
1034
|
-
# Try global as fallback
|
|
1035
|
-
print(f"NPC {npc_name} not found in project directory, trying global...")
|
|
1036
|
-
npc_object = load_npc_by_name_and_source(npc_name, 'global', db_conn)
|
|
1037
|
-
|
|
1038
|
-
if npc_object:
|
|
1039
|
-
print(f"Successfully loaded NPC {npc_name} from {npc_source} directory")
|
|
1040
|
-
else:
|
|
1041
|
-
print(f"Warning: Could not load NPC {npc_name}")
|
|
1042
|
-
|
|
1043
|
-
images = []
|
|
1044
|
-
for attachment in attachments:
|
|
1045
|
-
extension = attachment["name"].split(".")[-1]
|
|
1046
|
-
extension_mapped = extension_map.get(extension.upper(), "others")
|
|
1047
|
-
file_path = os.path.expanduser(
|
|
1048
|
-
"~/.npcsh/" + extension_mapped + "/" + attachment["name"]
|
|
1049
|
-
)
|
|
1050
|
-
if extension_mapped == "images":
|
|
1051
|
-
ImageFile.LOAD_TRUNCATED_IMAGES = True
|
|
1052
|
-
img = Image.open(attachment["path"])
|
|
1053
|
-
img.save(file_path, optimize=True, quality=50)
|
|
1054
|
-
images.append({"filename": attachment["name"], "file_path": file_path})
|
|
1055
|
-
|
|
1056
|
-
message_to_send = messages[-1]["content"][0]
|
|
1057
|
-
|
|
1058
|
-
response = get_llm_response(
|
|
1059
|
-
message_to_send,
|
|
1060
|
-
images=images,
|
|
1061
|
-
messages=messages,
|
|
1062
|
-
model=model,
|
|
1063
|
-
npc=npc_object, # Pass the NPC object instead of just the name
|
|
1064
|
-
)
|
|
1065
|
-
messages = response["messages"]
|
|
1066
|
-
response = response["response"]
|
|
1067
|
-
|
|
1068
|
-
# Save new messages
|
|
1069
|
-
save_conversation_message(
|
|
1070
|
-
command_history, conversation_id, "user", message_to_send, wd=current_path, team=team,
|
|
1071
|
-
model=model, provider=provider, npc=npc, attachments=attachments
|
|
1072
|
-
|
|
1073
|
-
)
|
|
1074
|
-
|
|
1075
|
-
save_conversation_message(
|
|
1076
|
-
command_history,
|
|
1077
|
-
conversation_id,
|
|
1078
|
-
"assistant",
|
|
1079
|
-
response,
|
|
1080
|
-
wd=current_path,
|
|
1081
|
-
team=team,
|
|
1082
|
-
model=model,
|
|
1083
|
-
provider=provider,
|
|
1084
|
-
npc=npc,
|
|
1085
|
-
attachments=attachments,
|
|
1086
|
-
message_id=message_id, ) # Save with the same message_id )
|
|
1087
|
-
return jsonify(
|
|
1088
|
-
{
|
|
1089
|
-
"status": "success",
|
|
1090
|
-
"message": response,
|
|
1091
|
-
"conversationId": conversation_id,
|
|
1092
|
-
"messages": messages, # Optionally return fetched messages
|
|
1093
|
-
}
|
|
1094
|
-
)
|
|
1095
|
-
|
|
1096
|
-
@app.route("/api/execute", methods=["POST"])
|
|
1097
|
-
def execute():
|
|
1098
|
-
data = request.json
|
|
1099
|
-
print(data)
|
|
1100
|
-
commandstr = data.get("commandstr")
|
|
1101
|
-
conversation_id = data.get("conversationId")
|
|
1102
|
-
model = data.get("model", 'llama3.2')
|
|
1103
|
-
provider = data.get("provider", 'ollama')
|
|
1104
|
-
if provider is None:
|
|
1105
|
-
provider = available_models.get(model)
|
|
1106
|
-
|
|
1107
|
-
npc_name = data.get("npc", None)
|
|
1108
|
-
npc_source = data.get("npcSource", "global") # Default to global if not specified
|
|
1109
|
-
team = data.get("team", None)
|
|
1110
|
-
current_path = data.get("currentPath")
|
|
1111
|
-
|
|
1112
|
-
# Load project-specific environment variables if currentPath is provided
|
|
1113
|
-
if current_path:
|
|
1114
|
-
loaded_vars = load_project_env(current_path)
|
|
1115
|
-
print(f"Loaded project env variables for stream request: {list(loaded_vars.keys())}")
|
|
1116
|
-
|
|
1117
|
-
# Load the NPC if a name was provided
|
|
1118
|
-
npc_object = None
|
|
1119
|
-
if npc_name:
|
|
1120
|
-
db_conn = get_db_connection()
|
|
1121
|
-
# Pass the current_path parameter when loading project NPCs
|
|
1122
|
-
npc_object = load_npc_by_name_and_source(npc_name, npc_source, db_conn, current_path)
|
|
1123
|
-
|
|
1124
|
-
if not npc_object and npc_source == 'project':
|
|
1125
|
-
# Try global as fallback
|
|
1126
|
-
print(f"NPC {npc_name} not found in project directory, trying global...")
|
|
1127
|
-
npc_object = load_npc_by_name_and_source(npc_name, 'global', db_conn)
|
|
1128
|
-
|
|
1129
|
-
if npc_object:
|
|
1130
|
-
print(f"Successfully loaded NPC {npc_name} from {npc_source} directory")
|
|
1131
|
-
else:
|
|
1132
|
-
print(f"Warning: Could not load NPC {npc_name}")
|
|
1133
|
-
print(npc_object, type(npc_object))
|
|
1134
|
-
attachments = data.get("attachments", [])
|
|
1135
|
-
|
|
1136
|
-
command_history = CommandHistory(db_path)
|
|
1137
|
-
|
|
1138
|
-
# Process attachments and save them properly
|
|
1139
|
-
images = []
|
|
1140
|
-
print(attachments)
|
|
1141
|
-
attachments_loaded = []
|
|
1142
|
-
|
|
1143
|
-
if attachments:
|
|
1144
|
-
for attachment in attachments:
|
|
1145
|
-
extension = attachment["name"].split(".")[-1]
|
|
1146
|
-
extension_mapped = extension_map.get(extension.upper(), "others")
|
|
1147
|
-
file_path = os.path.expanduser(
|
|
1148
|
-
"~/.npcsh/" + extension_mapped + "/" + attachment["name"]
|
|
1149
|
-
)
|
|
1150
|
-
|
|
1151
|
-
if extension_mapped == "images":
|
|
1152
|
-
# Open the image file and save it to the file path
|
|
1153
|
-
ImageFile.LOAD_TRUNCATED_IMAGES = True
|
|
1154
|
-
img = Image.open(attachment["path"])
|
|
1155
|
-
|
|
1156
|
-
# Save the image to a BytesIO buffer (to extract binary data)
|
|
1157
|
-
img_byte_arr = BytesIO()
|
|
1158
|
-
img.save(img_byte_arr, format="PNG") # or the appropriate format
|
|
1159
|
-
img_byte_arr.seek(0) # Rewind the buffer to the beginning
|
|
1160
|
-
|
|
1161
|
-
# Save the image to a file
|
|
1162
|
-
img.save(file_path, optimize=True, quality=50)
|
|
1163
|
-
|
|
1164
|
-
# Add to images list for LLM processing
|
|
1165
|
-
images.append({"filename": attachment["name"], "file_path": file_path})
|
|
1166
|
-
|
|
1167
|
-
# Add the image data (in binary form) to attachments_loaded
|
|
1168
|
-
attachments_loaded.append(
|
|
1169
|
-
{
|
|
1170
|
-
"name": attachment["name"],
|
|
1171
|
-
"type": extension_mapped,
|
|
1172
|
-
"data": img_byte_arr.read(), # Read binary data from the buffer
|
|
1173
|
-
"size": os.path.getsize(file_path),
|
|
1174
|
-
}
|
|
1175
|
-
)
|
|
1176
|
-
|
|
1177
|
-
messages = fetch_messages_for_conversation(conversation_id)
|
|
1178
|
-
if len(messages) == 0 and npc_object is not None:
|
|
1179
|
-
messages = [{'role': 'system', 'content': npc_object.get_system_prompt()}]
|
|
1180
|
-
elif len(messages)>0 and messages[0]['role'] != 'system' and npc_object is not None:
|
|
1181
|
-
# If the first message is not a system prompt, we need to add it
|
|
1182
|
-
messages.insert(0, {'role': 'system', 'content': npc_object.get_system_prompt()})
|
|
1183
|
-
elif len(messages) > 0 and npc_object is not None:
|
|
1184
|
-
messages[0]['content'] = npc_object.get_system_prompt()
|
|
1185
|
-
# if we switch between npcs mid conversation, need to change the system prompt
|
|
1186
|
-
if npc_object is not None and messages and messages[0]['role'] == 'system':
|
|
1187
|
-
messages[0]['content'] = npc_object.get_system_prompt()
|
|
1188
|
-
print("messages ", messages)
|
|
1189
|
-
print("commandstr ", commandstr)
|
|
1190
|
-
message_id = command_history.generate_message_id()
|
|
1191
|
-
|
|
1192
|
-
save_conversation_message(
|
|
1193
|
-
command_history,
|
|
1194
|
-
conversation_id,
|
|
1195
|
-
"user",
|
|
1196
|
-
commandstr,
|
|
1197
|
-
wd=current_path,
|
|
1198
|
-
model=model,
|
|
1199
|
-
provider=provider,
|
|
1200
|
-
npc=npc_name,
|
|
1201
|
-
team=team,
|
|
1202
|
-
attachments=attachments_loaded,
|
|
1203
|
-
message_id=message_id,
|
|
1204
|
-
)
|
|
1205
|
-
message_id = command_history.generate_message_id()
|
|
1206
|
-
|
|
1207
|
-
response_gen = check_llm_command(
|
|
1208
|
-
commandstr,
|
|
1209
|
-
messages=messages,
|
|
1210
|
-
images=images,
|
|
1211
|
-
model=model,
|
|
1212
|
-
provider=provider,
|
|
1213
|
-
npc=npc_object, # Pass the NPC object instead of just the name
|
|
1214
|
-
stream=True,
|
|
1215
|
-
)
|
|
1216
|
-
-    def event_stream():
-        complete_response = []
-        dot_count = 0
-        tool_call_data = {"id": None, "function_name": None, "arguments": ""}
-        decision = ''
-        first_chunk = True
-        for response_chunk in response_gen['output']:
-
-            # Print progress dots for terminal feedback
-            print('.', end="", flush=True)
-            dot_count += 1
-
-            # Handle decision events that arrive before action execution
-            if isinstance(response_chunk, dict) and response_chunk.get("role") == "decision":
-                chunk_data = {
-                    "id": None,
-                    "object": None,
-                    "created": None,
-                    "model": model,
-                    "choices": [
-                        {
-                            "index": 0,
-                            "delta": {
-                                "content": response_chunk.get('content'),
-                                "role": response_chunk.get('role'),
-                            },
-                            "finish_reason": None,
-                        }
-                    ],
-                }
-                if response_chunk.get('content'):
-                    decision = response_chunk.get('content')
-
-            elif "hf.co" in model or provider == 'ollama':
-                chunk_content = response_chunk["message"]["content"] if "message" in response_chunk and "content" in response_chunk["message"] else ""
-                if first_chunk:
-                    chunk_content = decision + '\n' + chunk_content
-                    first_chunk = False
-                # Extract tool-call info for Ollama
-                if "message" in response_chunk and "tool_calls" in response_chunk["message"]:
-                    for tool_call in response_chunk["message"]["tool_calls"]:
-                        if "id" in tool_call:
-                            tool_call_data["id"] = tool_call["id"]
-                        if "function" in tool_call:
-                            if "name" in tool_call["function"]:
-                                tool_call_data["function_name"] = tool_call["function"]["name"]
-                            if "arguments" in tool_call["function"]:
-                                tool_call_data["arguments"] += tool_call["function"]["arguments"]
-
-                if chunk_content:
-                    complete_response.append(chunk_content)
-
-                # Keep the original structure but add tool-call data
-                chunk_data = {
-                    "id": None,
-                    "object": None,
-                    "created": response_chunk["created_at"],
-                    "model": response_chunk["model"],
-                    "choices": [
-                        {
-                            "index": 0,
-                            "delta": {
-                                "content": chunk_content,
-                                "role": response_chunk["message"]["role"],
-                            },
-                            "finish_reason": response_chunk.get("done_reason"),
-                        }
-                    ],
-                }
-                yield f"data: {json.dumps(chunk_data)}\n\n"
-
-            else:
-                # For the LiteLLM format
-                chunk_content = ""
-                reasoning_content = ""
-                # Extract tool-call info for LiteLLM
-                for choice in response_chunk.choices:
-                    if hasattr(choice.delta, "tool_calls") and choice.delta.tool_calls:
-                        for tool_call in choice.delta.tool_calls:
-                            if tool_call.id:
-                                tool_call_data["id"] = tool_call.id
-                            if tool_call.function:
-                                if hasattr(tool_call.function, "name") and tool_call.function.name:
-                                    tool_call_data["function_name"] = tool_call.function.name
-                                if hasattr(tool_call.function, "arguments") and tool_call.function.arguments:
-                                    tool_call_data["arguments"] += tool_call.function.arguments
-
-                # Check for reasoning content (thoughts)
-                for choice in response_chunk.choices:
-                    if hasattr(choice.delta, "reasoning_content"):
-                        reasoning_content += choice.delta.reasoning_content
-
-                # Get regular content
-                chunk_content = "".join(
-                    choice.delta.content
-                    for choice in response_chunk.choices
-                    if choice.delta.content is not None
-                )
-
-                if first_chunk:
-                    chunk_content = decision + '\n' + chunk_content
-                    first_chunk = False
-                if chunk_content:
-                    complete_response.append(chunk_content)
-
-                # Keep the original structure but add reasoning content
-                chunk_data = {
-                    "id": response_chunk.id,
-                    "object": response_chunk.object,
-                    "created": response_chunk.created,
-                    "model": response_chunk.model,
-                    "choices": [
-                        {
-                            "index": choice.index,
-                            "delta": {
-                                "content": choice.delta.content,
-                                "role": choice.delta.role,
-                                "reasoning_content": reasoning_content if hasattr(choice.delta, "reasoning_content") else None,
-                            },
-                            "finish_reason": choice.finish_reason,
-                        }
-                        for choice in response_chunk.choices
-                    ],
-                }
-                yield f"data: {json.dumps(chunk_data)}\n\n"
-        save_conversation_message(
-            command_history,
-            conversation_id,
-            "assistant",
-            ''.join(complete_response),
-            wd=current_path,
-            model=model,
-            provider=provider,
-            npc=npc_object.name or '',
-            team=team,
-            message_id=message_id,  # save with the same message_id
-        )
-
-        # Clear the dots by returning to the start of the line and printing spaces
-        print('\r' + ' ' * dot_count * 2 + '\r', end="", flush=True)
-        print('\n')
-
-        # Send the completion message
-        yield f"data: {json.dumps({'type': 'message_stop'})}\n\n"
-        npc_name = '' if npc_object is None else npc_object.name
-        save_conversation_message(
-            command_history,
-            conversation_id,
-            "assistant",
-            ''.join(complete_response),
-            wd=current_path,
-            model=model,
-            provider=provider,
-            npc=npc_name,
-            team=team,
-            message_id=message_id,
-        )
-
-    response = Response(event_stream(), mimetype="text/event-stream")
-    return response
-
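The removed handler streams Server-Sent Events by hand: each chunk becomes a "data: <json>" frame terminated by a blank line, the whole generator is wrapped in a Response with the text/event-stream mimetype, and a message_stop sentinel closes the stream. A minimal, runnable sketch of that pattern, with an invented route and payload:

import json
import time

from flask import Flask, Response

app = Flask(__name__)

@app.route("/api/demo_stream")
def demo_stream():
    def event_stream():
        for i in range(3):
            payload = {"choices": [{"delta": {"content": f"chunk {i}"}}]}
            # Each SSE frame is "data: <payload>" followed by a blank line;
            # clients split the stream on that blank-line separator.
            yield f"data: {json.dumps(payload)}\n\n"
            time.sleep(0.1)
        # Sentinel frame mirroring the removed code's message_stop event.
        yield f"data: {json.dumps({'type': 'message_stop'})}\n\n"
    return Response(event_stream(), mimetype="text/event-stream")
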
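Both provider branches fold streamed tool-call fragments into the single tool_call_data accumulator, because a function call's arguments arrive split across many deltas. A reduced sketch of that accumulation over dict-shaped chunks (sample data invented, shaped like the Ollama branch above):

import json

# Two partial deltas that together form one complete tool call.
chunks = [
    {"tool_calls": [{"id": "call_1", "function": {"name": "search", "arguments": '{"que'}}]},
    {"tool_calls": [{"function": {"arguments": 'ry": "npcpy"}'}}]},
]

tool_call_data = {"id": None, "function_name": None, "arguments": ""}
for chunk in chunks:
    for tool_call in chunk.get("tool_calls", []):
        if "id" in tool_call:
            tool_call_data["id"] = tool_call["id"]
        fn = tool_call.get("function", {})
        if "name" in fn:
            tool_call_data["function_name"] = fn["name"]
        # Argument fragments are concatenated until they parse as JSON.
        tool_call_data["arguments"] += fn.get("arguments", "")

assert json.loads(tool_call_data["arguments"]) == {"query": "npcpy"}
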
-def get_conversation_history(conversation_id):
-    """Fetch all messages for a conversation in chronological order."""
-    if not conversation_id:
-        return []
-
-    conn = get_db_connection()
-    cursor = conn.cursor()
-
-    try:
-        query = """
-            SELECT role, content, timestamp
-            FROM conversation_history
-            WHERE conversation_id = ?
-            ORDER BY timestamp ASC
-        """
-        cursor.execute(query, (conversation_id,))
-        messages = cursor.fetchall()
-
-        return [
-            {
-                "role": msg["role"],
-                "content": msg["content"],
-                "timestamp": msg["timestamp"],
-            }
-            for msg in messages
-        ]
-    finally:
-        conn.close()
-
-
@app.route("/api/conversations", methods=["GET"])
|
|
1413
|
-
def get_conversations():
|
|
1414
|
-
try:
|
|
1415
|
-
path = request.args.get("path")
|
|
1416
|
-
|
|
1417
|
-
if not path:
|
|
1418
|
-
return jsonify({"error": "No path provided", "conversations": []}), 400
|
|
1419
|
-
|
|
1420
|
-
conn = get_db_connection()
|
|
1421
|
-
try:
|
|
1422
|
-
cursor = conn.cursor()
|
|
1423
|
-
|
|
1424
|
-
query = """
|
|
1425
|
-
SELECT DISTINCT conversation_id,
|
|
1426
|
-
MIN(timestamp) as start_time,
|
|
1427
|
-
GROUP_CONCAT(content) as preview
|
|
1428
|
-
FROM conversation_history
|
|
1429
|
-
WHERE directory_path = ? OR directory_path = ?
|
|
1430
|
-
GROUP BY conversation_id
|
|
1431
|
-
ORDER BY start_time DESC
|
|
1432
|
-
"""
|
|
1433
|
-
|
|
1434
|
-
# Check both with and without trailing slash
|
|
1435
|
-
path_without_slash = path.rstrip('/')
|
|
1436
|
-
path_with_slash = path_without_slash + '/'
|
|
1437
|
-
|
|
1438
|
-
cursor.execute(query, [path_without_slash, path_with_slash])
|
|
1439
|
-
conversations = cursor.fetchall()
|
|
1440
|
-
|
|
1441
|
-
return jsonify(
|
|
1442
|
-
{
|
|
1443
|
-
"conversations": [
|
|
1444
|
-
{
|
|
1445
|
-
"id": conv["conversation_id"],
|
|
1446
|
-
"timestamp": conv["start_time"],
|
|
1447
|
-
"preview": (
|
|
1448
|
-
conv["preview"][:100] + "..."
|
|
1449
|
-
if conv["preview"] and len(conv["preview"]) > 100
|
|
1450
|
-
else conv["preview"]
|
|
1451
|
-
),
|
|
1452
|
-
}
|
|
1453
|
-
for conv in conversations
|
|
1454
|
-
],
|
|
1455
|
-
"error": None,
|
|
1456
|
-
}
|
|
1457
|
-
)
|
|
1458
|
-
|
|
1459
|
-
finally:
|
|
1460
|
-
conn.close()
|
|
1461
|
-
|
|
1462
|
-
except Exception as e:
|
|
1463
|
-
print(f"Error getting conversations: {str(e)}")
|
|
1464
|
-
return jsonify({"error": str(e), "conversations": []}), 500
|
|
1465
|
-
|
|
1466
|
-
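For context, this endpoint takes the working directory as a query parameter and matches directory_path both with and without a trailing slash. A hypothetical client call, assuming a local server on the default port 5337 used by start_flask_server below (the path value and response handling are illustrative):

import requests

resp = requests.get(
    "http://localhost:5337/api/conversations",
    params={"path": "/home/user/project"},
)
for conv in resp.json()["conversations"]:
    # Each entry carries the conversation id, its start time, and a
    # preview truncated to 100 characters by the handler.
    print(conv["id"], conv["timestamp"], conv["preview"])
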
@app.route("/api/conversation/<conversation_id>/messages", methods=["GET"])
|
|
1467
|
-
def get_conversation_messages(conversation_id):
|
|
1468
|
-
try:
|
|
1469
|
-
conn = get_db_connection()
|
|
1470
|
-
cursor = conn.cursor()
|
|
1471
|
-
|
|
1472
|
-
# Modified query to ensure proper ordering and deduplication
|
|
1473
|
-
query = """
|
|
1474
|
-
WITH ranked_messages AS (
|
|
1475
|
-
SELECT
|
|
1476
|
-
ch.*,
|
|
1477
|
-
GROUP_CONCAT(ma.id) as attachment_ids,
|
|
1478
|
-
ROW_NUMBER() OVER (
|
|
1479
|
-
PARTITION BY ch.role, strftime('%s', ch.timestamp)
|
|
1480
|
-
ORDER BY ch.id DESC
|
|
1481
|
-
) as rn
|
|
1482
|
-
FROM conversation_history ch
|
|
1483
|
-
LEFT JOIN message_attachments ma
|
|
1484
|
-
ON ch.message_id = ma.message_id
|
|
1485
|
-
WHERE ch.conversation_id = ?
|
|
1486
|
-
GROUP BY ch.id, ch.timestamp
|
|
1487
|
-
)
|
|
1488
|
-
SELECT *
|
|
1489
|
-
FROM ranked_messages
|
|
1490
|
-
WHERE rn = 1
|
|
1491
|
-
ORDER BY timestamp ASC, id ASC
|
|
1492
|
-
"""
|
|
1493
|
-
|
|
1494
|
-
cursor.execute(query, [conversation_id])
|
|
1495
|
-
messages = cursor.fetchall()
|
|
1496
|
-
#print(messages)
|
|
1497
|
-
|
|
1498
|
-
return jsonify(
|
|
1499
|
-
{
|
|
1500
|
-
"messages": [
|
|
1501
|
-
{
|
|
1502
|
-
"message_id": msg["message_id"],
|
|
1503
|
-
"role": msg["role"],
|
|
1504
|
-
"content": msg["content"],
|
|
1505
|
-
"timestamp": msg["timestamp"],
|
|
1506
|
-
"model": msg["model"],
|
|
1507
|
-
"provider": msg["provider"],
|
|
1508
|
-
"npc": msg["npc"],
|
|
1509
|
-
"attachments": (
|
|
1510
|
-
get_message_attachments(msg["message_id"])
|
|
1511
|
-
if msg["attachment_ids"]
|
|
1512
|
-
else []
|
|
1513
|
-
),
|
|
1514
|
-
}
|
|
1515
|
-
for msg in messages
|
|
1516
|
-
],
|
|
1517
|
-
"error": None,
|
|
1518
|
-
}
|
|
1519
|
-
)
|
|
1520
|
-
|
|
1521
|
-
except Exception as e:
|
|
1522
|
-
print(f"Error getting conversation messages: {str(e)}")
|
|
1523
|
-
return jsonify({"error": str(e), "messages": []}), 500
|
|
1524
|
-
finally:
|
|
1525
|
-
conn.close()
|
|
1526
|
-
|
|
1527
|
-
|
|
1528
|
-
|
|
1529
|
-
|
|
1530
|
-
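The ranked_messages CTE deduplicates rows that share a role and a second-resolution timestamp, keeping only the newest copy (highest id) in each window. The same ROW_NUMBER() pattern in isolation, on a toy table (schema and data invented):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE msgs (id INTEGER PRIMARY KEY, role TEXT, ts TEXT, content TEXT);
    INSERT INTO msgs (role, ts, content) VALUES
        ('assistant', '2024-01-01 10:00:00', 'draft'),
        ('assistant', '2024-01-01 10:00:00', 'final'),  -- same second: duplicate
        ('user',      '2024-01-01 10:00:05', 'hello');
""")
rows = conn.execute("""
    WITH ranked AS (
        SELECT *, ROW_NUMBER() OVER (
            PARTITION BY role, strftime('%s', ts)
            ORDER BY id DESC
        ) AS rn
        FROM msgs
    )
    SELECT role, content FROM ranked WHERE rn = 1 ORDER BY id
""").fetchall()
print(rows)  # [('assistant', 'final'), ('user', 'hello')]
conn.close()
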
-@app.after_request
-def after_request(response):
-    response.headers.add("Access-Control-Allow-Headers", "Content-Type,Authorization")
-    response.headers.add("Access-Control-Allow-Methods", "GET,PUT,POST,DELETE,OPTIONS")
-    response.headers.add("Access-Control-Allow-Credentials", "true")
-    return response
-
-
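This hook only appends the allow-headers, allow-methods, and credentials grants; Access-Control-Allow-Origin itself comes from the flask-cors setup in start_flask_server below. A hedged way to inspect the combined headers against a running instance (origin and port illustrative):

import requests

resp = requests.options(
    "http://localhost:5337/api/conversations",
    headers={
        "Origin": "http://localhost:5173",
        "Access-Control-Request-Method": "GET",
    },
)
# With cors_origins configured, flask-cors answers the preflight while the
# after_request hook supplies the remaining Access-Control-* headers.
print(resp.headers.get("Access-Control-Allow-Methods"))
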
-def get_db_connection():
-    conn = sqlite3.connect(db_path)
-    conn.row_factory = sqlite3.Row
-    return conn
-
-
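Setting row_factory = sqlite3.Row is what lets every handler above read rows by column name (msg["content"]) rather than by position. The behavior in isolation:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row  # rows become indexable by column name
conn.execute("CREATE TABLE t (role TEXT, content TEXT)")
conn.execute("INSERT INTO t VALUES ('user', 'hi')")
row = conn.execute("SELECT role, content FROM t").fetchone()
print(row["role"], row["content"])  # name-based access, as in the handlers
conn.close()
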
-extension_map = {
-    "PNG": "images",
-    "JPG": "images",
-    "JPEG": "images",
-    "GIF": "images",
-    "SVG": "images",
-    "MP4": "videos",
-    "AVI": "videos",
-    "MOV": "videos",
-    "WMV": "videos",
-    "MPG": "videos",
-    "MPEG": "videos",
-    "DOC": "documents",
-    "DOCX": "documents",
-    "PDF": "documents",
-    "PPT": "documents",
-    "PPTX": "documents",
-    "XLS": "documents",
-    "XLSX": "documents",
-    "TXT": "documents",
-    "CSV": "documents",
-    "ZIP": "archives",
-    "RAR": "archives",
-    "7Z": "archives",
-    "TAR": "archives",
-    "GZ": "archives",
-    "BZ2": "archives",
-    "ISO": "archives",
-}
-
-
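The map's keys are bare upper-case extensions, so any lookup has to strip the dot and upper-case first; unmapped types need a fallback. A hypothetical classifier over an excerpt of this table (the "other" bucket is invented):

from pathlib import Path

# Excerpt of the mapping above; keys are bare upper-case extensions.
extension_map = {"PNG": "images", "JPG": "images", "PDF": "documents"}

def classify_attachment(filename: str) -> str:
    ext = Path(filename).suffix.lstrip(".").upper()
    return extension_map.get(ext, "other")  # fallback label is invented

print(classify_attachment("report.pdf"))  # documents
print(classify_attachment("photo.JPG"))   # images
print(classify_attachment("notes.md"))    # other (unmapped)
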
-def fetch_messages_for_conversation(conversation_id):
-    conn = get_db_connection()
-    cursor = conn.cursor()
-
-    query = """
-        SELECT role, content, timestamp
-        FROM conversation_history
-        WHERE conversation_id = ?
-        ORDER BY timestamp ASC
-    """
-    cursor.execute(query, (conversation_id,))
-    messages = cursor.fetchall()
-    conn.close()
-
-    return [
-        {
-            "role": message["role"],
-            "content": message["content"],
-            "timestamp": message["timestamp"],
-        }
-        for message in messages
-    ]
-
-
@app.route("/api/health", methods=["GET"])
|
|
1600
|
-
def health_check():
|
|
1601
|
-
return jsonify({"status": "ok", "error": None})
|
|
1602
|
-
|
|
1603
|
-
|
|
1604
|
-
-def start_flask_server(
-    port=5337,
-    cors_origins=None,
-    static_files=None,
-    debug=False,
-):
-    try:
-        # Ensure the database tables exist
-        command_history = CommandHistory(db_path)
-
-        # Only apply CORS if origins are specified
-        if cors_origins:
-            CORS(
-                app,
-                origins=cors_origins,
-                allow_headers=["Content-Type", "Authorization"],
-                methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
-                supports_credentials=True,
-            )
-
-        # Run the Flask app on all interfaces
-        print(f"Starting Flask server on http://0.0.0.0:{port}")
-        app.run(host="0.0.0.0", port=port, debug=debug)
-    except Exception as e:
-        print(f"Error starting server: {str(e)}")
-
-
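Launching is direct: call start_flask_server(), as the __main__ guard below does, or pass explicit settings; note that static_files is accepted but never used in this removed body. A hedged usage sketch, assuming the removed module imports as npcpy.modes.serve in the 1.0.26 layout:

# Hypothetical launch script against the removed 1.0.26 module layout.
from npcpy.modes.serve import start_flask_server

start_flask_server(
    port=5337,                               # default shown above
    cors_origins=["http://localhost:5173"],  # illustrative frontend origin
    debug=False,
)
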
-if __name__ == "__main__":
-    start_flask_server()