npcsh 0.3.32__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. npcsh/_state.py +942 -0
  2. npcsh/alicanto.py +1074 -0
  3. npcsh/guac.py +785 -0
  4. npcsh/mcp_helpers.py +357 -0
  5. npcsh/mcp_npcsh.py +822 -0
  6. npcsh/mcp_server.py +184 -0
  7. npcsh/npc.py +218 -0
  8. npcsh/npcsh.py +1161 -0
  9. npcsh/plonk.py +387 -269
  10. npcsh/pti.py +234 -0
  11. npcsh/routes.py +958 -0
  12. npcsh/spool.py +315 -0
  13. npcsh/wander.py +550 -0
  14. npcsh/yap.py +573 -0
  15. npcsh-1.0.0.dist-info/METADATA +596 -0
  16. npcsh-1.0.0.dist-info/RECORD +21 -0
  17. {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/WHEEL +1 -1
  18. npcsh-1.0.0.dist-info/entry_points.txt +9 -0
  19. {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/licenses/LICENSE +1 -1
  20. npcsh/audio.py +0 -569
  21. npcsh/audio_gen.py +0 -1
  22. npcsh/cli.py +0 -543
  23. npcsh/command_history.py +0 -566
  24. npcsh/conversation.py +0 -54
  25. npcsh/data_models.py +0 -46
  26. npcsh/dataframes.py +0 -171
  27. npcsh/embeddings.py +0 -168
  28. npcsh/helpers.py +0 -646
  29. npcsh/image.py +0 -298
  30. npcsh/image_gen.py +0 -79
  31. npcsh/knowledge_graph.py +0 -1006
  32. npcsh/llm_funcs.py +0 -2195
  33. npcsh/load_data.py +0 -83
  34. npcsh/main.py +0 -5
  35. npcsh/model_runner.py +0 -189
  36. npcsh/npc_compiler.py +0 -2879
  37. npcsh/npc_sysenv.py +0 -388
  38. npcsh/npc_team/assembly_lines/test_pipeline.py +0 -181
  39. npcsh/npc_team/corca.npc +0 -13
  40. npcsh/npc_team/foreman.npc +0 -7
  41. npcsh/npc_team/npcsh.ctx +0 -11
  42. npcsh/npc_team/sibiji.npc +0 -4
  43. npcsh/npc_team/templates/analytics/celona.npc +0 -0
  44. npcsh/npc_team/templates/hr_support/raone.npc +0 -0
  45. npcsh/npc_team/templates/humanities/eriane.npc +0 -4
  46. npcsh/npc_team/templates/it_support/lineru.npc +0 -0
  47. npcsh/npc_team/templates/marketing/slean.npc +0 -4
  48. npcsh/npc_team/templates/philosophy/maurawa.npc +0 -0
  49. npcsh/npc_team/templates/sales/turnic.npc +0 -4
  50. npcsh/npc_team/templates/software/welxor.npc +0 -0
  51. npcsh/npc_team/tools/bash_executer.tool +0 -32
  52. npcsh/npc_team/tools/calculator.tool +0 -8
  53. npcsh/npc_team/tools/code_executor.tool +0 -16
  54. npcsh/npc_team/tools/generic_search.tool +0 -27
  55. npcsh/npc_team/tools/image_generation.tool +0 -25
  56. npcsh/npc_team/tools/local_search.tool +0 -149
  57. npcsh/npc_team/tools/npcsh_executor.tool +0 -9
  58. npcsh/npc_team/tools/screen_cap.tool +0 -27
  59. npcsh/npc_team/tools/sql_executor.tool +0 -26
  60. npcsh/response.py +0 -272
  61. npcsh/search.py +0 -252
  62. npcsh/serve.py +0 -1467
  63. npcsh/shell.py +0 -524
  64. npcsh/shell_helpers.py +0 -3919
  65. npcsh/stream.py +0 -233
  66. npcsh/video.py +0 -52
  67. npcsh/video_gen.py +0 -69
  68. npcsh-0.3.32.data/data/npcsh/npc_team/bash_executer.tool +0 -32
  69. npcsh-0.3.32.data/data/npcsh/npc_team/calculator.tool +0 -8
  70. npcsh-0.3.32.data/data/npcsh/npc_team/celona.npc +0 -0
  71. npcsh-0.3.32.data/data/npcsh/npc_team/code_executor.tool +0 -16
  72. npcsh-0.3.32.data/data/npcsh/npc_team/corca.npc +0 -13
  73. npcsh-0.3.32.data/data/npcsh/npc_team/eriane.npc +0 -4
  74. npcsh-0.3.32.data/data/npcsh/npc_team/foreman.npc +0 -7
  75. npcsh-0.3.32.data/data/npcsh/npc_team/generic_search.tool +0 -27
  76. npcsh-0.3.32.data/data/npcsh/npc_team/image_generation.tool +0 -25
  77. npcsh-0.3.32.data/data/npcsh/npc_team/lineru.npc +0 -0
  78. npcsh-0.3.32.data/data/npcsh/npc_team/local_search.tool +0 -149
  79. npcsh-0.3.32.data/data/npcsh/npc_team/maurawa.npc +0 -0
  80. npcsh-0.3.32.data/data/npcsh/npc_team/npcsh.ctx +0 -11
  81. npcsh-0.3.32.data/data/npcsh/npc_team/npcsh_executor.tool +0 -9
  82. npcsh-0.3.32.data/data/npcsh/npc_team/raone.npc +0 -0
  83. npcsh-0.3.32.data/data/npcsh/npc_team/screen_cap.tool +0 -27
  84. npcsh-0.3.32.data/data/npcsh/npc_team/sibiji.npc +0 -4
  85. npcsh-0.3.32.data/data/npcsh/npc_team/slean.npc +0 -4
  86. npcsh-0.3.32.data/data/npcsh/npc_team/sql_executor.tool +0 -26
  87. npcsh-0.3.32.data/data/npcsh/npc_team/test_pipeline.py +0 -181
  88. npcsh-0.3.32.data/data/npcsh/npc_team/turnic.npc +0 -4
  89. npcsh-0.3.32.data/data/npcsh/npc_team/welxor.npc +0 -0
  90. npcsh-0.3.32.dist-info/METADATA +0 -779
  91. npcsh-0.3.32.dist-info/RECORD +0 -78
  92. npcsh-0.3.32.dist-info/entry_points.txt +0 -3
  93. {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/top_level.txt +0 -0
npcsh/serve.py DELETED
@@ -1,1467 +0,0 @@
1
- from flask import Flask, request, jsonify, Response
2
- import configparser # Add this with your other imports
3
- from flask_sse import sse
4
- import redis
5
-
6
- from flask_cors import CORS
7
- import os
8
- import sqlite3
9
- from datetime import datetime
10
- import json
11
- from pathlib import Path
12
-
13
- import yaml
14
-
15
- from PIL import Image
16
- from PIL import ImageFile
17
-
18
- from npcsh.command_history import (
19
- CommandHistory,
20
- save_conversation_message,
21
- )
22
- from npcsh.npc_compiler import NPCCompiler, Tool, NPC
23
- from npcsh.npc_sysenv import (
24
- get_model_and_provider,
25
- get_available_models,
26
- get_system_message,
27
- NPCSH_STREAM_OUTPUT,
28
- )
29
-
30
-
31
- from npcsh.llm_funcs import (
32
- check_llm_command,
33
- get_llm_response,
34
- get_stream,
35
- get_conversation,
36
- )
37
- from npcsh.helpers import get_directory_npcs, get_db_npcs, get_npc_path
38
- from npcsh.npc_compiler import load_npc_from_file
39
- from npcsh.shell_helpers import execute_command, execute_command_stream
40
- import base64
41
-
42
- import json
43
- import os
44
- from pathlib import Path
45
-
46
# Path for storing settings (shell rc-style file in the user's home dir).
SETTINGS_FILE = Path(os.path.expanduser("~/.npcshrc"))

# Configuration: history DB plus global and project NPC team locations.
db_path = os.path.expanduser("~/npcsh_history.db")
user_npc_directory = os.path.expanduser("~/.npcsh/npc_team")
project_npc_directory = os.path.abspath("./npc_team")

# Initialize components
npc_compiler = NPCCompiler(user_npc_directory, db_path)

app = Flask(__name__)
# SSE support is backed by a local Redis instance; the server will not
# stream if Redis is not reachable on localhost:6379.
app.config["REDIS_URL"] = "redis://localhost:6379"
app.register_blueprint(sse, url_prefix="/stream")

redis_client = redis.Redis(host="localhost", port=6379, decode_responses=True)

# CORS is limited to the local dev frontend (Vite default port).
CORS(
    app,
    origins=["http://localhost:5173"],
    allow_headers=["Content-Type", "Authorization"],
    methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
    supports_credentials=True,
)
70
-
71
-
72
def get_locally_available_models(project_directory):
    """Discover usable model/provider pairs for a project.

    Reads API keys from ``<project_directory>/.env`` (Anthropic, OpenAI,
    Gemini, DeepSeek) and also probes a local Ollama install.

    Args:
        project_directory: directory whose ``.env`` file is consulted.

    Returns:
        list[dict]: entries of the form ``{"model": ..., "provider": ...}``.
    """
    available_models_providers = []

    # Parse the project .env into a plain dict (comments/blank lines skipped).
    env_path = os.path.join(project_directory, ".env")
    env_vars = {}
    if os.path.exists(env_path):
        with open(env_path, "r") as f:
            for line in f:
                line = line.strip()
                if line and not line.startswith("#"):
                    if "=" in line:
                        key, value = line.split("=", 1)
                        env_vars[key.strip()] = value.strip().strip("\"'")

    if "ANTHROPIC_API_KEY" in env_vars:
        import anthropic

        # BUG FIX: previously constructed the client from os.environ even
        # though the guard checks env_vars, so a key present only in the
        # project .env was silently ignored.
        client = anthropic.Anthropic(api_key=env_vars["ANTHROPIC_API_KEY"])
        models = client.models.list()
        for model in models.data:
            available_models_providers.append(
                {
                    "model": model.id,
                    "provider": "anthropic",
                }
            )

    if "OPENAI_API_KEY" in env_vars:
        import openai

        openai.api_key = env_vars["OPENAI_API_KEY"]
        models = openai.models.list()

        # Keep only chat-capable models; drop audio/realtime variants.
        for model in models.data:
            if (
                (
                    "gpt" in model.id
                    or "o1" in model.id
                    or "o3" in model.id
                    or "chat" in model.id
                )
                and "audio" not in model.id
                and "realtime" not in model.id
            ):
                available_models_providers.append(
                    {
                        "model": model.id,
                        "provider": "openai",
                    }
                )

    if "GEMINI_API_KEY" in env_vars:
        import google.generativeai as gemini

        # The list call mainly validates the key; only one known-good
        # model is exposed to the frontend.
        gemini.configure(api_key=env_vars["GEMINI_API_KEY"])
        models = gemini.list_models()
        available_models_providers.append(
            {
                "model": "gemini-2.0-flash-lite",
                "provider": "gemini",
            }
        )

    if "DEEPSEEK_API_KEY" in env_vars:
        available_models_providers.append(
            {"model": "deepseek-chat", "provider": "deepseek"}
        )
        available_models_providers.append(
            {"model": "deepseek-reasoner", "provider": "deepseek"}
        )

    # Ollama is best-effort: a missing package or daemon is not an error.
    try:
        import ollama

        models = ollama.list()
        for model in models:
            if "embed" not in model.model:
                mod = model.model
                available_models_providers.append(
                    {
                        "model": mod,
                        "provider": "ollama",
                    }
                )

    except Exception as e:
        print(f"Error loading ollama models: {e}")
    return available_models_providers
169
-
170
-
171
def get_db_connection():
    """Open the npcsh history database with dict-like row access."""
    connection = sqlite3.connect(db_path)
    connection.row_factory = sqlite3.Row
    return connection
175
-
176
-
177
# Map upper-cased file extensions to the ~/.npcsh/<category>/ subfolder used
# when persisting uploaded attachments; call sites fall back to "others" for
# extensions not listed here.
extension_map = {
    "PNG": "images",
    "JPG": "images",
    "JPEG": "images",
    "GIF": "images",
    "SVG": "images",
    "MP4": "videos",
    "AVI": "videos",
    "MOV": "videos",
    "WMV": "videos",
    "MPG": "videos",
    "MPEG": "videos",
    "DOC": "documents",
    "DOCX": "documents",
    "PDF": "documents",
    "PPT": "documents",
    "PPTX": "documents",
    "XLS": "documents",
    "XLSX": "documents",
    "TXT": "documents",
    "CSV": "documents",
    "ZIP": "archives",
    "RAR": "archives",
    "7Z": "archives",
    "TAR": "archives",
    "GZ": "archives",
    "BZ2": "archives",
    "ISO": "archives",
}
206
-
207
-
208
def fetch_messages_for_conversation(conversation_id):
    """Return the ordered role/content history for one conversation."""
    connection = get_db_connection()
    cursor = connection.cursor()

    cursor.execute(
        """
    SELECT role, content
    FROM conversation_history
    WHERE conversation_id = ?
    ORDER BY timestamp ASC
    """,
        (conversation_id,),
    )
    rows = cursor.fetchall()
    connection.close()

    history = []
    for row in rows:
        history.append({"role": row["role"], "content": row["content"]})
    return history
229
-
230
-
231
- @app.route("/api/attachments/<message_id>", methods=["GET"])
232
- def get_message_attachments(message_id):
233
- """Get all attachments for a message"""
234
- try:
235
- command_history = CommandHistory(db_path)
236
- attachments = command_history.get_message_attachments(message_id)
237
- return jsonify({"attachments": attachments, "error": None})
238
- except Exception as e:
239
- return jsonify({"error": str(e)}), 500
240
-
241
-
242
- @app.route("/api/attachment/<attachment_id>", methods=["GET"])
243
- def get_attachment(attachment_id):
244
- """Get specific attachment data"""
245
- try:
246
- command_history = CommandHistory(db_path)
247
- data, name, type = command_history.get_attachment_data(attachment_id)
248
-
249
- if data:
250
- # Convert binary data to base64 for sending
251
- base64_data = base64.b64encode(data).decode("utf-8")
252
- return jsonify(
253
- {"data": base64_data, "name": name, "type": type, "error": None}
254
- )
255
- return jsonify({"error": "Attachment not found"}), 404
256
- except Exception as e:
257
- return jsonify({"error": str(e)}), 500
258
-
259
-
260
- @app.route("/api/capture_screenshot", methods=["GET"])
261
- def capture():
262
- # Capture screenshot using NPC-based method
263
- screenshot = capture_screenshot(None, full=True)
264
-
265
- # Ensure screenshot was captured successfully
266
- if not screenshot:
267
- print("Screenshot capture failed")
268
- return None
269
-
270
- return jsonify({"screenshot": screenshot})
271
-
272
-
273
- @app.route("/api/settings/global", methods=["GET", "OPTIONS"])
274
- def get_global_settings():
275
- if request.method == "OPTIONS":
276
- return "", 200
277
-
278
- try:
279
- npcshrc_path = os.path.expanduser("~/.npcshrc")
280
-
281
- # Default settings
282
- global_settings = {
283
- "model": "llama3.2",
284
- "provider": "ollama",
285
- "embedding_model": "nomic-embed-text",
286
- "embedding_provider": "ollama",
287
- "search_provider": "google",
288
- "NPCSH_LICENSE_KEY": "",
289
- }
290
- global_vars = {}
291
-
292
- if os.path.exists(npcshrc_path):
293
- with open(npcshrc_path, "r") as f:
294
- for line in f:
295
- # Skip comments and empty lines
296
- line = line.split("#")[0].strip()
297
- if not line:
298
- continue
299
-
300
- if "=" not in line:
301
- continue
302
-
303
- # Split on first = only
304
- key, value = line.split("=", 1)
305
- key = key.strip()
306
- if key.startswith("export "):
307
- key = key[7:]
308
-
309
- # Clean up the value - handle quoted strings properly
310
- value = value.strip()
311
- if value.startswith('"') and value.endswith('"'):
312
- value = value[1:-1]
313
- elif value.startswith("'") and value.endswith("'"):
314
- value = value[1:-1]
315
-
316
- # Map environment variables to settings
317
- key_mapping = {
318
- "NPCSH_MODEL": "model",
319
- "NPCSH_PROVIDER": "provider",
320
- "NPCSH_EMBEDDING_MODEL": "embedding_model",
321
- "NPCSH_EMBEDDING_PROVIDER": "embedding_provider",
322
- "NPCSH_SEARCH_PROVIDER": "search_provider",
323
- "NPCSH_LICENSE_KEY": "NPCSH_LICENSE_KEY",
324
- "NPCSH_STREAM_OUTPUT": "NPCSH_STREAM_OUTPUT",
325
- }
326
-
327
- if key in key_mapping:
328
- global_settings[key_mapping[key]] = value
329
- else:
330
- global_vars[key] = value
331
-
332
- return jsonify(
333
- {
334
- "global_settings": global_settings,
335
- "global_vars": global_vars,
336
- "error": None,
337
- }
338
- )
339
-
340
- except Exception as e:
341
- print(f"Error in get_global_settings: {str(e)}")
342
- return jsonify({"error": str(e)}), 500
343
-
344
-
345
- @app.route("/api/settings/global", methods=["POST", "OPTIONS"])
346
- def save_global_settings():
347
- if request.method == "OPTIONS":
348
- return "", 200
349
-
350
- try:
351
- data = request.json
352
- npcshrc_path = os.path.expanduser("~/.npcshrc")
353
-
354
- key_mapping = {
355
- "model": "NPCSH_CHAT_MODEL",
356
- "provider": "NPCSH_CHAT_PROVIDER",
357
- "embedding_model": "NPCSH_EMBEDDING_MODEL",
358
- "embedding_provider": "NPCSH_EMBEDDING_PROVIDER",
359
- "search_provider": "NPCSH_SEARCH_PROVIDER",
360
- "NPCSH_LICENSE_KEY": "NPCSH_LICENSE_KEY",
361
- "NPCSH_STREAM_OUTPUT": "NPCSH_STREAM_OUTPUT",
362
- }
363
-
364
- os.makedirs(os.path.dirname(npcshrc_path), exist_ok=True)
365
-
366
- with open(npcshrc_path, "w") as f:
367
- # Write settings as environment variables
368
- for key, value in data.get("global_settings", {}).items():
369
- if key in key_mapping and value:
370
- # Quote value if it contains spaces
371
- if " " in str(value):
372
- value = f'"{value}"'
373
- f.write(f"export {key_mapping[key]}={value}\n")
374
-
375
- # Write custom variables
376
- for key, value in data.get("global_vars", {}).items():
377
- if key and value:
378
- if " " in str(value):
379
- value = f'"{value}"'
380
- f.write(f"export {key}={value}\n")
381
-
382
- return jsonify({"message": "Global settings saved successfully", "error": None})
383
-
384
- except Exception as e:
385
- print(f"Error in save_global_settings: {str(e)}")
386
- return jsonify({"error": str(e)}), 500
387
-
388
-
389
- @app.route("/api/settings/project", methods=["GET", "OPTIONS"]) # Add OPTIONS
390
- def get_project_settings():
391
- if request.method == "OPTIONS":
392
- return "", 200
393
-
394
- try:
395
- current_dir = request.args.get("path")
396
- if not current_dir:
397
- return jsonify({"error": "No path provided"}), 400
398
-
399
- env_path = os.path.join(current_dir, ".env")
400
- env_vars = {}
401
-
402
- if os.path.exists(env_path):
403
- with open(env_path, "r") as f:
404
- for line in f:
405
- line = line.strip()
406
- if line and not line.startswith("#"):
407
- if "=" in line:
408
- key, value = line.split("=", 1)
409
- env_vars[key.strip()] = value.strip().strip("\"'")
410
-
411
- return jsonify({"env_vars": env_vars, "error": None})
412
-
413
- except Exception as e:
414
- print(f"Error in get_project_settings: {str(e)}")
415
- return jsonify({"error": str(e)}), 500
416
-
417
-
418
- @app.route("/api/settings/project", methods=["POST", "OPTIONS"]) # Add OPTIONS
419
- def save_project_settings():
420
- if request.method == "OPTIONS":
421
- return "", 200
422
-
423
- try:
424
- current_dir = request.args.get("path")
425
- if not current_dir:
426
- return jsonify({"error": "No path provided"}), 400
427
-
428
- data = request.json
429
- env_path = os.path.join(current_dir, ".env")
430
-
431
- with open(env_path, "w") as f:
432
- for key, value in data.get("env_vars", {}).items():
433
- f.write(f"{key}={value}\n")
434
-
435
- return jsonify(
436
- {"message": "Project settings saved successfully", "error": None}
437
- )
438
-
439
- except Exception as e:
440
- print(f"Error in save_project_settings: {str(e)}")
441
- return jsonify({"error": str(e)}), 500
442
-
443
-
444
- @app.route("/api/models", methods=["GET"])
445
- def get_models():
446
- """
447
- Endpoint to retrieve available models based on the current project path.
448
- Checks for local configurations (.env) and Ollama.
449
- """
450
- current_path = request.args.get("currentPath")
451
- if not current_path:
452
- # Fallback to a default path or user home if needed,
453
- # but ideally the frontend should always provide it.
454
- current_path = os.path.expanduser("~/.npcsh") # Or handle error
455
- print("Warning: No currentPath provided for /api/models, using default.")
456
- # return jsonify({"error": "currentPath parameter is required"}), 400
457
-
458
- try:
459
- # Reuse the existing function to detect models
460
- available_models = get_locally_available_models(current_path)
461
-
462
- # Optionally, add more details or format the response if needed
463
- # Example: Add a display name
464
- formatted_models = []
465
- for m in available_models:
466
- # Basic formatting, customize as needed
467
- text_only = (
468
- "(text only)"
469
- if m["provider"] == "ollama"
470
- and m["model"] in ["llama3.2", "deepseek-v3", "phi4"]
471
- else ""
472
- )
473
- # Handle specific known model names for display
474
- display_model = m["model"]
475
- if "claude-3-5-haiku-latest" in m["model"]:
476
- display_model = "claude-3.5-haiku"
477
- elif "claude-3-5-sonnet-latest" in m["model"]:
478
- display_model = "claude-3.5-sonnet"
479
- elif "gemini-1.5-flash" in m["model"]:
480
- display_model = "gemini-1.5-flash" # Handle multiple versions if needed
481
- elif "gemini-2.0-flash-lite-preview-02-05" in m["model"]:
482
- display_model = "gemini-2.0-flash-lite-preview"
483
-
484
- display_name = f"{display_model} | {m['provider']} {text_only}".strip()
485
-
486
- formatted_models.append(
487
- {
488
- "value": m["model"], # Use the actual model ID as the value
489
- "provider": m["provider"],
490
- "display_name": display_name,
491
- }
492
- )
493
-
494
- return jsonify({"models": formatted_models, "error": None})
495
-
496
- except Exception as e:
497
- print(f"Error getting available models: {str(e)}")
498
- import traceback
499
-
500
- traceback.print_exc()
501
- # Return an empty list or a specific error structure
502
- return jsonify({"models": [], "error": str(e)}), 500
503
-
504
-
505
- @app.route("/api/stream", methods=["POST"])
506
- def stream():
507
- """SSE stream that takes messages, models, providers, and attachments from frontend."""
508
- data = request.json
509
- commandstr = data.get("commandstr")
510
- conversation_id = data.get("conversationId")
511
- model = data.get("model", None)
512
- provider = data.get("provider", None)
513
- npc = data.get("npc", None)
514
- attachments = data.get("attachments", [])
515
- current_path = data.get("currentPath")
516
-
517
- command_history = CommandHistory(db_path)
518
-
519
- # Process attachments and save them properly
520
- images = []
521
- print(attachments)
522
-
523
- from io import BytesIO
524
- from PIL import Image
525
-
526
- attachments_loaded = []
527
-
528
- if attachments:
529
- for attachment in attachments:
530
- extension = attachment["name"].split(".")[-1]
531
- extension_mapped = extension_map.get(extension.upper(), "others")
532
- file_path = os.path.expanduser(
533
- "~/.npcsh/" + extension_mapped + "/" + attachment["name"]
534
- )
535
-
536
- if extension_mapped == "images":
537
- # Open the image file and save it to the file path
538
- ImageFile.LOAD_TRUNCATED_IMAGES = True
539
- img = Image.open(attachment["path"])
540
-
541
- # Save the image to a BytesIO buffer (to extract binary data)
542
- img_byte_arr = BytesIO()
543
- img.save(img_byte_arr, format="PNG") # or the appropriate format
544
- img_byte_arr.seek(0) # Rewind the buffer to the beginning
545
-
546
- # Save the image to a file
547
- img.save(file_path, optimize=True, quality=50)
548
-
549
- # Add to images list for LLM processing
550
- images.append({"filename": attachment["name"], "file_path": file_path})
551
-
552
- # Add the image data (in binary form) to attachments_loaded
553
- attachments_loaded.append(
554
- {
555
- "name": attachment["name"],
556
- "type": extension_mapped,
557
- "data": img_byte_arr.read(), # Read binary data from the buffer
558
- "size": os.path.getsize(file_path),
559
- }
560
- )
561
-
562
- messages = fetch_messages_for_conversation(conversation_id)
563
- messages.append({"role": "user", "content": commandstr})
564
- if not messages:
565
- return jsonify({"error": "No messages provided"}), 400
566
-
567
- # Save the user message with attachments in the database
568
- print("commandstr ", commandstr)
569
- message_id = command_history.generate_message_id()
570
-
571
- save_conversation_message(
572
- command_history,
573
- conversation_id,
574
- "user",
575
- commandstr,
576
- wd=current_path,
577
- model=model,
578
- provider=provider,
579
- npc=npc,
580
- attachments=attachments_loaded,
581
- message_id=message_id,
582
- )
583
- message_id = command_history.generate_message_id()
584
-
585
- stream_response = get_stream(
586
- messages,
587
- images=images,
588
- model=model,
589
- provider=provider,
590
- npc=npc if isinstance(npc, NPC) else None,
591
- )
592
-
593
- final_response = "" # To accumulate the assistant's response
594
-
595
- complete_response = [] # List to store all chunks
596
-
597
- def event_stream():
598
- for response_chunk in stream_response:
599
- chunk_content = ""
600
- chunk_content = "".join(
601
- choice.delta.content
602
- for choice in response_chunk.choices
603
- if choice.delta.content is not None
604
- )
605
- if chunk_content:
606
- complete_response.append(chunk_content)
607
- chunk_data = {
608
- "id": response_chunk.id,
609
- "object": response_chunk.object,
610
- "created": response_chunk.created,
611
- "model": response_chunk.model,
612
- "choices": [
613
- {
614
- "index": choice.index,
615
- "delta": {
616
- "content": choice.delta.content,
617
- "role": choice.delta.role,
618
- },
619
- "finish_reason": choice.finish_reason,
620
- }
621
- for choice in response_chunk.choices
622
- ],
623
- }
624
- yield f"data: {json.dumps(chunk_data)}\n\n"
625
- save_conversation_message(
626
- command_history,
627
- conversation_id,
628
- "assistant",
629
- chunk_content,
630
- wd=current_path,
631
- model=model,
632
- provider=provider,
633
- npc=npc,
634
- message_id=message_id, # Save with the same message_id
635
- )
636
-
637
- # Send completion message
638
- yield f"data: {json.dumps({'type': 'message_stop'})}\n\n"
639
- full_content = command_history.get_full_message_content(message_id)
640
- command_history.update_message_content(message_id, full_content)
641
-
642
- response = Response(event_stream(), mimetype="text/event-stream")
643
-
644
- return response
645
-
646
-
647
- @app.route("/api/npc_team_global")
648
- def get_npc_team_global():
649
- try:
650
- db_conn = get_db_connection()
651
- global_npc_directory = os.path.expanduser("~/.npcsh/npc_team")
652
-
653
- npc_data = []
654
-
655
- # Use existing helper to get NPCs from the global directory
656
- for npc_file in os.listdir(global_npc_directory):
657
- if npc_file.endswith(".npc"):
658
- npc_path = os.path.join(global_npc_directory, npc_file)
659
- npc = load_npc_from_file(npc_path, db_conn)
660
-
661
- # Serialize the NPC data
662
- serialized_npc = {
663
- "name": npc.name,
664
- "primary_directive": npc.primary_directive,
665
- "model": npc.model,
666
- "provider": npc.provider,
667
- "api_url": npc.api_url,
668
- "use_global_tools": npc.use_global_tools,
669
- "tools": [
670
- {
671
- "tool_name": tool.tool_name,
672
- "inputs": tool.inputs,
673
- "preprocess": tool.preprocess,
674
- "prompt": tool.prompt,
675
- "postprocess": tool.postprocess,
676
- }
677
- for tool in npc.tools
678
- ],
679
- }
680
- npc_data.append(serialized_npc)
681
-
682
- return jsonify({"npcs": npc_data, "error": None})
683
-
684
- except Exception as e:
685
- print(f"Error loading global NPCs: {str(e)}")
686
- return jsonify({"npcs": [], "error": str(e)})
687
-
688
-
689
- @app.route("/api/tools/global", methods=["GET"])
690
- def get_global_tools():
691
- # try:
692
- user_home = os.path.expanduser("~")
693
- tools_dir = os.path.join(user_home, ".npcsh", "npc_team", "tools")
694
- tools = []
695
- if os.path.exists(tools_dir):
696
- for file in os.listdir(tools_dir):
697
- if file.endswith(".tool"):
698
- with open(os.path.join(tools_dir, file), "r") as f:
699
- tool_data = yaml.safe_load(f)
700
- tools.append(tool_data)
701
- return jsonify({"tools": tools})
702
-
703
-
704
- # except Exception as e:
705
- # return jsonify({"error": str(e)}), 500
706
-
707
-
708
- @app.route("/api/tools/project", methods=["GET"])
709
- def get_project_tools():
710
- current_path = request.args.get(
711
- "currentPath"
712
- ) # Correctly retrieves `currentPath` from query params
713
- if not current_path:
714
- return jsonify({"tools": []})
715
-
716
- if not current_path.endswith("npc_team"):
717
- current_path = os.path.join(current_path, "npc_team")
718
-
719
- tools_dir = os.path.join(current_path, "tools")
720
- tools = []
721
- if os.path.exists(tools_dir):
722
- for file in os.listdir(tools_dir):
723
- if file.endswith(".tool"):
724
- with open(os.path.join(tools_dir, file), "r") as f:
725
- tool_data = yaml.safe_load(f)
726
- tools.append(tool_data)
727
- return jsonify({"tools": tools})
728
-
729
-
730
- @app.route("/api/tools/save", methods=["POST"])
731
- def save_tool():
732
- try:
733
- data = request.json
734
- tool_data = data.get("tool")
735
- is_global = data.get("isGlobal")
736
- current_path = data.get("currentPath")
737
- tool_name = tool_data.get("tool_name")
738
-
739
- if not tool_name:
740
- return jsonify({"error": "Tool name is required"}), 400
741
-
742
- if is_global:
743
- tools_dir = os.path.join(
744
- os.path.expanduser("~"), ".npcsh", "npc_team", "tools"
745
- )
746
- else:
747
- if not current_path.endswith("npc_team"):
748
- current_path = os.path.join(current_path, "npc_team")
749
- tools_dir = os.path.join(current_path, "tools")
750
-
751
- os.makedirs(tools_dir, exist_ok=True)
752
-
753
- # Full tool structure
754
- tool_yaml = {
755
- "description": tool_data.get("description", ""),
756
- "inputs": tool_data.get("inputs", []),
757
- "steps": tool_data.get("steps", []),
758
- }
759
-
760
- file_path = os.path.join(tools_dir, f"{tool_name}.tool")
761
- with open(file_path, "w") as f:
762
- yaml.safe_dump(tool_yaml, f, sort_keys=False)
763
-
764
- return jsonify({"status": "success"})
765
- except Exception as e:
766
- return jsonify({"error": str(e)}), 500
767
-
768
-
769
- @app.route("/api/save_npc", methods=["POST"])
770
- def save_npc():
771
- try:
772
- data = request.json
773
- npc_data = data.get("npc")
774
- is_global = data.get("isGlobal")
775
- current_path = data.get("currentPath")
776
-
777
- if not npc_data or "name" not in npc_data:
778
- return jsonify({"error": "Invalid NPC data"}), 400
779
-
780
- # Determine the directory based on whether it's global or project
781
- if is_global:
782
- npc_directory = os.path.expanduser("~/.npcsh/npc_team")
783
- else:
784
- npc_directory = os.path.join(current_path, "npc_team")
785
-
786
- # Ensure the directory exists
787
- os.makedirs(npc_directory, exist_ok=True)
788
-
789
- # Create the YAML content
790
- yaml_content = f"""name: {npc_data['name']}
791
- primary_directive: "{npc_data['primary_directive']}"
792
- model: {npc_data['model']}
793
- provider: {npc_data['provider']}
794
- api_url: {npc_data.get('api_url', '')}
795
- use_global_tools: {str(npc_data.get('use_global_tools', True)).lower()}
796
- """
797
-
798
- # Save the file
799
- npc_file_path = os.path.join(npc_directory, f"{npc_data['name']}.npc")
800
- with open(npc_file_path, "w") as f:
801
- f.write(yaml_content)
802
-
803
- return jsonify({"message": "NPC saved successfully", "error": None})
804
-
805
- except Exception as e:
806
- print(f"Error saving NPC: {str(e)}")
807
- return jsonify({"error": str(e)}), 500
808
-
809
-
810
- @app.route("/api/npc_team_project", methods=["GET"])
811
- def get_npc_team_project():
812
- try:
813
- db_conn = get_db_connection()
814
-
815
- project_npc_directory = request.args.get("currentPath")
816
- if not project_npc_directory.endswith("npc_team"):
817
- project_npc_directory = os.path.join(project_npc_directory, "npc_team")
818
-
819
- npc_data = []
820
-
821
- for npc_file in os.listdir(project_npc_directory):
822
- print(npc_file)
823
- if npc_file.endswith(".npc"):
824
- npc_path = os.path.join(project_npc_directory, npc_file)
825
- npc = load_npc_from_file(npc_path, db_conn)
826
-
827
- # Serialize the NPC data, including tools
828
- serialized_npc = {
829
- "name": npc.name,
830
- "primary_directive": npc.primary_directive,
831
- "model": npc.model,
832
- "provider": npc.provider,
833
- "api_url": npc.api_url,
834
- "use_global_tools": npc.use_global_tools,
835
- "tools": [
836
- {
837
- "tool_name": tool.tool_name,
838
- "inputs": tool.inputs,
839
- "preprocess": tool.preprocess,
840
- "prompt": tool.prompt,
841
- "postprocess": tool.postprocess,
842
- }
843
- for tool in npc.tools
844
- ],
845
- }
846
- npc_data.append(serialized_npc)
847
-
848
- print(npc_data)
849
- return jsonify({"npcs": npc_data, "error": None})
850
-
851
- except Exception as e:
852
- print(f"Error fetching NPC team: {str(e)}")
853
- return jsonify({"npcs": [], "error": str(e)})
854
-
855
-
856
@app.route("/api/get_attachment_response", methods=["POST"])
def get_attachment_response():
    """Answer the latest user message together with any attached images.

    Saves image attachments (compressed) under ~/.npcsh/images/, forwards them
    with the conversation to the LLM, persists both sides of the exchange, and
    returns the assistant reply plus the updated message list.
    """
    data = request.json
    attachments = data.get("attachments", [])
    messages = data.get("messages")
    conversation_id = data.get("conversationId")
    current_path = data.get("currentPath")
    command_history = CommandHistory(db_path)
    model = data.get("model")
    npc = data.get("npc")
    # NOTE(review): npc is accepted but never resolved to a project/global NPC
    # object here — the lookup mentioned in the original TODO is unimplemented.

    image_refs = []
    for attachment in attachments:
        ext = attachment["name"].split(".")[-1]
        bucket = extension_map.get(ext.upper(), "others")
        dest_path = os.path.expanduser(
            "~/.npcsh/" + bucket + "/" + attachment["name"]
        )
        if bucket != "images":
            # Non-image attachments are currently ignored by this endpoint.
            continue
        ImageFile.LOAD_TRUNCATED_IMAGES = True
        img = Image.open(attachment["path"])
        img.save(dest_path, optimize=True, quality=50)
        image_refs.append({"filename": attachment["name"], "file_path": dest_path})

    # assumes the last message's content is a list whose first item is the
    # user's text — TODO confirm against the frontend payload shape
    message_to_send = messages[-1]["content"][0]

    llm_result = get_llm_response(
        message_to_send,
        images=image_refs,
        messages=messages,
        model=model,
    )
    messages = llm_result["messages"]
    reply = llm_result["response"]

    # Persist both sides of the exchange in the conversation history.
    save_conversation_message(
        command_history, conversation_id, "user", message_to_send, wd=current_path
    )
    save_conversation_message(
        command_history,
        conversation_id,
        "assistant",
        reply,
        wd=current_path,
    )
    return jsonify(
        {
            "status": "success",
            "message": reply,
            "conversationId": conversation_id,
            "messages": messages,
        }
    )
914
-
915
-
916
@app.route("/api/execute", methods=["POST"])
def execute():
    """Execute a command with conversation context and persist the exchange.

    Rebuilds the prior conversation from SQLite (when a conversation id is
    given), runs the command through execute_command, stores the user command
    and assistant output, and returns the output plus the context messages.
    """
    try:
        data = request.json
        command = data.get("commandstr")
        current_path = data.get("currentPath")
        conversation_id = data.get("conversationId")
        model = data.get("model")
        print("model", model)
        npc = data.get("npc")
        print("npc", npc)
        # NOTE(review): npc is received but never loaded/used below — the
        # project-then-global NPC lookup described in the original comments
        # is still unimplemented, as is listing NPCs in the frontend.

        # Strip surrounding quotes/backticks so the command is passed as plain text.
        command = command.strip().replace('"', "").replace("'", "").replace("`", "")

        if not command:
            error_payload = {
                "error": "No command provided",
                "output": "Error: No command provided",
            }
            return jsonify(error_payload), 400

        command_history = CommandHistory(db_path)

        # Rebuild the prior conversation so the LLM sees full context.
        messages = []
        if conversation_id:
            conn = get_db_connection()
            cursor = conn.cursor()
            cursor.execute(
                """
                SELECT role, content, timestamp
                FROM conversation_history
                WHERE conversation_id = ?
                ORDER BY timestamp ASC
                """,
                (conversation_id,),
            )
            for row in cursor.fetchall():
                messages.append({"role": row["role"], "content": row["content"]})
            conn.close()

        result = execute_command(
            command=command,
            command_history=command_history,
            db_path=db_path,
            npc_compiler=npc_compiler,
            conversation_id=conversation_id,
            messages=messages,
            model=model,
        )

        # Persist both sides of the exchange.
        save_conversation_message(
            command_history, conversation_id, "user", command, wd=current_path
        )
        save_conversation_message(
            command_history,
            conversation_id,
            "assistant",
            result.get("output", ""),
            wd=current_path,
        )

        return jsonify(
            {
                "output": result.get("output", ""),
                "currentPath": os.getcwd(),
                "error": None,
                "messages": messages,
            }
        )

    except Exception as e:
        print(f"Error executing command: {str(e)}")
        import traceback

        traceback.print_exc()
        return (
            jsonify(
                {
                    "error": str(e),
                    "output": f"Error: {str(e)}",
                    "currentPath": data.get("currentPath", None),
                }
            ),
            500,
        )
1027
-
1028
-
1029
def get_conversation_history(conversation_id):
    """Fetch all messages for a conversation in chronological order."""
    if not conversation_id:
        return []

    conn = get_db_connection()
    try:
        cursor = conn.cursor()
        cursor.execute(
            """
            SELECT role, content, timestamp
            FROM conversation_history
            WHERE conversation_id = ?
            ORDER BY timestamp ASC
            """,
            (conversation_id,),
        )
        rows = cursor.fetchall()
    finally:
        # Always release the connection, even if the query fails.
        conn.close()

    return [
        {
            "role": row["role"],
            "content": row["content"],
            "timestamp": row["timestamp"],
        }
        for row in rows
    ]
1057
-
1058
-
1059
@app.route("/api/conversations", methods=["GET"])
def get_conversations():
    """List conversations recorded under a directory path, newest first.

    Each entry carries the conversation id, its earliest timestamp, and a
    preview built from the concatenated message contents (truncated to 100
    characters).
    """
    try:
        path = request.args.get("path")
        if not path:
            return jsonify({"error": "No path provided", "conversations": []}), 400

        conn = get_db_connection()
        try:
            cursor = conn.cursor()
            query = """
            SELECT DISTINCT conversation_id,
                MIN(timestamp) as start_time,
                GROUP_CONCAT(content) as preview
            FROM conversation_history
            WHERE directory_path = ?
            GROUP BY conversation_id
            ORDER BY start_time DESC
            """
            cursor.execute(query, [path])

            summaries = []
            for row in cursor.fetchall():
                preview = row["preview"]
                if preview and len(preview) > 100:
                    preview = preview[:100] + "..."
                summaries.append(
                    {
                        "id": row["conversation_id"],
                        "timestamp": row["start_time"],
                        "preview": preview,
                    }
                )

            return jsonify({"conversations": summaries, "error": None})
        finally:
            conn.close()

    except Exception as e:
        print(f"Error getting conversations: {str(e)}")
        return jsonify({"error": str(e), "conversations": []}), 500
1107
-
1108
-
1109
@app.route("/api/conversation/<conversation_id>/messages", methods=["GET"])
def get_conversation_messages(conversation_id):
    """Return a conversation's deduplicated messages (with attachments) in order.

    For each (role, second-resolution timestamp) pair only the row with the
    highest id is kept, matching the window-function ranking below. Attachments
    are resolved per message via get_message_attachments.
    """
    # BUGFIX: initialize conn before the try so the finally block cannot raise
    # NameError (and mask the real error) if get_db_connection() itself fails.
    conn = None
    try:
        conn = get_db_connection()
        cursor = conn.cursor()

        # Rank duplicate rows per (role, timestamp-second) and keep only rn = 1.
        query = """
        WITH ranked_messages AS (
            SELECT
                ch.*,
                GROUP_CONCAT(ma.id) as attachment_ids,
                ROW_NUMBER() OVER (
                    PARTITION BY ch.role, strftime('%s', ch.timestamp)
                    ORDER BY ch.id DESC
                ) as rn
            FROM conversation_history ch
            LEFT JOIN message_attachments ma
                ON ch.message_id = ma.message_id
            WHERE ch.conversation_id = ?
            GROUP BY ch.id, ch.timestamp
        )
        SELECT *
        FROM ranked_messages
        WHERE rn = 1
        ORDER BY timestamp ASC, id ASC
        """

        cursor.execute(query, [conversation_id])
        messages = cursor.fetchall()
        print(messages)  # debug: raw rows returned by the dedup query

        return jsonify(
            {
                "messages": [
                    {
                        "message_id": msg["message_id"],
                        "role": msg["role"],
                        "content": msg["content"],
                        "timestamp": msg["timestamp"],
                        "model": msg["model"],
                        "provider": msg["provider"],
                        "npc": msg["npc"],
                        "attachments": (
                            get_message_attachments(msg["message_id"])
                            if msg["attachment_ids"]
                            else []
                        ),
                    }
                    for msg in messages
                ],
                "error": None,
            }
        )

    except Exception as e:
        print(f"Error getting conversation messages: {str(e)}")
        return jsonify({"error": str(e), "messages": []}), 500
    finally:
        # Close only if the connection was actually opened.
        if conn is not None:
            conn.close()
1169
-
1170
-
1171
@app.route("/api/stream", methods=["POST"])
def stream_raw():
    """SSE stream that takes messages, models, providers, and attachments from frontend."""
    # Request payload: command text, conversation id, model/provider/npc
    # selection, optional attachments, and a flag controlling sqlite persistence.
    data = request.json
    commandstr = data.get("commandstr")
    conversation_id = data.get("conversationId")
    model = data.get("model", None)
    provider = data.get("provider", None)
    save_to_sqlite3 = data.get("saveToSqlite3", False)
    npc = data.get("npc", None)
    attachments = data.get("attachments", [])
    current_path = data.get("currentPath")
    print(data)  # debug: full request payload

    messages = data.get("messages", [])
    print("messages", messages)  # debug
    command_history = CommandHistory(db_path)

    images = []  # image file references handed to the LLM
    attachments_loaded = []  # binary attachment records persisted with the message

    if attachments:
        for attachment in attachments:
            extension = attachment["name"].split(".")[-1]
            extension_mapped = extension_map.get(extension.upper(), "others")
            # Destination under ~/.npcsh/<bucket>/ for the stored copy.
            file_path = os.path.expanduser(
                "~/.npcsh/" + extension_mapped + "/" + attachment["name"]
            )

            if extension_mapped == "images":
                # Open the image file and save it to the file path.
                # Tolerate partially-downloaded/truncated image files.
                ImageFile.LOAD_TRUNCATED_IMAGES = True
                img = Image.open(attachment["path"])

                # Save the image to a BytesIO buffer (to extract binary data).
                img_byte_arr = BytesIO()
                img.save(img_byte_arr, format="PNG")  # or the appropriate format
                img_byte_arr.seek(0)  # Rewind the buffer to the beginning

                # Save a compressed copy of the image to disk.
                img.save(file_path, optimize=True, quality=50)

                # Add to images list for LLM processing.
                images.append({"filename": attachment["name"], "file_path": file_path})

                # Add the image data (in binary form) to attachments_loaded.
                attachments_loaded.append(
                    {
                        "name": attachment["name"],
                        "type": extension_mapped,
                        "data": img_byte_arr.read(),  # Read binary data from the buffer
                        "size": os.path.getsize(file_path),
                    }
                )
    if save_to_sqlite3:
        if len(messages) == 0:
            # No history supplied by the frontend: load it from the database.
            messages = fetch_messages_for_conversation(conversation_id)
        if not messages:
            return jsonify({"error": "No messages provided"}), 400
        messages.append({"role": "user", "content": commandstr})
        message_id = command_history.generate_message_id()

        # Persist the user's message (with any attachment binaries) up front.
        save_conversation_message(
            command_history,
            conversation_id,
            "user",
            commandstr,
            wd=current_path,
            model=model,
            provider=provider,
            npc=npc,
            attachments=attachments_loaded,
            message_id=message_id,
        )
        # Fresh id for the assistant message the stream below will produce;
        # every streamed chunk is saved under this same id.
        message_id = command_history.generate_message_id()

        stream_response = get_stream(
            messages,
            images=images,
            model=model,
            provider=provider,
            npc=npc if isinstance(npc, NPC) else None,
        )

        # NOTE(review): dead string literal left from a disabled else-branch that
        # routed through execute_command_stream — kept verbatim.
        """else:

        stream_response = execute_command_stream(
            commandstr,
            command_history,
            db_path,
            npc_compiler,
            model=model,
            provider=provider,
            messages=messages,
            images=images,  # Pass the processed images
        )  # Get all conversation messages so far
        """
        final_response = ""  # To accumulate the assistant's response (NOTE(review): never written)
        complete_response = []  # List to store all chunks

        def event_stream():
            # Re-emit each upstream chunk as an SSE JSON event, persisting the
            # chunk text to sqlite as we go when save_to_sqlite3 is set.
            for response_chunk in stream_response:
                chunk_content = ""

                chunk_content = "".join(
                    choice.delta.content
                    for choice in response_chunk.choices
                    if choice.delta.content is not None
                )
                if chunk_content:
                    complete_response.append(chunk_content)
                    chunk_data = {
                        "type": "content",  # Added type
                        "id": response_chunk.id,
                        "object": response_chunk.object,
                        "created": response_chunk.created,
                        "model": response_chunk.model,
                        "choices": [
                            {
                                "index": choice.index,
                                "delta": {
                                    "content": choice.delta.content,
                                    "role": choice.delta.role,
                                },
                                "finish_reason": choice.finish_reason,
                            }
                            for choice in response_chunk.choices
                        ],
                    }
                    yield f"{json.dumps(chunk_data)}\n\n"

                if save_to_sqlite3:
                    # Each chunk is appended under the same assistant message_id;
                    # the full text is reassembled after the stream ends.
                    save_conversation_message(
                        command_history,
                        conversation_id,
                        "assistant",
                        chunk_content,
                        wd=current_path,
                        model=model,
                        provider=provider,
                        npc=npc,
                        message_id=message_id,  # Save with the same message_id
                    )

            # Send completion message.
            yield f"{json.dumps({'type': 'message_stop'})}\n\n"
            if save_to_sqlite3:
                # Collapse the per-chunk rows into one full assistant message.
                full_content = command_history.get_full_message_content(message_id)
                command_history.update_message_content(message_id, full_content)

        response = Response(event_stream(), mimetype="text/event-stream")

        return response

    # NOTE(review): fallback path for save_to_sqlite3=False — event_stream is
    # only defined inside the branch above, so this raises NameError; presumably
    # the disabled execute_command_stream branch was meant to feed it. Confirm
    # before relying on this endpoint without saveToSqlite3.
    response = Response(event_stream(), mimetype="text/event-stream")

    return response
1329
-
1330
-
1331
@app.after_request
def after_request(response):
    """Attach CORS headers to every outgoing response."""
    cors_headers = (
        ("Access-Control-Allow-Headers", "Content-Type,Authorization"),
        ("Access-Control-Allow-Methods", "GET,PUT,POST,DELETE,OPTIONS"),
        ("Access-Control-Allow-Credentials", "true"),
    )
    for header_name, header_value in cors_headers:
        response.headers.add(header_name, header_value)
    return response
1337
-
1338
-
1339
def get_db_connection():
    """Open a SQLite connection to the app database with dict-like row access."""
    connection = sqlite3.connect(db_path)
    # sqlite3.Row lets callers index rows by column name (row["content"]).
    connection.row_factory = sqlite3.Row
    return connection
1343
-
1344
-
1345
# File-type buckets used to sort uploaded attachments under ~/.npcsh/<bucket>/.
_EXTENSION_BUCKETS = {
    "images": ("PNG", "JPG", "JPEG", "GIF", "SVG"),
    "videos": ("MP4", "AVI", "MOV", "WMV", "MPG", "MPEG"),
    "documents": ("DOC", "DOCX", "PDF", "PPT", "PPTX", "XLS", "XLSX", "TXT", "CSV"),
    "archives": ("ZIP", "RAR", "7Z", "TAR", "GZ", "BZ2", "ISO"),
}

# Flat uppercase-extension -> bucket lookup (lookups elsewhere default to "others").
extension_map = {
    extension: bucket
    for bucket, extensions in _EXTENSION_BUCKETS.items()
    for extension in extensions
}
1374
-
1375
-
1376
def fetch_messages_for_conversation(conversation_id):
    """Return a conversation's messages (role/content/timestamp) in chronological order.

    Args:
        conversation_id: identifier matching conversation_history.conversation_id.

    Returns:
        list[dict]: one dict per message with "role", "content", "timestamp".
    """
    conn = get_db_connection()
    try:
        cursor = conn.cursor()
        query = """
        SELECT role, content, timestamp
        FROM conversation_history
        WHERE conversation_id = ?
        ORDER BY timestamp ASC
        """
        cursor.execute(query, (conversation_id,))
        rows = cursor.fetchall()
    finally:
        # BUGFIX: close the connection even when the query raises (the original
        # leaked the connection on error; get_conversation_history already
        # uses try/finally — this makes the two helpers consistent).
        conn.close()

    return [
        {
            "role": row["role"],
            "content": row["content"],
            "timestamp": row["timestamp"],
        }
        for row in rows
    ]
1398
-
1399
-
1400
@app.route("/api/health", methods=["GET"])
def health_check():
    """Liveness probe: report that the API server is up."""
    payload = {"status": "ok", "error": None}
    return jsonify(payload)
1403
-
1404
-
1405
def start_flask_server(
    port=5337,
    cors_origins=None,
):
    """Create the SQLite tables if needed, optionally enable CORS, and run the app.

    Args:
        port: TCP port to bind (default 5337).
        cors_origins: optional list of origins; when given, flask_cors.CORS is
            applied with those origins. When None, no CORS layer is added.
    """
    try:
        # Ensure the database tables exist before serving requests.
        conn = get_db_connection()
        try:
            cursor = conn.cursor()

            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS command_history (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp TEXT,
                    command TEXT,
                    tags TEXT,
                    response TEXT,
                    directory TEXT,
                    conversation_id TEXT
                )
                """
            )

            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS conversation_history (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp TEXT,
                    role TEXT,
                    content TEXT,
                    conversation_id TEXT,
                    directory_path TEXT
                )
                """
            )

            conn.commit()
        finally:
            conn.close()

        # Only apply CORS if origins are specified.
        if cors_origins:
            from flask_cors import CORS

            CORS(
                app,
                origins=cors_origins,
                allow_headers=["Content-Type", "Authorization"],
                methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
                supports_credentials=True,
            )

        # BUGFIX: honor the `port` parameter — the original hard-coded 5337 in
        # both the log line and app.run, silently ignoring the argument.
        print(f"Starting Flask server on http://0.0.0.0:{port}")
        app.run(host="0.0.0.0", port=port, debug=True)
    except Exception as e:
        print(f"Error starting server: {str(e)}")
1464
-
1465
-
1466
if __name__ == "__main__":
    # Script entry point: launch the API server with default settings.
    start_flask_server()