npcsh 0.3.31__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91) hide show
  1. npcsh/_state.py +942 -0
  2. npcsh/alicanto.py +1074 -0
  3. npcsh/guac.py +785 -0
  4. npcsh/mcp_helpers.py +357 -0
  5. npcsh/mcp_npcsh.py +822 -0
  6. npcsh/mcp_server.py +184 -0
  7. npcsh/npc.py +218 -0
  8. npcsh/npcsh.py +1161 -0
  9. npcsh/plonk.py +387 -269
  10. npcsh/pti.py +234 -0
  11. npcsh/routes.py +958 -0
  12. npcsh/spool.py +315 -0
  13. npcsh/wander.py +550 -0
  14. npcsh/yap.py +573 -0
  15. npcsh-1.0.0.dist-info/METADATA +596 -0
  16. npcsh-1.0.0.dist-info/RECORD +21 -0
  17. {npcsh-0.3.31.dist-info → npcsh-1.0.0.dist-info}/WHEEL +1 -1
  18. npcsh-1.0.0.dist-info/entry_points.txt +9 -0
  19. {npcsh-0.3.31.dist-info → npcsh-1.0.0.dist-info}/licenses/LICENSE +1 -1
  20. npcsh/audio.py +0 -210
  21. npcsh/cli.py +0 -545
  22. npcsh/command_history.py +0 -566
  23. npcsh/conversation.py +0 -291
  24. npcsh/data_models.py +0 -46
  25. npcsh/dataframes.py +0 -163
  26. npcsh/embeddings.py +0 -168
  27. npcsh/helpers.py +0 -641
  28. npcsh/image.py +0 -298
  29. npcsh/image_gen.py +0 -79
  30. npcsh/knowledge_graph.py +0 -1006
  31. npcsh/llm_funcs.py +0 -2027
  32. npcsh/load_data.py +0 -83
  33. npcsh/main.py +0 -5
  34. npcsh/model_runner.py +0 -189
  35. npcsh/npc_compiler.py +0 -2870
  36. npcsh/npc_sysenv.py +0 -383
  37. npcsh/npc_team/assembly_lines/test_pipeline.py +0 -181
  38. npcsh/npc_team/corca.npc +0 -13
  39. npcsh/npc_team/foreman.npc +0 -7
  40. npcsh/npc_team/npcsh.ctx +0 -11
  41. npcsh/npc_team/sibiji.npc +0 -4
  42. npcsh/npc_team/templates/analytics/celona.npc +0 -0
  43. npcsh/npc_team/templates/hr_support/raone.npc +0 -0
  44. npcsh/npc_team/templates/humanities/eriane.npc +0 -4
  45. npcsh/npc_team/templates/it_support/lineru.npc +0 -0
  46. npcsh/npc_team/templates/marketing/slean.npc +0 -4
  47. npcsh/npc_team/templates/philosophy/maurawa.npc +0 -0
  48. npcsh/npc_team/templates/sales/turnic.npc +0 -4
  49. npcsh/npc_team/templates/software/welxor.npc +0 -0
  50. npcsh/npc_team/tools/bash_executer.tool +0 -32
  51. npcsh/npc_team/tools/calculator.tool +0 -8
  52. npcsh/npc_team/tools/code_executor.tool +0 -16
  53. npcsh/npc_team/tools/generic_search.tool +0 -27
  54. npcsh/npc_team/tools/image_generation.tool +0 -25
  55. npcsh/npc_team/tools/local_search.tool +0 -149
  56. npcsh/npc_team/tools/npcsh_executor.tool +0 -9
  57. npcsh/npc_team/tools/screen_cap.tool +0 -27
  58. npcsh/npc_team/tools/sql_executor.tool +0 -26
  59. npcsh/response.py +0 -623
  60. npcsh/search.py +0 -248
  61. npcsh/serve.py +0 -1460
  62. npcsh/shell.py +0 -538
  63. npcsh/shell_helpers.py +0 -3529
  64. npcsh/stream.py +0 -700
  65. npcsh/video.py +0 -49
  66. npcsh-0.3.31.data/data/npcsh/npc_team/bash_executer.tool +0 -32
  67. npcsh-0.3.31.data/data/npcsh/npc_team/calculator.tool +0 -8
  68. npcsh-0.3.31.data/data/npcsh/npc_team/celona.npc +0 -0
  69. npcsh-0.3.31.data/data/npcsh/npc_team/code_executor.tool +0 -16
  70. npcsh-0.3.31.data/data/npcsh/npc_team/corca.npc +0 -13
  71. npcsh-0.3.31.data/data/npcsh/npc_team/eriane.npc +0 -4
  72. npcsh-0.3.31.data/data/npcsh/npc_team/foreman.npc +0 -7
  73. npcsh-0.3.31.data/data/npcsh/npc_team/generic_search.tool +0 -27
  74. npcsh-0.3.31.data/data/npcsh/npc_team/image_generation.tool +0 -25
  75. npcsh-0.3.31.data/data/npcsh/npc_team/lineru.npc +0 -0
  76. npcsh-0.3.31.data/data/npcsh/npc_team/local_search.tool +0 -149
  77. npcsh-0.3.31.data/data/npcsh/npc_team/maurawa.npc +0 -0
  78. npcsh-0.3.31.data/data/npcsh/npc_team/npcsh.ctx +0 -11
  79. npcsh-0.3.31.data/data/npcsh/npc_team/npcsh_executor.tool +0 -9
  80. npcsh-0.3.31.data/data/npcsh/npc_team/raone.npc +0 -0
  81. npcsh-0.3.31.data/data/npcsh/npc_team/screen_cap.tool +0 -27
  82. npcsh-0.3.31.data/data/npcsh/npc_team/sibiji.npc +0 -4
  83. npcsh-0.3.31.data/data/npcsh/npc_team/slean.npc +0 -4
  84. npcsh-0.3.31.data/data/npcsh/npc_team/sql_executor.tool +0 -26
  85. npcsh-0.3.31.data/data/npcsh/npc_team/test_pipeline.py +0 -181
  86. npcsh-0.3.31.data/data/npcsh/npc_team/turnic.npc +0 -4
  87. npcsh-0.3.31.data/data/npcsh/npc_team/welxor.npc +0 -0
  88. npcsh-0.3.31.dist-info/METADATA +0 -1853
  89. npcsh-0.3.31.dist-info/RECORD +0 -76
  90. npcsh-0.3.31.dist-info/entry_points.txt +0 -3
  91. {npcsh-0.3.31.dist-info → npcsh-1.0.0.dist-info}/top_level.txt +0 -0
npcsh/serve.py DELETED
@@ -1,1460 +0,0 @@
1
- from flask import Flask, request, jsonify, Response
2
- import configparser # Add this with your other imports
3
- from flask_sse import sse
4
- import redis
5
-
6
- from flask_cors import CORS
7
- import os
8
- import sqlite3
9
- from datetime import datetime
10
- import json
11
- from pathlib import Path
12
-
13
- import yaml
14
-
15
- from PIL import Image
16
- from PIL import ImageFile
17
-
18
- from npcsh.command_history import (
19
- CommandHistory,
20
- save_conversation_message,
21
- )
22
- from npcsh.npc_compiler import NPCCompiler, Tool, NPC
23
- from npcsh.npc_sysenv import (
24
- get_model_and_provider,
25
- get_available_models,
26
- get_system_message,
27
- NPCSH_STREAM_OUTPUT,
28
- )
29
-
30
-
31
- from npcsh.llm_funcs import (
32
- check_llm_command,
33
- get_llm_response,
34
- get_stream,
35
- get_conversation,
36
- )
37
- from npcsh.helpers import get_directory_npcs, get_db_npcs, get_npc_path
38
- from npcsh.npc_compiler import load_npc_from_file
39
- from npcsh.shell_helpers import execute_command, execute_command_stream
40
- import base64
41
-
42
- import json
43
- import os
44
- from pathlib import Path
45
-
46
- # Path for storing settings
47
- SETTINGS_FILE = Path(os.path.expanduser("~/.npcshrc"))
48
-
49
- # Configuration
50
- db_path = os.path.expanduser("~/npcsh_history.db")
51
- user_npc_directory = os.path.expanduser("~/.npcsh/npc_team")
52
- project_npc_directory = os.path.abspath("./npc_team")
53
-
54
- # Initialize components
55
- npc_compiler = NPCCompiler(user_npc_directory, db_path)
56
-
57
- app = Flask(__name__)
58
- app.config["REDIS_URL"] = "redis://localhost:6379"
59
- app.register_blueprint(sse, url_prefix="/stream")
60
-
61
- redis_client = redis.Redis(host="localhost", port=6379, decode_responses=True)
62
-
63
- CORS(
64
- app,
65
- origins=["http://localhost:5173"],
66
- allow_headers=["Content-Type", "Authorization"],
67
- methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
68
- supports_credentials=True,
69
- )
70
-
71
-
72
- def get_db_connection():
73
- conn = sqlite3.connect(db_path)
74
- conn.row_factory = sqlite3.Row
75
- return conn
76
-
77
-
78
- extension_map = {
79
- "PNG": "images",
80
- "JPG": "images",
81
- "JPEG": "images",
82
- "GIF": "images",
83
- "SVG": "images",
84
- "MP4": "videos",
85
- "AVI": "videos",
86
- "MOV": "videos",
87
- "WMV": "videos",
88
- "MPG": "videos",
89
- "MPEG": "videos",
90
- "DOC": "documents",
91
- "DOCX": "documents",
92
- "PDF": "documents",
93
- "PPT": "documents",
94
- "PPTX": "documents",
95
- "XLS": "documents",
96
- "XLSX": "documents",
97
- "TXT": "documents",
98
- "CSV": "documents",
99
- "ZIP": "archives",
100
- "RAR": "archives",
101
- "7Z": "archives",
102
- "TAR": "archives",
103
- "GZ": "archives",
104
- "BZ2": "archives",
105
- "ISO": "archives",
106
- }
107
-
108
-
109
- def fetch_messages_for_conversation(conversation_id):
110
- conn = get_db_connection()
111
- cursor = conn.cursor()
112
-
113
- query = """
114
- SELECT role, content
115
- FROM conversation_history
116
- WHERE conversation_id = ?
117
- ORDER BY timestamp ASC
118
- """
119
- cursor.execute(query, (conversation_id,))
120
- messages = cursor.fetchall()
121
- conn.close()
122
-
123
- return [
124
- {
125
- "role": message["role"],
126
- "content": message["content"],
127
- }
128
- for message in messages
129
- ]
130
-
131
-
132
- @app.route("/api/attachments/<message_id>", methods=["GET"])
133
- def get_message_attachments(message_id):
134
- """Get all attachments for a message"""
135
- try:
136
- command_history = CommandHistory(db_path)
137
- attachments = command_history.get_message_attachments(message_id)
138
- return jsonify({"attachments": attachments, "error": None})
139
- except Exception as e:
140
- return jsonify({"error": str(e)}), 500
141
-
142
-
143
- @app.route("/api/attachment/<attachment_id>", methods=["GET"])
144
- def get_attachment(attachment_id):
145
- """Get specific attachment data"""
146
- try:
147
- command_history = CommandHistory(db_path)
148
- data, name, type = command_history.get_attachment_data(attachment_id)
149
-
150
- if data:
151
- # Convert binary data to base64 for sending
152
- base64_data = base64.b64encode(data).decode("utf-8")
153
- return jsonify(
154
- {"data": base64_data, "name": name, "type": type, "error": None}
155
- )
156
- return jsonify({"error": "Attachment not found"}), 404
157
- except Exception as e:
158
- return jsonify({"error": str(e)}), 500
159
-
160
-
161
- @app.route("/api/capture_screenshot", methods=["GET"])
162
- def capture():
163
- # Capture screenshot using NPC-based method
164
- screenshot = capture_screenshot(None, full=True)
165
-
166
- # Ensure screenshot was captured successfully
167
- if not screenshot:
168
- print("Screenshot capture failed")
169
- return None
170
-
171
- return jsonify({"screenshot": screenshot})
172
-
173
-
174
- @app.route("/api/settings/global", methods=["GET", "OPTIONS"])
175
- def get_global_settings():
176
- if request.method == "OPTIONS":
177
- return "", 200
178
-
179
- try:
180
- npcshrc_path = os.path.expanduser("~/.npcshrc")
181
-
182
- # Default settings
183
- global_settings = {
184
- "model": "llama3.2",
185
- "provider": "ollama",
186
- "embedding_model": "nomic-embed-text",
187
- "embedding_provider": "ollama",
188
- "search_provider": "google",
189
- "NPCSH_LICENSE_KEY": "",
190
- }
191
- global_vars = {}
192
-
193
- if os.path.exists(npcshrc_path):
194
- with open(npcshrc_path, "r") as f:
195
- for line in f:
196
- # Skip comments and empty lines
197
- line = line.split("#")[0].strip()
198
- if not line:
199
- continue
200
-
201
- if "=" not in line:
202
- continue
203
-
204
- # Split on first = only
205
- key, value = line.split("=", 1)
206
- key = key.strip()
207
- if key.startswith("export "):
208
- key = key[7:]
209
-
210
- # Clean up the value - handle quoted strings properly
211
- value = value.strip()
212
- if value.startswith('"') and value.endswith('"'):
213
- value = value[1:-1]
214
- elif value.startswith("'") and value.endswith("'"):
215
- value = value[1:-1]
216
-
217
- # Map environment variables to settings
218
- key_mapping = {
219
- "NPCSH_MODEL": "model",
220
- "NPCSH_PROVIDER": "provider",
221
- "NPCSH_EMBEDDING_MODEL": "embedding_model",
222
- "NPCSH_EMBEDDING_PROVIDER": "embedding_provider",
223
- "NPCSH_SEARCH_PROVIDER": "search_provider",
224
- "NPCSH_LICENSE_KEY": "NPCSH_LICENSE_KEY",
225
- "NPCSH_STREAM_OUTPUT": "NPCSH_STREAM_OUTPUT",
226
- }
227
-
228
- if key in key_mapping:
229
- global_settings[key_mapping[key]] = value
230
- else:
231
- global_vars[key] = value
232
-
233
- return jsonify(
234
- {
235
- "global_settings": global_settings,
236
- "global_vars": global_vars,
237
- "error": None,
238
- }
239
- )
240
-
241
- except Exception as e:
242
- print(f"Error in get_global_settings: {str(e)}")
243
- return jsonify({"error": str(e)}), 500
244
-
245
-
246
- @app.route("/api/settings/global", methods=["POST", "OPTIONS"])
247
- def save_global_settings():
248
- if request.method == "OPTIONS":
249
- return "", 200
250
-
251
- try:
252
- data = request.json
253
- npcshrc_path = os.path.expanduser("~/.npcshrc")
254
-
255
- key_mapping = {
256
- "model": "NPCSH_CHAT_MODEL",
257
- "provider": "NPCSH_CHAT_PROVIDER",
258
- "embedding_model": "NPCSH_EMBEDDING_MODEL",
259
- "embedding_provider": "NPCSH_EMBEDDING_PROVIDER",
260
- "search_provider": "NPCSH_SEARCH_PROVIDER",
261
- "NPCSH_LICENSE_KEY": "NPCSH_LICENSE_KEY",
262
- "NPCSH_STREAM_OUTPUT": "NPCSH_STREAM_OUTPUT",
263
- }
264
-
265
- os.makedirs(os.path.dirname(npcshrc_path), exist_ok=True)
266
-
267
- with open(npcshrc_path, "w") as f:
268
- # Write settings as environment variables
269
- for key, value in data.get("global_settings", {}).items():
270
- if key in key_mapping and value:
271
- # Quote value if it contains spaces
272
- if " " in str(value):
273
- value = f'"{value}"'
274
- f.write(f"export {key_mapping[key]}={value}\n")
275
-
276
- # Write custom variables
277
- for key, value in data.get("global_vars", {}).items():
278
- if key and value:
279
- if " " in str(value):
280
- value = f'"{value}"'
281
- f.write(f"export {key}={value}\n")
282
-
283
- return jsonify({"message": "Global settings saved successfully", "error": None})
284
-
285
- except Exception as e:
286
- print(f"Error in save_global_settings: {str(e)}")
287
- return jsonify({"error": str(e)}), 500
288
-
289
-
290
- @app.route("/api/settings/project", methods=["GET", "OPTIONS"]) # Add OPTIONS
291
- def get_project_settings():
292
- if request.method == "OPTIONS":
293
- return "", 200
294
-
295
- try:
296
- current_dir = request.args.get("path")
297
- if not current_dir:
298
- return jsonify({"error": "No path provided"}), 400
299
-
300
- env_path = os.path.join(current_dir, ".env")
301
- env_vars = {}
302
-
303
- if os.path.exists(env_path):
304
- with open(env_path, "r") as f:
305
- for line in f:
306
- line = line.strip()
307
- if line and not line.startswith("#"):
308
- if "=" in line:
309
- key, value = line.split("=", 1)
310
- env_vars[key.strip()] = value.strip().strip("\"'")
311
-
312
- return jsonify({"env_vars": env_vars, "error": None})
313
-
314
- except Exception as e:
315
- print(f"Error in get_project_settings: {str(e)}")
316
- return jsonify({"error": str(e)}), 500
317
-
318
-
319
- @app.route("/api/settings/project", methods=["POST", "OPTIONS"]) # Add OPTIONS
320
- def save_project_settings():
321
- if request.method == "OPTIONS":
322
- return "", 200
323
-
324
- try:
325
- current_dir = request.args.get("path")
326
- if not current_dir:
327
- return jsonify({"error": "No path provided"}), 400
328
-
329
- data = request.json
330
- env_path = os.path.join(current_dir, ".env")
331
-
332
- with open(env_path, "w") as f:
333
- for key, value in data.get("env_vars", {}).items():
334
- f.write(f"{key}={value}\n")
335
-
336
- return jsonify(
337
- {"message": "Project settings saved successfully", "error": None}
338
- )
339
-
340
- except Exception as e:
341
- print(f"Error in save_project_settings: {str(e)}")
342
- return jsonify({"error": str(e)}), 500
343
-
344
-
345
- @app.route("/api/stream", methods=["POST"])
346
- def stream():
347
- """SSE stream that takes messages, models, providers, and attachments from frontend."""
348
- data = request.json
349
- commandstr = data.get("commandstr")
350
- conversation_id = data.get("conversationId")
351
- model = data.get("model", None)
352
- provider = data.get("provider", None)
353
- npc = data.get("npc", None)
354
- attachments = data.get("attachments", [])
355
- current_path = data.get("currentPath")
356
-
357
- command_history = CommandHistory(db_path)
358
-
359
- # Process attachments and save them properly
360
- images = []
361
- print(attachments)
362
-
363
- from io import BytesIO
364
- from PIL import Image
365
-
366
- attachments_loaded = []
367
-
368
- if attachments:
369
- for attachment in attachments:
370
- extension = attachment["name"].split(".")[-1]
371
- extension_mapped = extension_map.get(extension.upper(), "others")
372
- file_path = os.path.expanduser(
373
- "~/.npcsh/" + extension_mapped + "/" + attachment["name"]
374
- )
375
-
376
- if extension_mapped == "images":
377
- # Open the image file and save it to the file path
378
- ImageFile.LOAD_TRUNCATED_IMAGES = True
379
- img = Image.open(attachment["path"])
380
-
381
- # Save the image to a BytesIO buffer (to extract binary data)
382
- img_byte_arr = BytesIO()
383
- img.save(img_byte_arr, format="PNG") # or the appropriate format
384
- img_byte_arr.seek(0) # Rewind the buffer to the beginning
385
-
386
- # Save the image to a file
387
- img.save(file_path, optimize=True, quality=50)
388
-
389
- # Add to images list for LLM processing
390
- images.append({"filename": attachment["name"], "file_path": file_path})
391
-
392
- # Add the image data (in binary form) to attachments_loaded
393
- attachments_loaded.append(
394
- {
395
- "name": attachment["name"],
396
- "type": extension_mapped,
397
- "data": img_byte_arr.read(), # Read binary data from the buffer
398
- "size": os.path.getsize(file_path),
399
- }
400
- )
401
-
402
- messages = fetch_messages_for_conversation(conversation_id)
403
- messages.append({"role": "user", "content": commandstr})
404
- if not messages:
405
- return jsonify({"error": "No messages provided"}), 400
406
-
407
- # Save the user message with attachments in the database
408
- print("commandstr ", commandstr)
409
- message_id = command_history.generate_message_id()
410
-
411
- save_conversation_message(
412
- command_history,
413
- conversation_id,
414
- "user",
415
- commandstr,
416
- wd=current_path,
417
- model=model,
418
- provider=provider,
419
- npc=npc,
420
- attachments=attachments_loaded,
421
- message_id=message_id,
422
- )
423
- message_id = command_history.generate_message_id()
424
-
425
- # if len(images) > 0:
426
- # go straight to get stream instead of executing , will continue this way to avoid npc
427
- # loading issues for now.
428
- stream_response = get_stream(
429
- messages,
430
- images=images,
431
- model=model,
432
- provider=provider,
433
- npc=npc if isinstance(npc, NPC) else None,
434
- )
435
-
436
- """else:
437
-
438
- stream_response = execute_command_stream(
439
- commandstr,
440
- command_history,
441
- db_path,
442
- npc_compiler,
443
- model=model,
444
- provider=provider,
445
- messages=messages,
446
- images=images, # Pass the processed images
447
- ) # Get all conversation messages so far
448
- """
449
- final_response = "" # To accumulate the assistant's response
450
-
451
- complete_response = [] # List to store all chunks
452
-
453
- def event_stream():
454
- for response_chunk in stream_response:
455
- chunk_content = ""
456
-
457
- # Extract content based on model type
458
- if model.startswith("gpt-4o"):
459
- chunk_content = "".join(
460
- choice.delta.content
461
- for choice in response_chunk.choices
462
- if choice.delta.content is not None
463
- )
464
- if chunk_content:
465
- complete_response.append(chunk_content)
466
- chunk_data = {
467
- "id": response_chunk.id,
468
- "object": response_chunk.object,
469
- "created": response_chunk.created,
470
- "model": response_chunk.model,
471
- "choices": [
472
- {
473
- "index": choice.index,
474
- "delta": {
475
- "content": choice.delta.content,
476
- "role": choice.delta.role,
477
- },
478
- "finish_reason": choice.finish_reason,
479
- }
480
- for choice in response_chunk.choices
481
- ],
482
- }
483
- yield f"data: {json.dumps(chunk_data)}\n\n"
484
-
485
- elif model.startswith("llama"):
486
- chunk_content = response_chunk["message"]["content"]
487
- if chunk_content:
488
- complete_response.append(chunk_content)
489
- chunk_data = {
490
- "id": None,
491
- "object": None,
492
- "created": response_chunk["created_at"],
493
- "model": response_chunk["model"],
494
- "choices": [
495
- {
496
- "index": 0,
497
- "delta": {
498
- "content": chunk_content,
499
- "role": response_chunk["message"]["role"],
500
- },
501
- "finish_reason": response_chunk.get("done_reason"),
502
- }
503
- ],
504
- }
505
- yield f"data: {json.dumps(chunk_data)}\n\n"
506
-
507
- elif model.startswith("claude"):
508
- print(response_chunk)
509
- if response_chunk.type == "message_start":
510
- chunk_data = {
511
- "id": None,
512
- "object": None,
513
- "created": None,
514
- "model": model,
515
- "choices": [
516
- {
517
- "index": 0,
518
- "delta": {
519
- "content": "",
520
- "role": "assistant",
521
- },
522
- "finish_reason": "",
523
- }
524
- ],
525
- }
526
- yield f"data: {json.dumps(chunk_data)}\n\n"
527
-
528
- if response_chunk.type == "content_block_delta":
529
- chunk_content = response_chunk.delta.text
530
- if chunk_content:
531
- complete_response.append(chunk_content)
532
- chunk_data = {
533
- "id": None,
534
- "object": None,
535
- "created": None,
536
- "model": model,
537
- "choices": [
538
- {
539
- "index": 0,
540
- "delta": {
541
- "content": chunk_content,
542
- "role": "assistant",
543
- },
544
- "finish_reason": response_chunk.delta.type,
545
- }
546
- ],
547
- }
548
- yield f"data: {json.dumps(chunk_data)}\n\n"
549
-
550
- save_conversation_message(
551
- command_history,
552
- conversation_id,
553
- "assistant",
554
- chunk_content,
555
- wd=current_path,
556
- model=model,
557
- provider=provider,
558
- npc=npc,
559
- message_id=message_id, # Save with the same message_id
560
- )
561
-
562
- # Send completion message
563
- yield f"data: {json.dumps({'type': 'message_stop'})}\n\n"
564
- full_content = command_history.get_full_message_content(message_id)
565
- command_history.update_message_content(message_id, full_content)
566
-
567
- response = Response(event_stream(), mimetype="text/event-stream")
568
-
569
- return response
570
-
571
-
572
- @app.route("/api/npc_team_global")
573
- def get_npc_team_global():
574
- try:
575
- db_conn = get_db_connection()
576
- global_npc_directory = os.path.expanduser("~/.npcsh/npc_team")
577
-
578
- npc_data = []
579
-
580
- # Use existing helper to get NPCs from the global directory
581
- for npc_file in os.listdir(global_npc_directory):
582
- if npc_file.endswith(".npc"):
583
- npc_path = os.path.join(global_npc_directory, npc_file)
584
- npc = load_npc_from_file(npc_path, db_conn)
585
-
586
- # Serialize the NPC data
587
- serialized_npc = {
588
- "name": npc.name,
589
- "primary_directive": npc.primary_directive,
590
- "model": npc.model,
591
- "provider": npc.provider,
592
- "api_url": npc.api_url,
593
- "use_global_tools": npc.use_global_tools,
594
- "tools": [
595
- {
596
- "tool_name": tool.tool_name,
597
- "inputs": tool.inputs,
598
- "preprocess": tool.preprocess,
599
- "prompt": tool.prompt,
600
- "postprocess": tool.postprocess,
601
- }
602
- for tool in npc.tools
603
- ],
604
- }
605
- npc_data.append(serialized_npc)
606
-
607
- return jsonify({"npcs": npc_data, "error": None})
608
-
609
- except Exception as e:
610
- print(f"Error loading global NPCs: {str(e)}")
611
- return jsonify({"npcs": [], "error": str(e)})
612
-
613
-
614
- @app.route("/api/tools/global", methods=["GET"])
615
- def get_global_tools():
616
- # try:
617
- user_home = os.path.expanduser("~")
618
- tools_dir = os.path.join(user_home, ".npcsh", "npc_team", "tools")
619
- tools = []
620
- if os.path.exists(tools_dir):
621
- for file in os.listdir(tools_dir):
622
- if file.endswith(".tool"):
623
- with open(os.path.join(tools_dir, file), "r") as f:
624
- tool_data = yaml.safe_load(f)
625
- tools.append(tool_data)
626
- return jsonify({"tools": tools})
627
-
628
-
629
- # except Exception as e:
630
- # return jsonify({"error": str(e)}), 500
631
-
632
-
633
- @app.route("/api/tools/project", methods=["GET"])
634
- def get_project_tools():
635
- current_path = request.args.get(
636
- "currentPath"
637
- ) # Correctly retrieves `currentPath` from query params
638
- if not current_path:
639
- return jsonify({"tools": []})
640
-
641
- if not current_path.endswith("npc_team"):
642
- current_path = os.path.join(current_path, "npc_team")
643
-
644
- tools_dir = os.path.join(current_path, "tools")
645
- tools = []
646
- if os.path.exists(tools_dir):
647
- for file in os.listdir(tools_dir):
648
- if file.endswith(".tool"):
649
- with open(os.path.join(tools_dir, file), "r") as f:
650
- tool_data = yaml.safe_load(f)
651
- tools.append(tool_data)
652
- return jsonify({"tools": tools})
653
-
654
-
655
- @app.route("/api/tools/save", methods=["POST"])
656
- def save_tool():
657
- try:
658
- data = request.json
659
- tool_data = data.get("tool")
660
- is_global = data.get("isGlobal")
661
- current_path = data.get("currentPath")
662
- tool_name = tool_data.get("tool_name")
663
-
664
- if not tool_name:
665
- return jsonify({"error": "Tool name is required"}), 400
666
-
667
- if is_global:
668
- tools_dir = os.path.join(
669
- os.path.expanduser("~"), ".npcsh", "npc_team", "tools"
670
- )
671
- else:
672
- if not current_path.endswith("npc_team"):
673
- current_path = os.path.join(current_path, "npc_team")
674
- tools_dir = os.path.join(current_path, "tools")
675
-
676
- os.makedirs(tools_dir, exist_ok=True)
677
-
678
- # Full tool structure
679
- tool_yaml = {
680
- "description": tool_data.get("description", ""),
681
- "inputs": tool_data.get("inputs", []),
682
- "steps": tool_data.get("steps", []),
683
- }
684
-
685
- file_path = os.path.join(tools_dir, f"{tool_name}.tool")
686
- with open(file_path, "w") as f:
687
- yaml.safe_dump(tool_yaml, f, sort_keys=False)
688
-
689
- return jsonify({"status": "success"})
690
- except Exception as e:
691
- return jsonify({"error": str(e)}), 500
692
-
693
-
694
- @app.route("/api/save_npc", methods=["POST"])
695
- def save_npc():
696
- try:
697
- data = request.json
698
- npc_data = data.get("npc")
699
- is_global = data.get("isGlobal")
700
- current_path = data.get("currentPath")
701
-
702
- if not npc_data or "name" not in npc_data:
703
- return jsonify({"error": "Invalid NPC data"}), 400
704
-
705
- # Determine the directory based on whether it's global or project
706
- if is_global:
707
- npc_directory = os.path.expanduser("~/.npcsh/npc_team")
708
- else:
709
- npc_directory = os.path.join(current_path, "npc_team")
710
-
711
- # Ensure the directory exists
712
- os.makedirs(npc_directory, exist_ok=True)
713
-
714
- # Create the YAML content
715
- yaml_content = f"""name: {npc_data['name']}
716
- primary_directive: "{npc_data['primary_directive']}"
717
- model: {npc_data['model']}
718
- provider: {npc_data['provider']}
719
- api_url: {npc_data.get('api_url', '')}
720
- use_global_tools: {str(npc_data.get('use_global_tools', True)).lower()}
721
- """
722
-
723
- # Save the file
724
- npc_file_path = os.path.join(npc_directory, f"{npc_data['name']}.npc")
725
- with open(npc_file_path, "w") as f:
726
- f.write(yaml_content)
727
-
728
- return jsonify({"message": "NPC saved successfully", "error": None})
729
-
730
- except Exception as e:
731
- print(f"Error saving NPC: {str(e)}")
732
- return jsonify({"error": str(e)}), 500
733
-
734
-
735
- @app.route("/api/npc_team_project", methods=["GET"])
736
- def get_npc_team_project():
737
- try:
738
- db_conn = get_db_connection()
739
-
740
- project_npc_directory = request.args.get("currentPath")
741
- if not project_npc_directory.endswith("npc_team"):
742
- project_npc_directory = os.path.join(project_npc_directory, "npc_team")
743
-
744
- npc_data = []
745
-
746
- for npc_file in os.listdir(project_npc_directory):
747
- print(npc_file)
748
- if npc_file.endswith(".npc"):
749
- npc_path = os.path.join(project_npc_directory, npc_file)
750
- npc = load_npc_from_file(npc_path, db_conn)
751
-
752
- # Serialize the NPC data, including tools
753
- serialized_npc = {
754
- "name": npc.name,
755
- "primary_directive": npc.primary_directive,
756
- "model": npc.model,
757
- "provider": npc.provider,
758
- "api_url": npc.api_url,
759
- "use_global_tools": npc.use_global_tools,
760
- "tools": [
761
- {
762
- "tool_name": tool.tool_name,
763
- "inputs": tool.inputs,
764
- "preprocess": tool.preprocess,
765
- "prompt": tool.prompt,
766
- "postprocess": tool.postprocess,
767
- }
768
- for tool in npc.tools
769
- ],
770
- }
771
- npc_data.append(serialized_npc)
772
-
773
- print(npc_data)
774
- return jsonify({"npcs": npc_data, "error": None})
775
-
776
- except Exception as e:
777
- print(f"Error fetching NPC team: {str(e)}")
778
- return jsonify({"npcs": [], "error": str(e)})
779
-
780
-
781
- @app.route("/api/get_attachment_response", methods=["POST"])
782
- def get_attachment_response():
783
- data = request.json
784
- attachments = data.get("attachments", [])
785
- messages = data.get("messages") # Get conversation ID
786
- conversation_id = data.get("conversationId")
787
- current_path = data.get("currentPath")
788
- command_history = CommandHistory(db_path)
789
- model = data.get("model")
790
- npc = data.get("npc")
791
- # load the npc properly
792
- # try global /porject
793
-
794
- # Process each attachment
795
- images = []
796
- for attachment in attachments:
797
- extension = attachment["name"].split(".")[-1]
798
- extension_mapped = extension_map.get(extension.upper(), "others")
799
- file_path = os.path.expanduser(
800
- "~/.npcsh/" + extension_mapped + "/" + attachment["name"]
801
- )
802
- if extension_mapped == "images":
803
- ImageFile.LOAD_TRUNCATED_IMAGES = True
804
- img = Image.open(attachment["path"])
805
- img.save(file_path, optimize=True, quality=50)
806
- images.append({"filename": attachment["name"], "file_path": file_path})
807
-
808
- message_to_send = messages[-1]["content"][0]
809
-
810
- response = get_llm_response(
811
- message_to_send,
812
- images=images,
813
- messages=messages,
814
- model=model,
815
- )
816
- messages = response["messages"]
817
- response = response["response"]
818
-
819
- # Save new messages
820
- save_conversation_message(
821
- command_history, conversation_id, "user", message_to_send, wd=current_path
822
- )
823
-
824
- save_conversation_message(
825
- command_history,
826
- conversation_id,
827
- "assistant",
828
- response,
829
- wd=current_path,
830
- )
831
- return jsonify(
832
- {
833
- "status": "success",
834
- "message": response,
835
- "conversationId": conversation_id,
836
- "messages": messages, # Optionally return fetched messages
837
- }
838
- )
839
-
840
-
841
@app.route("/api/execute", methods=["POST"])
def execute():
    """Execute a command and persist the exchange to the conversation history.

    Expects a JSON body with:
        commandstr     -- the command text to run (required)
        currentPath    -- the frontend's current working directory
        conversationId -- conversation to append to (optional)
        model          -- LLM model name (optional)
        npc            -- NPC name to run the command as (optional)

    Returns JSON with the command output, the server's cwd, and the prior
    conversation messages; 400 when no command is given, 500 on failure.
    """
    # Bind before the try: the except handler reads data.get("currentPath"),
    # which would raise NameError if request.json itself failed.
    data = {}
    try:
        data = request.json or {}
        command = data.get("commandstr")
        current_path = data.get("currentPath")
        conversation_id = data.get("conversationId")
        model = data.get("model")
        print("model", model)
        npc = data.get("npc")
        print("npc", npc)
        # TODO: actually load the npc -- try the project team first, then the
        # global team; if neither has it, proceed with the command as-is but
        # notify the frontend (which should also list NPCs properly).

        # Strip quoting characters so quoted frontend input is treated as a
        # plain command string.  Guard against a missing commandstr (None).
        command = (
            (command or "").strip().replace('"', "").replace("'", "").replace("`", "")
        )

        if not command:
            return (
                jsonify(
                    {
                        "error": "No command provided",
                        "output": "Error: No command provided",
                    }
                ),
                400,
            )

        command_history = CommandHistory(db_path)

        # Fetch prior messages so the LLM gets the conversation context.
        if conversation_id:
            conn = get_db_connection()
            try:
                cursor = conn.cursor()
                cursor.execute(
                    """
                    SELECT role, content, timestamp
                    FROM conversation_history
                    WHERE conversation_id = ?
                    ORDER BY timestamp ASC
                    """,
                    (conversation_id,),
                )
                messages = [
                    {"role": msg["role"], "content": msg["content"]}
                    for msg in cursor.fetchall()
                ]
            finally:
                # Close on every path; the original leaked on query failure.
                conn.close()
        else:
            messages = []

        # Execute the command with the accumulated conversation history.
        result = execute_command(
            command=command,
            command_history=command_history,
            db_path=db_path,
            npc_compiler=npc_compiler,
            conversation_id=conversation_id,
            messages=messages,
            model=model,
        )

        # Persist both sides of the exchange.
        save_conversation_message(
            command_history, conversation_id, "user", command, wd=current_path
        )
        save_conversation_message(
            command_history,
            conversation_id,
            "assistant",
            result.get("output", ""),
            wd=current_path,
        )

        return jsonify(
            {
                "output": result.get("output", ""),
                "currentPath": os.getcwd(),
                "error": None,
                "messages": messages,
            }
        )

    except Exception as e:
        print(f"Error executing command: {str(e)}")
        import traceback

        traceback.print_exc()
        return (
            jsonify(
                {
                    "error": str(e),
                    "output": f"Error: {str(e)}",
                    "currentPath": data.get("currentPath", None),
                }
            ),
            500,
        )
952
-
953
-
954
def get_conversation_history(conversation_id):
    """Return every message of *conversation_id*, oldest first.

    Each entry is a dict with "role", "content" and "timestamp" keys.
    An empty/None conversation id yields an empty list.
    """
    if not conversation_id:
        return []

    conn = get_db_connection()
    try:
        rows = conn.cursor().execute(
            """
            SELECT role, content, timestamp
            FROM conversation_history
            WHERE conversation_id = ?
            ORDER BY timestamp ASC
            """,
            (conversation_id,),
        ).fetchall()

        history = []
        for row in rows:
            history.append(
                {
                    "role": row["role"],
                    "content": row["content"],
                    "timestamp": row["timestamp"],
                }
            )
        return history
    finally:
        conn.close()
982
-
983
-
984
@app.route("/api/conversations", methods=["GET"])
def get_conversations():
    """List conversations recorded under the directory given by ?path=.

    Each conversation carries its id, start time, and a content preview
    truncated to 100 characters.
    """
    try:
        path = request.args.get("path")
        if not path:
            return jsonify({"error": "No path provided", "conversations": []}), 400

        conn = get_db_connection()
        try:
            cursor = conn.cursor()

            query = """
            SELECT DISTINCT conversation_id,
                    MIN(timestamp) as start_time,
                    GROUP_CONCAT(content) as preview
            FROM conversation_history
            WHERE directory_path = ?
            GROUP BY conversation_id
            ORDER BY start_time DESC
            """

            cursor.execute(query, [path])
            rows = cursor.fetchall()

            summaries = []
            for row in rows:
                preview = row["preview"]
                if preview and len(preview) > 100:
                    preview = preview[:100] + "..."
                summaries.append(
                    {
                        "id": row["conversation_id"],
                        "timestamp": row["start_time"],
                        "preview": preview,
                    }
                )

            return jsonify({"conversations": summaries, "error": None})

        finally:
            conn.close()

    except Exception as e:
        print(f"Error getting conversations: {str(e)}")
        return jsonify({"error": str(e), "conversations": []}), 500
1032
-
1033
-
1034
@app.route("/api/conversation/<conversation_id>/messages", methods=["GET"])
def get_conversation_messages(conversation_id):
    """Return the deduplicated, chronologically ordered messages of one conversation.

    Rows sharing the same role and second-resolution timestamp are collapsed
    to the most recently inserted one; attachments are resolved via the
    message_attachments table.
    """
    # Bind before the try so the finally clause never hits an unbound name
    # when get_db_connection() itself raises.
    conn = None
    try:
        conn = get_db_connection()
        cursor = conn.cursor()

        # Rank duplicates (same role + timestamp second) and keep the newest.
        query = """
        WITH ranked_messages AS (
            SELECT
                ch.*,
                GROUP_CONCAT(ma.id) as attachment_ids,
                ROW_NUMBER() OVER (
                    PARTITION BY ch.role, strftime('%s', ch.timestamp)
                    ORDER BY ch.id DESC
                ) as rn
            FROM conversation_history ch
            LEFT JOIN message_attachments ma
                ON ch.message_id = ma.message_id
            WHERE ch.conversation_id = ?
            GROUP BY ch.id, ch.timestamp
        )
        SELECT *
        FROM ranked_messages
        WHERE rn = 1
        ORDER BY timestamp ASC, id ASC
        """

        cursor.execute(query, [conversation_id])
        messages = cursor.fetchall()

        return jsonify(
            {
                "messages": [
                    {
                        "message_id": msg["message_id"],
                        "role": msg["role"],
                        "content": msg["content"],
                        "timestamp": msg["timestamp"],
                        "model": msg["model"],
                        "provider": msg["provider"],
                        "npc": msg["npc"],
                        # Only hit the attachments table when the join found any.
                        "attachments": (
                            get_message_attachments(msg["message_id"])
                            if msg["attachment_ids"]
                            else []
                        ),
                    }
                    for msg in messages
                ],
                "error": None,
            }
        )

    except Exception as e:
        print(f"Error getting conversation messages: {str(e)}")
        return jsonify({"error": str(e), "messages": []}), 500
    finally:
        if conn is not None:
            conn.close()
1094
-
1095
-
1096
@app.route("/api/stream", methods=["POST"])
def stream_raw():
    """SSE stream that takes messages, models, providers, and attachments from frontend."""
    # Request JSON fields: commandstr, conversationId, model, provider,
    # saveToSqlite3, npc, attachments, currentPath, messages.
    data = request.json
    commandstr = data.get("commandstr")
    conversation_id = data.get("conversationId")
    # NOTE(review): event_stream() calls model.startswith(...); a None model
    # would raise AttributeError mid-stream -- confirm the frontend always
    # sends one, or that every caller sets a default upstream.
    model = data.get("model", None)
    provider = data.get("provider", None)
    save_to_sqlite3 = data.get("saveToSqlite3", False)
    npc = data.get("npc", None)
    attachments = data.get("attachments", [])
    current_path = data.get("currentPath")
    print(data)

    messages = data.get("messages", [])
    print("messages", messages)
    command_history = CommandHistory(db_path)

    images = []              # image descriptors handed to the LLM stream
    attachments_loaded = []  # binary payloads persisted with the user message

    if attachments:
        for attachment in attachments:
            extension = attachment["name"].split(".")[-1]
            extension_mapped = extension_map.get(extension.upper(), "others")
            file_path = os.path.expanduser(
                "~/.npcsh/" + extension_mapped + "/" + attachment["name"]
            )

            # Only image attachments are processed; other types are ignored
            # here (no file is written for them).
            if extension_mapped == "images":
                # Open the image file and save it to the file path
                ImageFile.LOAD_TRUNCATED_IMAGES = True
                img = Image.open(attachment["path"])

                # Save the image to a BytesIO buffer (to extract binary data)
                img_byte_arr = BytesIO()
                img.save(img_byte_arr, format="PNG")  # or the appropriate format
                img_byte_arr.seek(0)  # Rewind the buffer to the beginning

                # Save a re-encoded copy to ~/.npcsh/images/
                img.save(file_path, optimize=True, quality=50)

                # Add to images list for LLM processing
                images.append({"filename": attachment["name"], "file_path": file_path})

                # Add the image data (in binary form) to attachments_loaded
                attachments_loaded.append(
                    {
                        "name": attachment["name"],
                        "type": extension_mapped,
                        "data": img_byte_arr.read(),  # Read binary data from the buffer
                        "size": os.path.getsize(file_path),
                    }
                )
    if save_to_sqlite3:
        if len(messages) == 0:
            # load the conversation messages from the database instead
            messages = fetch_messages_for_conversation(conversation_id)
        if not messages:
            return jsonify({"error": "No messages provided"}), 400
        messages.append({"role": "user", "content": commandstr})
        message_id = command_history.generate_message_id()

        # Persist the user's message (with any attachment payloads).
        save_conversation_message(
            command_history,
            conversation_id,
            "user",
            commandstr,
            wd=current_path,
            model=model,
            provider=provider,
            npc=npc,
            attachments=attachments_loaded,
            message_id=message_id,
        )
        # Fresh id for the assistant reply; event_stream saves chunks under it.
        # NOTE(review): message_id is only bound when save_to_sqlite3 is true,
        # and event_stream only reads it behind the same flag -- keep in sync.
        message_id = command_history.generate_message_id()

    stream_response = get_stream(
        messages,
        images=images,
        model=model,
        provider=provider,
        npc=npc if isinstance(npc, NPC) else None,
    )

    """else:

    stream_response = execute_command_stream(
        commandstr,
        command_history,
        db_path,
        npc_compiler,
        model=model,
        provider=provider,
        messages=messages,
        images=images,  # Pass the processed images
    )  # Get all conversation messages so far
    """
    # NOTE(review): final_response is never written after this -- dead variable.
    final_response = ""  # To accumulate the assistant's response
    complete_response = []  # List to store all chunks

    def event_stream():
        # Normalise each provider-specific chunk into one OpenAI-style JSON
        # payload and yield it as an SSE data frame.
        for response_chunk in stream_response:
            chunk_content = ""

            # Extract content based on model type
            if model.startswith("gpt-4o"):
                chunk_content = "".join(
                    choice.delta.content
                    for choice in response_chunk.choices
                    if choice.delta.content is not None
                )
                if chunk_content:
                    complete_response.append(chunk_content)
                    chunk_data = {
                        "type": "content",  # Added type
                        "id": response_chunk.id,
                        "object": response_chunk.object,
                        "created": response_chunk.created,
                        "model": response_chunk.model,
                        "choices": [
                            {
                                "index": choice.index,
                                "delta": {
                                    "content": choice.delta.content,
                                    "role": choice.delta.role,
                                },
                                "finish_reason": choice.finish_reason,
                            }
                            for choice in response_chunk.choices
                        ],
                    }
                    yield f"{json.dumps(chunk_data)}\n\n"

            elif model.startswith("llama"):
                chunk_content = response_chunk["message"]["content"]
                if chunk_content:
                    complete_response.append(chunk_content)
                    chunk_data = {
                        "type": "content",  # Added type
                        "id": None,
                        "object": None,
                        "created": response_chunk["created_at"],
                        "model": response_chunk["model"],
                        "choices": [
                            {
                                "index": 0,
                                "delta": {
                                    "content": chunk_content,
                                    "role": response_chunk["message"]["role"],
                                },
                                "finish_reason": response_chunk.get("done_reason"),
                            }
                        ],
                    }
                    yield f"{json.dumps(chunk_data)}\n\n"
            elif model.startswith("claude"):
                print(response_chunk)
                # Anthropic streams typed events; translate the two we handle.
                if response_chunk.type == "message_start":
                    chunk_data = {
                        "type": "message_start",  # Added type
                        "id": None,
                        "object": None,
                        "created": None,
                        "model": model,
                        "choices": [
                            {
                                "index": 0,
                                "delta": {
                                    "content": "",
                                    "role": "assistant",
                                },
                                "finish_reason": "",
                            }
                        ],
                    }
                    yield f"{json.dumps(chunk_data)}\n\n"
                if response_chunk.type == "content_block_delta":
                    chunk_content = response_chunk.delta.text
                    if chunk_content:
                        complete_response.append(chunk_content)
                        chunk_data = {
                            "type": "content",  # Added type
                            "content": chunk_content,
                            "id": None,
                            "object": None,
                            "created": None,
                            "model": model,
                            "choices": [
                                {
                                    "index": 0,
                                    "delta": {
                                        "content": chunk_content,
                                        "role": "assistant",
                                    },
                                    "finish_reason": response_chunk.delta.type,
                                }
                            ],
                        }
                        yield f"{json.dumps(chunk_data)}\n\n"
                        # NOTE(review): per-chunk persistence happens only on
                        # the claude path; the gpt-4o/llama branches never save
                        # assistant chunks -- confirm this asymmetry is intended.
                        if save_to_sqlite3:
                            save_conversation_message(
                                command_history,
                                conversation_id,
                                "assistant",
                                chunk_content,
                                wd=current_path,
                                model=model,
                                provider=provider,
                                npc=npc,
                                message_id=message_id,  # Save with the same message_id
                            )

        # Send completion message
        yield f"{json.dumps({'type': 'message_stop'})}\n\n"
        if save_to_sqlite3:
            # Collapse the per-chunk rows into one full assistant message.
            full_content = command_history.get_full_message_content(message_id)
            command_history.update_message_content(message_id, full_content)

    response = Response(event_stream(), mimetype="text/event-stream")

    return response

    # NOTE(review): everything below is unreachable (duplicate of the two
    # statements above) -- dead code left in place; safe to delete.
    response = Response(event_stream(), mimetype="text/event-stream")

    return response
1322
-
1323
-
1324
@app.after_request
def after_request(response):
    """Attach CORS-related headers to every outgoing response."""
    cors_headers = (
        ("Access-Control-Allow-Headers", "Content-Type,Authorization"),
        ("Access-Control-Allow-Methods", "GET,PUT,POST,DELETE,OPTIONS"),
        ("Access-Control-Allow-Credentials", "true"),
    )
    for header_name, header_value in cors_headers:
        response.headers.add(header_name, header_value)
    return response
1330
-
1331
-
1332
def get_db_connection():
    """Open a connection to the npcsh database with name-based row access."""
    connection = sqlite3.connect(db_path)
    # sqlite3.Row lets callers index rows by column name (row["content"]).
    connection.row_factory = sqlite3.Row
    return connection
1336
-
1337
-
1338
# Category -> file extensions stored under that ~/.npcsh/ subdirectory.
_EXTENSION_CATEGORIES = {
    "images": ("PNG", "JPG", "JPEG", "GIF", "SVG"),
    "videos": ("MP4", "AVI", "MOV", "WMV", "MPG", "MPEG"),
    "documents": ("DOC", "DOCX", "PDF", "PPT", "PPTX", "XLS", "XLSX", "TXT", "CSV"),
    "archives": ("ZIP", "RAR", "7Z", "TAR", "GZ", "BZ2", "ISO"),
}

# Flat lookup table: upper-cased extension -> category directory name.
extension_map = {
    ext: category
    for category, extensions in _EXTENSION_CATEGORIES.items()
    for ext in extensions
}
1367
-
1368
-
1369
def fetch_messages_for_conversation(conversation_id):
    """Fetch all messages for *conversation_id*, oldest first.

    Returns a list of dicts with "role", "content" and "timestamp" keys.
    The database connection is always closed, even if the query raises.
    """
    conn = get_db_connection()
    try:
        cursor = conn.cursor()
        query = """
        SELECT role, content, timestamp
        FROM conversation_history
        WHERE conversation_id = ?
        ORDER BY timestamp ASC
        """
        cursor.execute(query, (conversation_id,))
        messages = cursor.fetchall()
    finally:
        # Close on every path; the original leaked the connection on error.
        conn.close()

    return [
        {
            "role": message["role"],
            "content": message["content"],
            "timestamp": message["timestamp"],
        }
        for message in messages
    ]
1391
-
1392
-
1393
@app.route("/api/health", methods=["GET"])
def health_check():
    """Liveness probe: report that the API server is responding."""
    payload = {"status": "ok", "error": None}
    return jsonify(payload)
1396
-
1397
-
1398
def start_flask_server(
    port=5337,
    cors_origins=None,
):
    """Ensure the database tables exist and run the Flask app.

    Args:
        port: TCP port to listen on.  (Previously accepted but ignored --
            the server always bound 5337 regardless; now honored.)
        cors_origins: optional list of allowed origins; when given, CORS is
            enabled for those origins only.
    """
    try:
        # Ensure the database tables exist before serving requests.
        conn = get_db_connection()
        try:
            cursor = conn.cursor()

            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS command_history (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp TEXT,
                    command TEXT,
                    tags TEXT,
                    response TEXT,
                    directory TEXT,
                    conversation_id TEXT
                )
                """
            )

            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS conversation_history (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp TEXT,
                    role TEXT,
                    content TEXT,
                    conversation_id TEXT,
                    directory_path TEXT
                )
                """
            )

            conn.commit()
        finally:
            conn.close()

        # Only apply CORS if origins are specified.
        if cors_origins:
            from flask_cors import CORS

            CORS(
                app,
                origins=cors_origins,
                allow_headers=["Content-Type", "Authorization"],
                methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
                supports_credentials=True,
            )

        # Bind on all interfaces so a frontend on another host can reach us;
        # use the requested port instead of the previously hard-coded 5337.
        print(f"Starting Flask server on http://0.0.0.0:{port}")
        app.run(host="0.0.0.0", port=port, debug=True)
    except Exception as e:
        print(f"Error starting server: {str(e)}")
1457
-
1458
-
1459
if __name__ == "__main__":
    # Run the API server with default settings when invoked directly.
    start_flask_server()