npcsh 0.3.27.7__tar.gz → 0.3.29__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70) hide show
  1. {npcsh-0.3.27.7/npcsh.egg-info → npcsh-0.3.29}/PKG-INFO +74 -8
  2. {npcsh-0.3.27.7 → npcsh-0.3.29}/README.md +72 -7
  3. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/llm_funcs.py +111 -43
  4. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_compiler.py +60 -3
  5. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/shell_helpers.py +525 -0
  6. {npcsh-0.3.27.7 → npcsh-0.3.29/npcsh.egg-info}/PKG-INFO +74 -8
  7. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh.egg-info/requires.txt +1 -0
  8. {npcsh-0.3.27.7 → npcsh-0.3.29}/setup.py +2 -1
  9. {npcsh-0.3.27.7 → npcsh-0.3.29}/LICENSE +0 -0
  10. {npcsh-0.3.27.7 → npcsh-0.3.29}/MANIFEST.in +0 -0
  11. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/__init__.py +0 -0
  12. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/audio.py +0 -0
  13. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/cli.py +0 -0
  14. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/command_history.py +0 -0
  15. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/conversation.py +0 -0
  16. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/data_models.py +0 -0
  17. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/dataframes.py +0 -0
  18. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/embeddings.py +0 -0
  19. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/helpers.py +0 -0
  20. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/image.py +0 -0
  21. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/image_gen.py +0 -0
  22. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/knowledge_graph.py +0 -0
  23. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/load_data.py +0 -0
  24. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/main.py +0 -0
  25. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/model_runner.py +0 -0
  26. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_sysenv.py +0 -0
  27. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/assembly_lines/test_pipeline.py +0 -0
  28. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/corca.npc +0 -0
  29. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/foreman.npc +0 -0
  30. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/npcsh.ctx +0 -0
  31. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/sibiji.npc +0 -0
  32. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/templates/analytics/celona.npc +0 -0
  33. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/templates/hr_support/raone.npc +0 -0
  34. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/templates/humanities/eriane.npc +0 -0
  35. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/templates/it_support/lineru.npc +0 -0
  36. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/templates/marketing/slean.npc +0 -0
  37. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/templates/philosophy/maurawa.npc +0 -0
  38. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/templates/sales/turnic.npc +0 -0
  39. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/templates/software/welxor.npc +0 -0
  40. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/tools/calculator.tool +0 -0
  41. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/tools/generic_search.tool +0 -0
  42. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/tools/image_generation.tool +0 -0
  43. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/tools/local_search.tool +0 -0
  44. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/tools/screen_cap.tool +0 -0
  45. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/npc_team/tools/sql_executor.tool +0 -0
  46. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/plonk.py +0 -0
  47. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/response.py +0 -0
  48. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/search.py +0 -0
  49. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/serve.py +0 -0
  50. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/shell.py +0 -0
  51. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/stream.py +0 -0
  52. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh/video.py +0 -0
  53. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh.egg-info/SOURCES.txt +0 -0
  54. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh.egg-info/dependency_links.txt +0 -0
  55. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh.egg-info/entry_points.txt +0 -0
  56. {npcsh-0.3.27.7 → npcsh-0.3.29}/npcsh.egg-info/top_level.txt +0 -0
  57. {npcsh-0.3.27.7 → npcsh-0.3.29}/setup.cfg +0 -0
  58. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_chromadb.py +0 -0
  59. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_embedding_check.py +0 -0
  60. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_embedding_methods.py +0 -0
  61. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_helpers.py +0 -0
  62. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_knowledge_graph_rag.py +0 -0
  63. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_llm_funcs.py +0 -0
  64. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_networkx_vis.py +0 -0
  65. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_npc_compiler.py +0 -0
  66. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_npcsh.py +0 -0
  67. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_npcteam.py +0 -0
  68. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_shell_helpers.py +0 -0
  69. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_tars.py +0 -0
  70. {npcsh-0.3.27.7 → npcsh-0.3.29}/tests/test_tool_use.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: npcsh
3
- Version: 0.3.27.7
3
+ Version: 0.3.29
4
4
  Summary: npcsh is a command line tool for integrating LLMs into everyday workflows and for orchestrating teams of NPCs.
5
5
  Home-page: https://github.com/cagostino/npcsh
6
6
  Author: Christopher Agostino
@@ -32,6 +32,7 @@ Requires-Dist: duckduckgo-search
32
32
  Requires-Dist: flask
33
33
  Requires-Dist: flask_cors
34
34
  Requires-Dist: redis
35
+ Requires-Dist: psycopg2-binary
35
36
  Requires-Dist: flask_sse
36
37
  Provides-Extra: lite
37
38
  Requires-Dist: anthropic; extra == "lite"
@@ -468,6 +469,7 @@ if __name__ == "__main__":
468
469
  ### Linux install
469
470
  ```bash
470
471
 
472
+ # for audio primarily
471
473
  sudo apt-get install espeak
472
474
  sudo apt-get install portaudio19-dev python3-pyaudio
473
475
  sudo apt-get install alsa-base alsa-utils
@@ -475,6 +477,10 @@ sudo apt-get install libcairo2-dev
475
477
  sudo apt-get install libgirepository1.0-dev
476
478
  sudo apt-get install ffmpeg
477
479
 
480
+ # for triggers
481
+ sudo apt install inotify-tools
482
+
483
+
478
484
  #And if you don't have ollama installed, use this:
479
485
  curl -fsSL https://ollama.com/install.sh | sh
480
486
 
@@ -482,25 +488,46 @@ ollama pull llama3.2
482
488
  ollama pull llava:7b
483
489
  ollama pull nomic-embed-text
484
490
  pip install npcsh
485
- ```
486
- If you'd like to install the abilities to use STT and TTS, additionall install the following
487
- ```
488
- pip install openai-whisper pyaudio gtts playsound
489
- ```
491
+ # if you want to install with the API libraries
492
+ pip install npcsh[lite]
493
+ # if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
494
+ pip install npcsh[local]
495
+ # if you want to use tts/stt
496
+ pip install npcsh[whisper]
497
+
498
+ # if you want everything:
499
+ pip install npcsh[all]
500
+
490
501
 
491
502
 
492
503
 
493
504
  ### Mac install
494
505
  ```bash
506
+ #mainly for audio
495
507
  brew install portaudio
496
508
  brew install ffmpeg
509
+ brew install pygobject3
510
+
511
+ # for triggers
512
+ brew install fswatch
513
+
514
+
497
515
  brew install ollama
498
516
  brew services start ollama
499
- brew install pygobject3
500
517
  ollama pull llama3.2
501
518
  ollama pull llava:7b
502
519
  ollama pull nomic-embed-text
503
520
  pip install npcsh
521
+ # if you want to install with the API libraries
522
+ pip install npcsh[lite]
523
+ # if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
524
+ pip install npcsh[local]
525
+ # if you want to use tts/stt
526
+ pip install npcsh[whisper]
527
+
528
+ # if you want everything:
529
+ pip install npcsh[all]
530
+
504
531
  ```
505
532
  ### Windows Install
506
533
 
@@ -513,6 +540,16 @@ ollama pull llama3.2
513
540
  ollama pull llava:7b
514
541
  ollama pull nomic-embed-text
515
542
  pip install npcsh
543
+ # if you want to install with the API libraries
544
+ pip install npcsh[lite]
545
+ # if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
546
+ pip install npcsh[local]
547
+ # if you want to use tts/stt
548
+ pip install npcsh[whisper]
549
+
550
+ # if you want everything:
551
+ pip install npcsh[all]
552
+
516
553
  ```
517
554
  As of now, npcsh appears to work well with some of the core functionalities like /ots and /whisper.
518
555
 
@@ -1050,13 +1087,30 @@ npc ots -f test_data/catfight.PNG
1050
1087
  ### Plan : Schedule tasks to be run at regular intervals (under construction)
1051
1088
  Use the /plan macro to schedule tasks to be run at regular intervals.
1052
1089
  ```npcsh
1053
- npcsh> /plan run a rag search on the files in the current directory every 5 minutes
1090
+ npcsh> /plan run a rag search for 'moonbeam' on the files in the current directory every 5 minutes
1091
+ ```
1092
+
1093
+ ```npcsh
1094
+ npcsh> /plan record the cpu usage every 5 minutes
1054
1095
  ```
1055
1096
 
1097
+ ```npcsh
1098
+ npcsh> /plan record the apps that are using the most ram every 5 minutes
1099
+ ```
1100
+
1101
+
1102
+
1103
+
1056
1104
  ```bash
1057
1105
  npc plan -f 30m -t 'task'
1058
1106
  ```
1059
1107
 
1108
+ Plan will use platform-specific scheduling tools. In particular, it uses crontab on Linux and launchd on macOS and Schedule Tasks on Windows.
1109
+
1110
+ Implementations have been provided for Mac and Windows, but as of 3/23/2025 it has only been tested on Linux.
1111
+
1112
+
1113
+
1060
1114
  ### Plonk : Computer Control
1061
1115
  Use the /plonk macro to allow the LLM to control your computer.
1062
1116
  ```npcsh
@@ -1342,6 +1396,18 @@ npcsh> /spool model=llama3.3
1342
1396
  npc spool -n npc.npc
1343
1397
  ```
1344
1398
 
1399
+ ### Trigger
1400
+ Use the /trigger macro to execute specific actions based on certain conditions.
1401
+
1402
+ ```npcsh
1403
+ npcsh> /trigger watch for new PDF downloads in the ~/Downloads directory and move them
1404
+ to the ~/Documents/PDFs directory. Ensure that the directory exists or create it if it does not.
1405
+ ```
1406
+
1407
+ On Linux, trigger makes use of inotify-tools to watch for file system events. On macOS, it uses fswatch, and on Windows, it uses Watch-Command.
1408
+
1409
+
1410
+
1345
1411
 
1346
1412
 
1347
1413
  ### Vixynt: Image Generation
@@ -385,6 +385,7 @@ if __name__ == "__main__":
385
385
  ### Linux install
386
386
  ```bash
387
387
 
388
+ # for audio primarily
388
389
  sudo apt-get install espeak
389
390
  sudo apt-get install portaudio19-dev python3-pyaudio
390
391
  sudo apt-get install alsa-base alsa-utils
@@ -392,6 +393,10 @@ sudo apt-get install libcairo2-dev
392
393
  sudo apt-get install libgirepository1.0-dev
393
394
  sudo apt-get install ffmpeg
394
395
 
396
+ # for triggers
397
+ sudo apt install inotify-tools
398
+
399
+
395
400
  #And if you don't have ollama installed, use this:
396
401
  curl -fsSL https://ollama.com/install.sh | sh
397
402
 
@@ -399,25 +404,46 @@ ollama pull llama3.2
399
404
  ollama pull llava:7b
400
405
  ollama pull nomic-embed-text
401
406
  pip install npcsh
402
- ```
403
- If you'd like to install the abilities to use STT and TTS, additionall install the following
404
- ```
405
- pip install openai-whisper pyaudio gtts playsound
406
- ```
407
+ # if you want to install with the API libraries
408
+ pip install npcsh[lite]
409
+ # if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
410
+ pip install npcsh[local]
411
+ # if you want to use tts/stt
412
+ pip install npcsh[whisper]
413
+
414
+ # if you want everything:
415
+ pip install npcsh[all]
416
+
407
417
 
408
418
 
409
419
 
410
420
  ### Mac install
411
421
  ```bash
422
+ #mainly for audio
412
423
  brew install portaudio
413
424
  brew install ffmpeg
425
+ brew install pygobject3
426
+
427
+ # for triggers
428
+ brew install fswatch
429
+
430
+
414
431
  brew install ollama
415
432
  brew services start ollama
416
- brew install pygobject3
417
433
  ollama pull llama3.2
418
434
  ollama pull llava:7b
419
435
  ollama pull nomic-embed-text
420
436
  pip install npcsh
437
+ # if you want to install with the API libraries
438
+ pip install npcsh[lite]
439
+ # if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
440
+ pip install npcsh[local]
441
+ # if you want to use tts/stt
442
+ pip install npcsh[whisper]
443
+
444
+ # if you want everything:
445
+ pip install npcsh[all]
446
+
421
447
  ```
422
448
  ### Windows Install
423
449
 
@@ -430,6 +456,16 @@ ollama pull llama3.2
430
456
  ollama pull llava:7b
431
457
  ollama pull nomic-embed-text
432
458
  pip install npcsh
459
+ # if you want to install with the API libraries
460
+ pip install npcsh[lite]
461
+ # if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
462
+ pip install npcsh[local]
463
+ # if you want to use tts/stt
464
+ pip install npcsh[whisper]
465
+
466
+ # if you want everything:
467
+ pip install npcsh[all]
468
+
433
469
  ```
434
470
  As of now, npcsh appears to work well with some of the core functionalities like /ots and /whisper.
435
471
 
@@ -967,13 +1003,30 @@ npc ots -f test_data/catfight.PNG
967
1003
  ### Plan : Schedule tasks to be run at regular intervals (under construction)
968
1004
  Use the /plan macro to schedule tasks to be run at regular intervals.
969
1005
  ```npcsh
970
- npcsh> /plan run a rag search on the files in the current directory every 5 minutes
1006
+ npcsh> /plan run a rag search for 'moonbeam' on the files in the current directory every 5 minutes
1007
+ ```
1008
+
1009
+ ```npcsh
1010
+ npcsh> /plan record the cpu usage every 5 minutes
971
1011
  ```
972
1012
 
1013
+ ```npcsh
1014
+ npcsh> /plan record the apps that are using the most ram every 5 minutes
1015
+ ```
1016
+
1017
+
1018
+
1019
+
973
1020
  ```bash
974
1021
  npc plan -f 30m -t 'task'
975
1022
  ```
976
1023
 
1024
+ Plan will use platform-specific scheduling tools. In particular, it uses crontab on Linux and launchd on macOS and Schedule Tasks on Windows.
1025
+
1026
+ Implementations have been provided for Mac and Windows, but as of 3/23/2025 it has only been tested on Linux.
1027
+
1028
+
1029
+
977
1030
  ### Plonk : Computer Control
978
1031
  Use the /plonk macro to allow the LLM to control your computer.
979
1032
  ```npcsh
@@ -1259,6 +1312,18 @@ npcsh> /spool model=llama3.3
1259
1312
  npc spool -n npc.npc
1260
1313
  ```
1261
1314
 
1315
+ ### Trigger
1316
+ Use the /trigger macro to execute specific actions based on certain conditions.
1317
+
1318
+ ```npcsh
1319
+ npcsh> /trigger watch for new PDF downloads in the ~/Downloads directory and move them
1320
+ to the ~/Documents/PDFs directory. Ensure that the directory exists or create it if it does not.
1321
+ ```
1322
+
1323
+ On Linux, trigger makes use of inotify-tools to watch for file system events. On macOS, it uses fswatch, and on Windows, it uses Watch-Command.
1324
+
1325
+
1326
+
1262
1327
 
1263
1328
 
1264
1329
  ### Vixynt: Image Generation
@@ -17,7 +17,7 @@ import numpy as np
17
17
 
18
18
  from google.generativeai import types
19
19
  import google.generativeai as genai
20
-
20
+ from sqlalchemy import create_engine
21
21
 
22
22
  from .npc_sysenv import (
23
23
  get_system_message,
@@ -1554,7 +1554,7 @@ def check_output_sufficient(
1554
1554
 
1555
1555
  def process_data_output(
1556
1556
  llm_response: Dict[str, Any],
1557
- db_conn: sqlite3.Connection,
1557
+ db_conn,
1558
1558
  request: str,
1559
1559
  tables: str = None,
1560
1560
  history: str = None,
@@ -1572,9 +1572,15 @@ def process_data_output(
1572
1572
  if not query:
1573
1573
  return {"response": "No query provided", "code": 400}
1574
1574
 
1575
+ # Create SQLAlchemy engine based on connection type
1576
+ if "psycopg2" in db_conn.__class__.__module__:
1577
+ engine = create_engine("postgresql://caug:gobears@localhost/npc_test")
1578
+ else:
1579
+ engine = create_engine("sqlite:///test_sqlite.db")
1580
+
1575
1581
  if choice == 1: # Direct answer query
1576
1582
  try:
1577
- df = pd.read_sql_query(query, db_conn)
1583
+ df = pd.read_sql_query(query, engine)
1578
1584
  result = check_output_sufficient(
1579
1585
  request, df, query, model=model, provider=provider, npc=npc
1580
1586
  )
@@ -1591,7 +1597,7 @@ def process_data_output(
1591
1597
 
1592
1598
  elif choice == 2: # Exploratory query
1593
1599
  try:
1594
- df = pd.read_sql_query(query, db_conn)
1600
+ df = pd.read_sql_query(query, engine)
1595
1601
  extra_context = f"""
1596
1602
  Exploratory query results:
1597
1603
  Query: {query}
@@ -1621,7 +1627,7 @@ def process_data_output(
1621
1627
 
1622
1628
  def get_data_response(
1623
1629
  request: str,
1624
- db_conn: sqlite3.Connection,
1630
+ db_conn,
1625
1631
  tables: str = None,
1626
1632
  n_try_freq: int = 5,
1627
1633
  extra_context: str = None,
@@ -1634,9 +1640,73 @@ def get_data_response(
1634
1640
  """
1635
1641
  Generate a response to a data request, with retries for failed attempts.
1636
1642
  """
1643
+
1644
+ # Extract schema information based on connection type
1645
+ schema_info = ""
1646
+ if "psycopg2" in db_conn.__class__.__module__:
1647
+ cursor = db_conn.cursor()
1648
+ # Get all tables and their columns
1649
+ cursor.execute(
1650
+ """
1651
+ SELECT
1652
+ t.table_name,
1653
+ array_agg(c.column_name || ' ' || c.data_type) as columns,
1654
+ array_agg(
1655
+ CASE
1656
+ WHEN tc.constraint_type = 'FOREIGN KEY'
1657
+ THEN kcu.column_name || ' REFERENCES ' || ccu.table_name || '.' || ccu.column_name
1658
+ ELSE NULL
1659
+ END
1660
+ ) as foreign_keys
1661
+ FROM information_schema.tables t
1662
+ JOIN information_schema.columns c ON t.table_name = c.table_name
1663
+ LEFT JOIN information_schema.table_constraints tc
1664
+ ON t.table_name = tc.table_name
1665
+ AND tc.constraint_type = 'FOREIGN KEY'
1666
+ LEFT JOIN information_schema.key_column_usage kcu
1667
+ ON tc.constraint_name = kcu.constraint_name
1668
+ LEFT JOIN information_schema.constraint_column_usage ccu
1669
+ ON tc.constraint_name = ccu.constraint_name
1670
+ WHERE t.table_schema = 'public'
1671
+ GROUP BY t.table_name;
1672
+ """
1673
+ )
1674
+ for table, columns, fks in cursor.fetchall():
1675
+ schema_info += f"\nTable {table}:\n"
1676
+ schema_info += "Columns:\n"
1677
+ for col in columns:
1678
+ schema_info += f" - {col}\n"
1679
+ if any(fk for fk in fks if fk is not None):
1680
+ schema_info += "Foreign Keys:\n"
1681
+ for fk in fks:
1682
+ if fk:
1683
+ schema_info += f" - {fk}\n"
1684
+
1685
+ elif "sqlite3" in db_conn.__class__.__module__:
1686
+ cursor = db_conn.cursor()
1687
+ cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
1688
+ tables = cursor.fetchall()
1689
+ for (table_name,) in tables:
1690
+ schema_info += f"\nTable {table_name}:\n"
1691
+ cursor.execute(f"PRAGMA table_info({table_name});")
1692
+ columns = cursor.fetchall()
1693
+ schema_info += "Columns:\n"
1694
+ for col in columns:
1695
+ schema_info += f" - {col[1]} {col[2]}\n"
1696
+
1697
+ cursor.execute(f"PRAGMA foreign_key_list({table_name});")
1698
+ foreign_keys = cursor.fetchall()
1699
+ if foreign_keys:
1700
+ schema_info += "Foreign Keys:\n"
1701
+ for fk in foreign_keys:
1702
+ schema_info += f" - {fk[3]} REFERENCES {fk[2]}({fk[4]})\n"
1703
+
1637
1704
  prompt = f"""
1638
1705
  User request: {request}
1639
- Available tables: {tables or 'Not specified'}
1706
+
1707
+ Database Schema:
1708
+ {schema_info}
1709
+
1640
1710
  {extra_context or ''}
1641
1711
  {f'Query history: {history}' if history else ''}
1642
1712
 
@@ -1655,49 +1725,47 @@ def get_data_response(
1655
1725
 
1656
1726
  failures = []
1657
1727
  for attempt in range(max_retries):
1658
- try:
1659
- llm_response = get_llm_response(
1660
- prompt, npc=npc, format="json", model=model, provider=provider
1661
- )
1728
+ # try:
1729
+ llm_response = get_llm_response(
1730
+ prompt, npc=npc, format="json", model=model, provider=provider
1731
+ )
1662
1732
 
1663
- # Clean response if it's a string
1664
- response_data = llm_response.get("response", {})
1665
- if isinstance(response_data, str):
1666
- response_data = (
1667
- response_data.replace("```json", "").replace("```", "").strip()
1668
- )
1669
- try:
1670
- response_data = json.loads(response_data)
1671
- except json.JSONDecodeError:
1672
- failures.append("Invalid JSON response")
1673
- continue
1674
-
1675
- result = process_data_output(
1676
- response_data,
1677
- db_conn,
1678
- request,
1679
- tables=tables,
1680
- history=failures,
1681
- npc=npc,
1682
- model=model,
1683
- provider=provider,
1733
+ # Clean response if it's a string
1734
+ response_data = llm_response.get("response", {})
1735
+ if isinstance(response_data, str):
1736
+ response_data = (
1737
+ response_data.replace("```json", "").replace("```", "").strip()
1684
1738
  )
1739
+ try:
1740
+ response_data = json.loads(response_data)
1741
+ except json.JSONDecodeError:
1742
+ failures.append("Invalid JSON response")
1743
+ continue
1744
+
1745
+ result = process_data_output(
1746
+ response_data,
1747
+ db_conn,
1748
+ request,
1749
+ tables=tables,
1750
+ history=failures,
1751
+ npc=npc,
1752
+ model=model,
1753
+ provider=provider,
1754
+ )
1685
1755
 
1686
- if result["code"] == 200:
1687
- return result
1688
-
1689
- failures.append(result["response"])
1756
+ if result["code"] == 200:
1757
+ return result
1690
1758
 
1691
- if attempt == max_retries - 1:
1692
- return {
1693
- "response": f"Failed after {max_retries} attempts. Errors: {'; '.join(failures)}",
1694
- "code": 400,
1695
- }
1759
+ failures.append(result["response"])
1696
1760
 
1697
- except Exception as e:
1698
- failures.append(str(e))
1761
+ if attempt == max_retries - 1:
1762
+ return {
1763
+ "response": f"Failed after {max_retries} attempts. Errors: {'; '.join(failures)}",
1764
+ "code": 400,
1765
+ }
1699
1766
 
1700
- return {"response": "Max retries exceeded", "code": 400}
1767
+ # except Exception as e:
1768
+ # failures.append(str(e))
1701
1769
 
1702
1770
 
1703
1771
  def enter_reasoning_human_in_the_loop(
@@ -788,11 +788,29 @@ class NPC:
788
788
  self.model = model
789
789
  self.db_conn = db_conn
790
790
  if self.db_conn is not None:
791
- self.tables = self.db_conn.execute(
792
- "SELECT name, sql FROM sqlite_master WHERE type='table';"
793
- ).fetchall()
791
+ # Determine database type
792
+ if "psycopg2" in self.db_conn.__class__.__module__:
793
+ # PostgreSQL connection
794
+ cursor = self.db_conn.cursor()
795
+ cursor.execute(
796
+ """
797
+ SELECT table_name, obj_description((quote_ident(table_name))::regclass, 'pg_class')
798
+ FROM information_schema.tables
799
+ WHERE table_schema='public';
800
+ """
801
+ )
802
+ self.tables = cursor.fetchall()
803
+ self.db_type = "postgres"
804
+ elif "sqlite3" in self.db_conn.__class__.__module__:
805
+ # SQLite connection
806
+ self.tables = self.db_conn.execute(
807
+ "SELECT name, sql FROM sqlite_master WHERE type='table';"
808
+ ).fetchall()
809
+ self.db_type = "sqlite"
794
810
  else:
795
811
  self.tables = None
812
+ self.db_type = None
813
+
796
814
  self.provider = provider
797
815
  self.api_url = api_url
798
816
  self.all_tools = all_tools or []
@@ -839,6 +857,45 @@ class NPC:
839
857
  else:
840
858
  self.parsed_npcs = []
841
859
 
860
+ def execute_query(self, query, params=None):
861
+ """Execute a query based on database type"""
862
+ if self.db_type == "postgres":
863
+ cursor = self.db_conn.cursor()
864
+ cursor.execute(query, params or ())
865
+ return cursor.fetchall()
866
+ else: # sqlite
867
+ cursor = self.db_conn.execute(query, params or ())
868
+ return cursor.fetchall()
869
+
870
+ def _determine_db_type(self):
871
+ """Determine if the connection is PostgreSQL or SQLite"""
872
+ # Check the connection object's class name
873
+ conn_type = self.db_conn.__class__.__module__.lower()
874
+
875
+ if "psycopg" in conn_type:
876
+ return "postgres"
877
+ elif "sqlite" in conn_type:
878
+ return "sqlite"
879
+ else:
880
+ raise ValueError(f"Unsupported database type: {conn_type}")
881
+
882
+ def _get_tables(self):
883
+ """Get table information based on database type"""
884
+ if self.db_type == "postgres":
885
+ cursor = self.db_conn.cursor()
886
+ cursor.execute(
887
+ """
888
+ SELECT table_name, obj_description((quote_ident(table_name))::regclass, 'pg_class') as description
889
+ FROM information_schema.tables
890
+ WHERE table_schema='public';
891
+ """
892
+ )
893
+ return cursor.fetchall()
894
+ else: # sqlite
895
+ return self.db_conn.execute(
896
+ "SELECT name, sql FROM sqlite_master WHERE type='table';"
897
+ ).fetchall()
898
+
842
899
  def get_memory(self):
843
900
  return
844
901