npcsh 0.3.27.6__py3-none-any.whl → 0.3.28__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npcsh/llm_funcs.py +111 -43
- npcsh/npc_compiler.py +60 -3
- npcsh/response.py +3 -1
- {npcsh-0.3.27.6.dist-info → npcsh-0.3.28.dist-info}/METADATA +36 -6
- {npcsh-0.3.27.6.dist-info → npcsh-0.3.28.dist-info}/RECORD +28 -28
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/calculator.tool +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/celona.npc +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/corca.npc +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/eriane.npc +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/foreman.npc +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/generic_search.tool +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/image_generation.tool +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/lineru.npc +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/local_search.tool +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/maurawa.npc +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/raone.npc +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/screen_cap.tool +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/sibiji.npc +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/slean.npc +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/sql_executor.tool +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/test_pipeline.py +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/turnic.npc +0 -0
- {npcsh-0.3.27.6.data → npcsh-0.3.28.data}/data/npcsh/npc_team/welxor.npc +0 -0
- {npcsh-0.3.27.6.dist-info → npcsh-0.3.28.dist-info}/WHEEL +0 -0
- {npcsh-0.3.27.6.dist-info → npcsh-0.3.28.dist-info}/entry_points.txt +0 -0
- {npcsh-0.3.27.6.dist-info → npcsh-0.3.28.dist-info}/licenses/LICENSE +0 -0
- {npcsh-0.3.27.6.dist-info → npcsh-0.3.28.dist-info}/top_level.txt +0 -0
npcsh/llm_funcs.py
CHANGED

@@ -17,7 +17,7 @@ import numpy as np

 from google.generativeai import types
 import google.generativeai as genai
-
+from sqlalchemy import create_engine

 from .npc_sysenv import (
     get_system_message,
@@ -1554,7 +1554,7 @@ def check_output_sufficient(

 def process_data_output(
     llm_response: Dict[str, Any],
-    db_conn
+    db_conn,
     request: str,
     tables: str = None,
     history: str = None,
@@ -1572,9 +1572,15 @@ def process_data_output(
     if not query:
         return {"response": "No query provided", "code": 400}

+    # Create SQLAlchemy engine based on connection type
+    if "psycopg2" in db_conn.__class__.__module__:
+        engine = create_engine("postgresql://caug:gobears@localhost/npc_test")
+    else:
+        engine = create_engine("sqlite:///test_sqlite.db")
+
     if choice == 1:  # Direct answer query
         try:
-            df = pd.read_sql_query(query,
+            df = pd.read_sql_query(query, engine)
             result = check_output_sufficient(
                 request, df, query, model=model, provider=provider, npc=npc
             )
@@ -1591,7 +1597,7 @@ def process_data_output(

     elif choice == 2:  # Exploratory query
         try:
-            df = pd.read_sql_query(query,
+            df = pd.read_sql_query(query, engine)
             extra_context = f"""
             Exploratory query results:
             Query: {query}
@@ -1621,7 +1627,7 @@ def process_data_output(

 def get_data_response(
     request: str,
-    db_conn
+    db_conn,
     tables: str = None,
     n_try_freq: int = 5,
     extra_context: str = None,
@@ -1634,9 +1640,73 @@ def get_data_response(
     """
     Generate a response to a data request, with retries for failed attempts.
     """
+
+    # Extract schema information based on connection type
+    schema_info = ""
+    if "psycopg2" in db_conn.__class__.__module__:
+        cursor = db_conn.cursor()
+        # Get all tables and their columns
+        cursor.execute(
+            """
+            SELECT
+                t.table_name,
+                array_agg(c.column_name || ' ' || c.data_type) as columns,
+                array_agg(
+                    CASE
+                        WHEN tc.constraint_type = 'FOREIGN KEY'
+                        THEN kcu.column_name || ' REFERENCES ' || ccu.table_name || '.' || ccu.column_name
+                        ELSE NULL
+                    END
+                ) as foreign_keys
+            FROM information_schema.tables t
+            JOIN information_schema.columns c ON t.table_name = c.table_name
+            LEFT JOIN information_schema.table_constraints tc
+                ON t.table_name = tc.table_name
+                AND tc.constraint_type = 'FOREIGN KEY'
+            LEFT JOIN information_schema.key_column_usage kcu
+                ON tc.constraint_name = kcu.constraint_name
+            LEFT JOIN information_schema.constraint_column_usage ccu
+                ON tc.constraint_name = ccu.constraint_name
+            WHERE t.table_schema = 'public'
+            GROUP BY t.table_name;
+            """
+        )
+        for table, columns, fks in cursor.fetchall():
+            schema_info += f"\nTable {table}:\n"
+            schema_info += "Columns:\n"
+            for col in columns:
+                schema_info += f"  - {col}\n"
+            if any(fk for fk in fks if fk is not None):
+                schema_info += "Foreign Keys:\n"
+                for fk in fks:
+                    if fk:
+                        schema_info += f"  - {fk}\n"
+
+    elif "sqlite3" in db_conn.__class__.__module__:
+        cursor = db_conn.cursor()
+        cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
+        tables = cursor.fetchall()
+        for (table_name,) in tables:
+            schema_info += f"\nTable {table_name}:\n"
+            cursor.execute(f"PRAGMA table_info({table_name});")
+            columns = cursor.fetchall()
+            schema_info += "Columns:\n"
+            for col in columns:
+                schema_info += f"  - {col[1]} {col[2]}\n"
+
+            cursor.execute(f"PRAGMA foreign_key_list({table_name});")
+            foreign_keys = cursor.fetchall()
+            if foreign_keys:
+                schema_info += "Foreign Keys:\n"
+                for fk in foreign_keys:
+                    schema_info += f"  - {fk[3]} REFERENCES {fk[2]}({fk[4]})\n"
+
     prompt = f"""
     User request: {request}
-
+
+    Database Schema:
+    {schema_info}
+
     {extra_context or ''}
     {f'Query history: {history}' if history else ''}

@@ -1655,49 +1725,47 @@ def get_data_response(

     failures = []
     for attempt in range(max_retries):
-        try:
-
-
-
+        # try:
+        llm_response = get_llm_response(
+            prompt, npc=npc, format="json", model=model, provider=provider
+        )

-
-
-
-
-
-            )
-            try:
-                response_data = json.loads(response_data)
-            except json.JSONDecodeError:
-                failures.append("Invalid JSON response")
-                continue
-
-            result = process_data_output(
-                response_data,
-                db_conn,
-                request,
-                tables=tables,
-                history=failures,
-                npc=npc,
-                model=model,
-                provider=provider,
+        # Clean response if it's a string
+        response_data = llm_response.get("response", {})
+        if isinstance(response_data, str):
+            response_data = (
+                response_data.replace("```json", "").replace("```", "").strip()
             )
+        try:
+            response_data = json.loads(response_data)
+        except json.JSONDecodeError:
+            failures.append("Invalid JSON response")
+            continue
+
+        result = process_data_output(
+            response_data,
+            db_conn,
+            request,
+            tables=tables,
+            history=failures,
+            npc=npc,
+            model=model,
+            provider=provider,
+        )

-
-
-
-            failures.append(result["response"])
+        if result["code"] == 200:
+            return result

-
-        return {
-            "response": f"Failed after {max_retries} attempts. Errors: {'; '.join(failures)}",
-            "code": 400,
-        }
+        failures.append(result["response"])

-
-
+        if attempt == max_retries - 1:
+            return {
+                "response": f"Failed after {max_retries} attempts. Errors: {'; '.join(failures)}",
+                "code": 400,
+            }

-
+        # except Exception as e:
+        #     failures.append(str(e))


 def enter_reasoning_human_in_the_loop(
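Both changes above key off `db_conn.__class__.__module__` to tell a psycopg2 connection apart from a sqlite3 one, and the SQLAlchemy engine URLs added to `process_data_output` are hardcoded (`postgresql://caug:gobears@localhost/npc_test`, `sqlite:///test_sqlite.db`), which look like development defaults rather than values derived from the caller's connection. A minimal sketch of the same dispatch pattern, runnable against SQLite only; `engine_for` and its URL parameters are illustrative names, not part of npcsh:

```python
import sqlite3

import pandas as pd
from sqlalchemy import create_engine


def engine_for(db_conn,
               pg_url="postgresql+psycopg2://user:pass@localhost/dbname",
               sqlite_url="sqlite://"):
    """Mirror the dispatch added to process_data_output: inspect the DBAPI
    module of an open connection and build a matching SQLAlchemy engine.
    The URLs here are placeholder arguments, not npcsh defaults."""
    if "psycopg2" in db_conn.__class__.__module__:
        return create_engine(pg_url)      # psycopg2 connection -> Postgres engine
    return create_engine(sqlite_url)      # anything else -> in-memory SQLite engine


# Exercise the SQLite branch; no Postgres server is needed for this.
conn = sqlite3.connect(":memory:")        # connection class lives in module "sqlite3"
engine = engine_for(conn)
df = pd.read_sql_query("SELECT 1 AS answer", engine)
print(df)                                 # one row, one column named "answer"
```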
npcsh/npc_compiler.py
CHANGED

@@ -788,11 +788,29 @@ class NPC:
         self.model = model
         self.db_conn = db_conn
         if self.db_conn is not None:
-
-
-
+            # Determine database type
+            if "psycopg2" in self.db_conn.__class__.__module__:
+                # PostgreSQL connection
+                cursor = self.db_conn.cursor()
+                cursor.execute(
+                    """
+                    SELECT table_name, obj_description((quote_ident(table_name))::regclass, 'pg_class')
+                    FROM information_schema.tables
+                    WHERE table_schema='public';
+                    """
+                )
+                self.tables = cursor.fetchall()
+                self.db_type = "postgres"
+            elif "sqlite3" in self.db_conn.__class__.__module__:
+                # SQLite connection
+                self.tables = self.db_conn.execute(
+                    "SELECT name, sql FROM sqlite_master WHERE type='table';"
+                ).fetchall()
+                self.db_type = "sqlite"
         else:
             self.tables = None
+            self.db_type = None
+
         self.provider = provider
         self.api_url = api_url
         self.all_tools = all_tools or []
@@ -839,6 +857,45 @@ class NPC:
         else:
             self.parsed_npcs = []

+    def execute_query(self, query, params=None):
+        """Execute a query based on database type"""
+        if self.db_type == "postgres":
+            cursor = self.db_conn.cursor()
+            cursor.execute(query, params or ())
+            return cursor.fetchall()
+        else:  # sqlite
+            cursor = self.db_conn.execute(query, params or ())
+            return cursor.fetchall()
+
+    def _determine_db_type(self):
+        """Determine if the connection is PostgreSQL or SQLite"""
+        # Check the connection object's class name
+        conn_type = self.db_conn.__class__.__module__.lower()
+
+        if "psycopg" in conn_type:
+            return "postgres"
+        elif "sqlite" in conn_type:
+            return "sqlite"
+        else:
+            raise ValueError(f"Unsupported database type: {conn_type}")
+
+    def _get_tables(self):
+        """Get table information based on database type"""
+        if self.db_type == "postgres":
+            cursor = self.db_conn.cursor()
+            cursor.execute(
+                """
+                SELECT table_name, obj_description((quote_ident(table_name))::regclass, 'pg_class') as description
+                FROM information_schema.tables
+                WHERE table_schema='public';
+                """
+            )
+            return cursor.fetchall()
+        else:  # sqlite
+            return self.db_conn.execute(
+                "SELECT name, sql FROM sqlite_master WHERE type='table';"
+            ).fetchall()
+
     def get_memory(self):
         return

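The `NPC` constructor now records `self.db_type` and pre-fetches `self.tables`, and the new `execute_query`, `_determine_db_type`, and `_get_tables` helpers repeat the same module-name check. A quick standalone way to see what that check and the SQLite table query return, using only the standard library; `determine_db_type` below is a copy of the logic for illustration, not an npcsh import:

```python
import sqlite3


def determine_db_type(db_conn):
    """Standalone copy of the check NPC._determine_db_type performs."""
    conn_type = db_conn.__class__.__module__.lower()
    if "psycopg" in conn_type:
        return "postgres"
    if "sqlite" in conn_type:
        return "sqlite"
    raise ValueError(f"Unsupported database type: {conn_type}")


conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE fact (id INTEGER PRIMARY KEY, body TEXT)")

print(determine_db_type(conn))  # -> "sqlite"

# The same query NPC.__init__ and NPC._get_tables run on the sqlite branch:
print(conn.execute("SELECT name, sql FROM sqlite_master WHERE type='table';").fetchall())
# -> [('fact', 'CREATE TABLE fact (id INTEGER PRIMARY KEY, body TEXT)')]
```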
npcsh/response.py
CHANGED

@@ -230,7 +230,9 @@ def get_openai_response(

     # try:
     if api_key is None:
-        api_key = os.environ
+        api_key = os.environ.get("OPENAI_API_KEY", "")
+        if len(api_key) == 0:
+            raise ValueError("API key not found.")
     client = OpenAI(api_key=api_key)
     # print(npc)

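In `get_openai_response`, the `api_key` fallback now reads `OPENAI_API_KEY` via `os.environ.get` and raises a `ValueError` when the variable is empty or unset, instead of the bare `os.environ` assignment shown (truncated) above. A minimal sketch of the same fallback shape; `resolve_api_key` and its `env_var` parameter are illustrative names, not npcsh API:

```python
import os


def resolve_api_key(api_key=None, env_var="OPENAI_API_KEY"):
    """Prefer an explicit key, otherwise read the environment,
    and fail loudly when neither is available."""
    if api_key is None:
        api_key = os.environ.get(env_var, "")
        if len(api_key) == 0:
            raise ValueError("API key not found.")
    return api_key


# Passes through an explicit key; raises if only the env var is consulted and it is missing.
print(resolve_api_key("sk-example-not-real"))
```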
{npcsh-0.3.27.6.dist-info → npcsh-0.3.28.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: npcsh
-Version: 0.3.27.6
+Version: 0.3.28
 Summary: npcsh is a command line tool for integrating LLMs into everyday workflows and for orchestrating teams of NPCs.
 Home-page: https://github.com/cagostino/npcsh
 Author: Christopher Agostino
@@ -17,8 +17,12 @@ Requires-Dist: requests
 Requires-Dist: matplotlib
 Requires-Dist: markdown
 Requires-Dist: PyYAML
+Requires-Dist: PyMuPDF
+Requires-Dist: pyautogui
 Requires-Dist: pygments
+Requires-Dist: sqlalchemy
 Requires-Dist: termcolor
+Requires-Dist: rich
 Requires-Dist: colorama
 Requires-Dist: Pillow
 Requires-Dist: python-dotenv
@@ -28,6 +32,7 @@ Requires-Dist: duckduckgo-search
 Requires-Dist: flask
 Requires-Dist: flask_cors
 Requires-Dist: redis
+Requires-Dist: psycopg2-binary
 Requires-Dist: flask_sse
 Provides-Extra: lite
 Requires-Dist: anthropic; extra == "lite"
@@ -478,11 +483,16 @@ ollama pull llama3.2
 ollama pull llava:7b
 ollama pull nomic-embed-text
 pip install npcsh
-
-
-
-pip install
-
+# if you want to install with the API libraries
+pip install npcsh[lite]
+# if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
+pip install npcsh[local]
+# if you want to use tts/stt
+pip install npcsh[whisper]
+
+# if you want everything:
+pip install npcsh[all]
+



@@ -497,6 +507,16 @@ ollama pull llama3.2
 ollama pull llava:7b
 ollama pull nomic-embed-text
 pip install npcsh
+# if you want to install with the API libraries
+pip install npcsh[lite]
+# if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
+pip install npcsh[local]
+# if you want to use tts/stt
+pip install npcsh[whisper]
+
+# if you want everything:
+pip install npcsh[all]
+
 ```
 ### Windows Install

@@ -509,6 +529,16 @@ ollama pull llama3.2
 ollama pull llava:7b
 ollama pull nomic-embed-text
 pip install npcsh
+# if you want to install with the API libraries
+pip install npcsh[lite]
+# if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
+pip install npcsh[local]
+# if you want to use tts/stt
+pip install npcsh[whisper]
+
+# if you want everything:
+pip install npcsh[all]
+
 ```
 As of now, npcsh appears to work well with some of the core functionalities like /ots and /whisper.

{npcsh-0.3.27.6.dist-info → npcsh-0.3.28.dist-info}/RECORD
CHANGED

@@ -10,14 +10,14 @@ npcsh/helpers.py,sha256=aiK6kzf1jEYTHFssSxMOhPWDIcRvE0jXyrxWqh0coek,18992
 npcsh/image.py,sha256=nVnVsd0yqmUoyoyJWqbPLlEmjtFctNnpkiKZLdTSVIg,10533
 npcsh/image_gen.py,sha256=PZQfFBtyXQuxfs1x2mJGL4RkudnKdzZp5Xem9gSevG0,2047
 npcsh/knowledge_graph.py,sha256=YIE0SJmUUfYrn1GqG6L7lWG0dIsQBun7A5CeA86PO6o,31442
-npcsh/llm_funcs.py,sha256=
+npcsh/llm_funcs.py,sha256=U10KSPqsJp4oy2edNh7mjAjYbIuxjGO3S-zaSi-rX0o,69030
 npcsh/load_data.py,sha256=Vh6YGxFVGWBMcn4cDrIgy8sC7QGCrWk0niJyR3l-k9U,1967
 npcsh/main.py,sha256=rpf_2ysx3cR3eHsrvZApprJ-3D3-OrWcJ15bM1bc97I,81
 npcsh/model_runner.py,sha256=riS6Hx3M7mj5erMm7OwBA8yufXYseVEbMYTRSfaDh2Y,6427
-npcsh/npc_compiler.py,sha256=
+npcsh/npc_compiler.py,sha256=2l3-B2czt_uZARJg_5PObFjjkr8WbmBY2rBJBTq3w0k,104268
 npcsh/npc_sysenv.py,sha256=g5LPYo8g6jlKd22fWVGghanLCr7tqjxytKvFbu0k-C0,11505
 npcsh/plonk.py,sha256=ewdkX6K1V9kLFkrNsQ5oGXLiuFaLev9mxXBY-B0PKus,9926
-npcsh/response.py,sha256=
+npcsh/response.py,sha256=DFh6uoIw06SqPqKGv9DGHzoltprUHJSz6ZxgFBZRzX8,22478
 npcsh/search.py,sha256=K3AcYlefm_YNWQO_Yq5kTIRFKIIN3xX70CC3z038ox0,8605
 npcsh/serve.py,sha256=Zy47IdaGCt5ziGnQ5KHTQH1K7xXyo7trwUMVDOLNxvU,48246
 npcsh/shell.py,sha256=EFsvNJCW5EYdikkbm0KHQRu0tNFh0HcHB_62k5gqcUI,18218
@@ -43,28 +43,28 @@ npcsh/npc_team/tools/image_generation.tool,sha256=CaTkdjxWLFtMAghPvZnToMSwHbMQVu
 npcsh/npc_team/tools/local_search.tool,sha256=g9Hc_Xx08fc6lu1IHvdwYU_18KxcOoHDwvIAfrGmn3U,6852
 npcsh/npc_team/tools/screen_cap.tool,sha256=v7l54PWWdgszdd-n_DFqI9nyMlBSeYeNIfQsdFYSZ_4,1389
 npcsh/npc_team/tools/sql_executor.tool,sha256=laEOBoyX2p3dbQpY2HoWnj5IXvxBoZ4kghGMkBwymQA,825
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
-npcsh-0.3.
+npcsh-0.3.28.data/data/npcsh/npc_team/calculator.tool,sha256=ZKmQ0VusuJj8Ib5MDqC8MTDaqWIZCxzpazWPVkYZqYc,197
+npcsh-0.3.28.data/data/npcsh/npc_team/celona.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcsh-0.3.28.data/data/npcsh/npc_team/corca.npc,sha256=9qs7922thBESU4r5GRygqAjvg9zvhQyMYqqyb4wwSew,662
+npcsh-0.3.28.data/data/npcsh/npc_team/eriane.npc,sha256=5z6L-RjEouEp06SLOzkQoOCEi0eb1K-CxVnvyIbNK3g,299
+npcsh-0.3.28.data/data/npcsh/npc_team/foreman.npc,sha256=WqB8jLfBToGmr8c1vip1KOnTHxfXlGXwDUGnZoDMQr0,327
+npcsh-0.3.28.data/data/npcsh/npc_team/generic_search.tool,sha256=sYA4aUuZm84vpx-5tNi6ADq9ywHaj_YfLt6PI0ZblUo,745
+npcsh-0.3.28.data/data/npcsh/npc_team/image_generation.tool,sha256=CaTkdjxWLFtMAghPvZnToMSwHbMQVusojhd9R9jybmI,577
+npcsh-0.3.28.data/data/npcsh/npc_team/lineru.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcsh-0.3.28.data/data/npcsh/npc_team/local_search.tool,sha256=g9Hc_Xx08fc6lu1IHvdwYU_18KxcOoHDwvIAfrGmn3U,6852
+npcsh-0.3.28.data/data/npcsh/npc_team/maurawa.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcsh-0.3.28.data/data/npcsh/npc_team/npcsh.ctx,sha256=VOd7omCBo_764gKCYuHxJcyVc61oC5YjB7rnCbDhnYU,275
+npcsh-0.3.28.data/data/npcsh/npc_team/raone.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcsh-0.3.28.data/data/npcsh/npc_team/screen_cap.tool,sha256=v7l54PWWdgszdd-n_DFqI9nyMlBSeYeNIfQsdFYSZ_4,1389
+npcsh-0.3.28.data/data/npcsh/npc_team/sibiji.npc,sha256=MJZHU9xXmvUbZvwpX1wWinvkrwYiKm1J63t37l0EYGE,202
+npcsh-0.3.28.data/data/npcsh/npc_team/slean.npc,sha256=to3-d7qWMzhrYAeXalzNqVdt_3wROFGEfhprVXDttos,326
+npcsh-0.3.28.data/data/npcsh/npc_team/sql_executor.tool,sha256=laEOBoyX2p3dbQpY2HoWnj5IXvxBoZ4kghGMkBwymQA,825
+npcsh-0.3.28.data/data/npcsh/npc_team/test_pipeline.py,sha256=GKIcqw0fXDHsUDfMvu7GTj5cfPnqs7aX5xxiKPs2xCc,5657
+npcsh-0.3.28.data/data/npcsh/npc_team/turnic.npc,sha256=odTFzQTN01-xaWXvjoC698htaFi6mvrzfKMAob8Yqb0,335
+npcsh-0.3.28.data/data/npcsh/npc_team/welxor.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcsh-0.3.28.dist-info/licenses/LICENSE,sha256=j0YPvce7Ng9e32zYOu0EmXjXeJ0Nwawd0RA3uSGGH4E,1070
+npcsh-0.3.28.dist-info/METADATA,sha256=pTNSY27gaMZvz1Q3NsHVUy0nQWtmho5-cgtusHOyshg,82196
+npcsh-0.3.28.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
+npcsh-0.3.28.dist-info/entry_points.txt,sha256=Y2rAM_m1er_Effxc0DXtGh36sC1FOUfefqGAt6vEte0,64
+npcsh-0.3.28.dist-info/top_level.txt,sha256=kHSNgKMCkfjV95-DH0YSp1LLBi0HXdF3w57j7MQON3E,6
+npcsh-0.3.28.dist-info/RECORD,,
The remaining 23 files listed above with +0 -0 (the npc_team data files plus WHEEL, entry_points.txt, LICENSE, and top_level.txt) have no content changes; only the version prefix in their paths moves from 0.3.27.6 to 0.3.28.