npcsh 0.3.27.7__py3-none-any.whl → 0.3.29__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28) hide show
  1. npcsh/llm_funcs.py +111 -43
  2. npcsh/npc_compiler.py +60 -3
  3. npcsh/shell_helpers.py +525 -0
  4. {npcsh-0.3.27.7.dist-info → npcsh-0.3.29.dist-info}/METADATA +74 -8
  5. {npcsh-0.3.27.7.dist-info → npcsh-0.3.29.dist-info}/RECORD +28 -28
  6. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/calculator.tool +0 -0
  7. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/celona.npc +0 -0
  8. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/corca.npc +0 -0
  9. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/eriane.npc +0 -0
  10. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/foreman.npc +0 -0
  11. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/generic_search.tool +0 -0
  12. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/image_generation.tool +0 -0
  13. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/lineru.npc +0 -0
  14. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/local_search.tool +0 -0
  15. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/maurawa.npc +0 -0
  16. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
  17. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/raone.npc +0 -0
  18. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/screen_cap.tool +0 -0
  19. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/sibiji.npc +0 -0
  20. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/slean.npc +0 -0
  21. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/sql_executor.tool +0 -0
  22. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/test_pipeline.py +0 -0
  23. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/turnic.npc +0 -0
  24. {npcsh-0.3.27.7.data → npcsh-0.3.29.data}/data/npcsh/npc_team/welxor.npc +0 -0
  25. {npcsh-0.3.27.7.dist-info → npcsh-0.3.29.dist-info}/WHEEL +0 -0
  26. {npcsh-0.3.27.7.dist-info → npcsh-0.3.29.dist-info}/entry_points.txt +0 -0
  27. {npcsh-0.3.27.7.dist-info → npcsh-0.3.29.dist-info}/licenses/LICENSE +0 -0
  28. {npcsh-0.3.27.7.dist-info → npcsh-0.3.29.dist-info}/top_level.txt +0 -0
npcsh/llm_funcs.py CHANGED
@@ -17,7 +17,7 @@ import numpy as np
17
17
 
18
18
  from google.generativeai import types
19
19
  import google.generativeai as genai
20
-
20
+ from sqlalchemy import create_engine
21
21
 
22
22
  from .npc_sysenv import (
23
23
  get_system_message,
@@ -1554,7 +1554,7 @@ def check_output_sufficient(
1554
1554
 
1555
1555
  def process_data_output(
1556
1556
  llm_response: Dict[str, Any],
1557
- db_conn: sqlite3.Connection,
1557
+ db_conn,
1558
1558
  request: str,
1559
1559
  tables: str = None,
1560
1560
  history: str = None,
@@ -1572,9 +1572,15 @@ def process_data_output(
1572
1572
  if not query:
1573
1573
  return {"response": "No query provided", "code": 400}
1574
1574
 
1575
+ # Create SQLAlchemy engine based on connection type
1576
+ if "psycopg2" in db_conn.__class__.__module__:
1577
+ engine = create_engine("postgresql://caug:gobears@localhost/npc_test")
1578
+ else:
1579
+ engine = create_engine("sqlite:///test_sqlite.db")
1580
+
1575
1581
  if choice == 1: # Direct answer query
1576
1582
  try:
1577
- df = pd.read_sql_query(query, db_conn)
1583
+ df = pd.read_sql_query(query, engine)
1578
1584
  result = check_output_sufficient(
1579
1585
  request, df, query, model=model, provider=provider, npc=npc
1580
1586
  )
@@ -1591,7 +1597,7 @@ def process_data_output(
1591
1597
 
1592
1598
  elif choice == 2: # Exploratory query
1593
1599
  try:
1594
- df = pd.read_sql_query(query, db_conn)
1600
+ df = pd.read_sql_query(query, engine)
1595
1601
  extra_context = f"""
1596
1602
  Exploratory query results:
1597
1603
  Query: {query}
@@ -1621,7 +1627,7 @@ def process_data_output(
1621
1627
 
1622
1628
  def get_data_response(
1623
1629
  request: str,
1624
- db_conn: sqlite3.Connection,
1630
+ db_conn,
1625
1631
  tables: str = None,
1626
1632
  n_try_freq: int = 5,
1627
1633
  extra_context: str = None,
@@ -1634,9 +1640,73 @@ def get_data_response(
1634
1640
  """
1635
1641
  Generate a response to a data request, with retries for failed attempts.
1636
1642
  """
1643
+
1644
+ # Extract schema information based on connection type
1645
+ schema_info = ""
1646
+ if "psycopg2" in db_conn.__class__.__module__:
1647
+ cursor = db_conn.cursor()
1648
+ # Get all tables and their columns
1649
+ cursor.execute(
1650
+ """
1651
+ SELECT
1652
+ t.table_name,
1653
+ array_agg(c.column_name || ' ' || c.data_type) as columns,
1654
+ array_agg(
1655
+ CASE
1656
+ WHEN tc.constraint_type = 'FOREIGN KEY'
1657
+ THEN kcu.column_name || ' REFERENCES ' || ccu.table_name || '.' || ccu.column_name
1658
+ ELSE NULL
1659
+ END
1660
+ ) as foreign_keys
1661
+ FROM information_schema.tables t
1662
+ JOIN information_schema.columns c ON t.table_name = c.table_name
1663
+ LEFT JOIN information_schema.table_constraints tc
1664
+ ON t.table_name = tc.table_name
1665
+ AND tc.constraint_type = 'FOREIGN KEY'
1666
+ LEFT JOIN information_schema.key_column_usage kcu
1667
+ ON tc.constraint_name = kcu.constraint_name
1668
+ LEFT JOIN information_schema.constraint_column_usage ccu
1669
+ ON tc.constraint_name = ccu.constraint_name
1670
+ WHERE t.table_schema = 'public'
1671
+ GROUP BY t.table_name;
1672
+ """
1673
+ )
1674
+ for table, columns, fks in cursor.fetchall():
1675
+ schema_info += f"\nTable {table}:\n"
1676
+ schema_info += "Columns:\n"
1677
+ for col in columns:
1678
+ schema_info += f" - {col}\n"
1679
+ if any(fk for fk in fks if fk is not None):
1680
+ schema_info += "Foreign Keys:\n"
1681
+ for fk in fks:
1682
+ if fk:
1683
+ schema_info += f" - {fk}\n"
1684
+
1685
+ elif "sqlite3" in db_conn.__class__.__module__:
1686
+ cursor = db_conn.cursor()
1687
+ cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
1688
+ tables = cursor.fetchall()
1689
+ for (table_name,) in tables:
1690
+ schema_info += f"\nTable {table_name}:\n"
1691
+ cursor.execute(f"PRAGMA table_info({table_name});")
1692
+ columns = cursor.fetchall()
1693
+ schema_info += "Columns:\n"
1694
+ for col in columns:
1695
+ schema_info += f" - {col[1]} {col[2]}\n"
1696
+
1697
+ cursor.execute(f"PRAGMA foreign_key_list({table_name});")
1698
+ foreign_keys = cursor.fetchall()
1699
+ if foreign_keys:
1700
+ schema_info += "Foreign Keys:\n"
1701
+ for fk in foreign_keys:
1702
+ schema_info += f" - {fk[3]} REFERENCES {fk[2]}({fk[4]})\n"
1703
+
1637
1704
  prompt = f"""
1638
1705
  User request: {request}
1639
- Available tables: {tables or 'Not specified'}
1706
+
1707
+ Database Schema:
1708
+ {schema_info}
1709
+
1640
1710
  {extra_context or ''}
1641
1711
  {f'Query history: {history}' if history else ''}
1642
1712
 
@@ -1655,49 +1725,47 @@ def get_data_response(
1655
1725
 
1656
1726
  failures = []
1657
1727
  for attempt in range(max_retries):
1658
- try:
1659
- llm_response = get_llm_response(
1660
- prompt, npc=npc, format="json", model=model, provider=provider
1661
- )
1728
+ # try:
1729
+ llm_response = get_llm_response(
1730
+ prompt, npc=npc, format="json", model=model, provider=provider
1731
+ )
1662
1732
 
1663
- # Clean response if it's a string
1664
- response_data = llm_response.get("response", {})
1665
- if isinstance(response_data, str):
1666
- response_data = (
1667
- response_data.replace("```json", "").replace("```", "").strip()
1668
- )
1669
- try:
1670
- response_data = json.loads(response_data)
1671
- except json.JSONDecodeError:
1672
- failures.append("Invalid JSON response")
1673
- continue
1674
-
1675
- result = process_data_output(
1676
- response_data,
1677
- db_conn,
1678
- request,
1679
- tables=tables,
1680
- history=failures,
1681
- npc=npc,
1682
- model=model,
1683
- provider=provider,
1733
+ # Clean response if it's a string
1734
+ response_data = llm_response.get("response", {})
1735
+ if isinstance(response_data, str):
1736
+ response_data = (
1737
+ response_data.replace("```json", "").replace("```", "").strip()
1684
1738
  )
1739
+ try:
1740
+ response_data = json.loads(response_data)
1741
+ except json.JSONDecodeError:
1742
+ failures.append("Invalid JSON response")
1743
+ continue
1744
+
1745
+ result = process_data_output(
1746
+ response_data,
1747
+ db_conn,
1748
+ request,
1749
+ tables=tables,
1750
+ history=failures,
1751
+ npc=npc,
1752
+ model=model,
1753
+ provider=provider,
1754
+ )
1685
1755
 
1686
- if result["code"] == 200:
1687
- return result
1688
-
1689
- failures.append(result["response"])
1756
+ if result["code"] == 200:
1757
+ return result
1690
1758
 
1691
- if attempt == max_retries - 1:
1692
- return {
1693
- "response": f"Failed after {max_retries} attempts. Errors: {'; '.join(failures)}",
1694
- "code": 400,
1695
- }
1759
+ failures.append(result["response"])
1696
1760
 
1697
- except Exception as e:
1698
- failures.append(str(e))
1761
+ if attempt == max_retries - 1:
1762
+ return {
1763
+ "response": f"Failed after {max_retries} attempts. Errors: {'; '.join(failures)}",
1764
+ "code": 400,
1765
+ }
1699
1766
 
1700
- return {"response": "Max retries exceeded", "code": 400}
1767
+ # except Exception as e:
1768
+ # failures.append(str(e))
1701
1769
 
1702
1770
 
1703
1771
  def enter_reasoning_human_in_the_loop(
npcsh/npc_compiler.py CHANGED
@@ -788,11 +788,29 @@ class NPC:
788
788
  self.model = model
789
789
  self.db_conn = db_conn
790
790
  if self.db_conn is not None:
791
- self.tables = self.db_conn.execute(
792
- "SELECT name, sql FROM sqlite_master WHERE type='table';"
793
- ).fetchall()
791
+ # Determine database type
792
+ if "psycopg2" in self.db_conn.__class__.__module__:
793
+ # PostgreSQL connection
794
+ cursor = self.db_conn.cursor()
795
+ cursor.execute(
796
+ """
797
+ SELECT table_name, obj_description((quote_ident(table_name))::regclass, 'pg_class')
798
+ FROM information_schema.tables
799
+ WHERE table_schema='public';
800
+ """
801
+ )
802
+ self.tables = cursor.fetchall()
803
+ self.db_type = "postgres"
804
+ elif "sqlite3" in self.db_conn.__class__.__module__:
805
+ # SQLite connection
806
+ self.tables = self.db_conn.execute(
807
+ "SELECT name, sql FROM sqlite_master WHERE type='table';"
808
+ ).fetchall()
809
+ self.db_type = "sqlite"
794
810
  else:
795
811
  self.tables = None
812
+ self.db_type = None
813
+
796
814
  self.provider = provider
797
815
  self.api_url = api_url
798
816
  self.all_tools = all_tools or []
@@ -839,6 +857,45 @@ class NPC:
839
857
  else:
840
858
  self.parsed_npcs = []
841
859
 
860
+ def execute_query(self, query, params=None):
861
+ """Execute a query based on database type"""
862
+ if self.db_type == "postgres":
863
+ cursor = self.db_conn.cursor()
864
+ cursor.execute(query, params or ())
865
+ return cursor.fetchall()
866
+ else: # sqlite
867
+ cursor = self.db_conn.execute(query, params or ())
868
+ return cursor.fetchall()
869
+
870
+ def _determine_db_type(self):
871
+ """Determine if the connection is PostgreSQL or SQLite"""
872
+ # Check the connection object's class name
873
+ conn_type = self.db_conn.__class__.__module__.lower()
874
+
875
+ if "psycopg" in conn_type:
876
+ return "postgres"
877
+ elif "sqlite" in conn_type:
878
+ return "sqlite"
879
+ else:
880
+ raise ValueError(f"Unsupported database type: {conn_type}")
881
+
882
+ def _get_tables(self):
883
+ """Get table information based on database type"""
884
+ if self.db_type == "postgres":
885
+ cursor = self.db_conn.cursor()
886
+ cursor.execute(
887
+ """
888
+ SELECT table_name, obj_description((quote_ident(table_name))::regclass, 'pg_class') as description
889
+ FROM information_schema.tables
890
+ WHERE table_schema='public';
891
+ """
892
+ )
893
+ return cursor.fetchall()
894
+ else: # sqlite
895
+ return self.db_conn.execute(
896
+ "SELECT name, sql FROM sqlite_master WHERE type='table';"
897
+ ).fetchall()
898
+
842
899
  def get_memory(self):
843
900
  return
844
901
 
npcsh/shell_helpers.py CHANGED
@@ -22,6 +22,7 @@ import tty
22
22
  import pty
23
23
  import select
24
24
  import signal
25
+ import platform
25
26
  import time
26
27
 
27
28
 
@@ -66,6 +67,7 @@ from .llm_funcs import (
66
67
  execute_llm_question,
67
68
  get_stream,
68
69
  get_conversation,
70
+ get_llm_response,
69
71
  check_llm_command,
70
72
  generate_image,
71
73
  get_embeddings,
@@ -1090,6 +1092,510 @@ def resize_image_tars(image_path):
1090
1092
  image.save(image_path, format="png")
1091
1093
 
1092
1094
 
1095
+ def execute_plan_command(
1096
+ command, npc=None, model=None, provider=None, messages=None, api_url=None
1097
+ ):
1098
+ parts = command.split(maxsplit=1)
1099
+ if len(parts) < 2:
1100
+ return {
1101
+ "messages": messages,
1102
+ "output": "Usage: /plan <command and schedule description>",
1103
+ }
1104
+
1105
+ request = parts[1]
1106
+ platform_system = platform.system()
1107
+
1108
+ # Create standard directories
1109
+ jobs_dir = os.path.expanduser("~/.npcsh/jobs")
1110
+ logs_dir = os.path.expanduser("~/.npcsh/logs")
1111
+ os.makedirs(jobs_dir, exist_ok=True)
1112
+ os.makedirs(logs_dir, exist_ok=True)
1113
+
1114
+ # First part - just the request formatting
1115
+ linux_request = f"""Convert this scheduling request into a crontab-based script:
1116
+ Request: {request}
1117
+
1118
+ """
1119
+
1120
+ # Second part - the static prompt with examples and requirements
1121
+ linux_prompt_static = """Example for "record CPU usage every 10 minutes":
1122
+ {
1123
+ "script": "#!/bin/bash
1124
+ set -euo pipefail
1125
+ IFS=$'\\n\\t'
1126
+
1127
+ LOGFILE=\"$HOME/.npcsh/logs/cpu_usage.log\"
1128
+
1129
+ log_info() {
1130
+ echo \"[$(date '+%Y-%m-%d %H:%M:%S')] [INFO] $*\" >> \"$LOGFILE\"
1131
+ }
1132
+
1133
+ log_error() {
1134
+ echo \"[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] $*\" >> \"$LOGFILE\"
1135
+ }
1136
+
1137
+ record_cpu() {
1138
+ local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
1139
+ local cpu_usage=$(top -bn1 | grep 'Cpu(s)' | awk '{print $2}')
1140
+ log_info \"CPU Usage: $cpu_usage%\"
1141
+ }
1142
+
1143
+ record_cpu",
1144
+ "schedule": "*/10 * * * *",
1145
+ "description": "Record CPU usage every 10 minutes",
1146
+ "name": "record_cpu_usage"
1147
+ }
1148
+
1149
+ Your response must be valid json with the following keys:
1150
+ - script: The shell script content with proper functions and error handling. special characters must be escaped to ensure python json.loads will work correctly.
1151
+ - schedule: Crontab expression (5 fields: minute hour day month weekday)
1152
+ - description: A human readable description
1153
+ - name: A unique name for the job
1154
+
1155
+ Do not include any additional markdown formatting in your response or leading ```json tags."""
1156
+
1157
+ mac_request = f"""Convert this scheduling request into a launchd-compatible script:
1158
+ Request: {request}
1159
+
1160
+ """
1161
+
1162
+ mac_prompt_static = """Example for "record CPU usage every 10 minutes":
1163
+ {
1164
+ "script": "#!/bin/bash
1165
+ set -euo pipefail
1166
+ IFS=$'\\n\\t'
1167
+
1168
+ LOGFILE=\"$HOME/.npcsh/logs/cpu_usage.log\"
1169
+
1170
+ log_info() {
1171
+ echo \"[$(date '+%Y-%m-%d %H:%M:%S')] [INFO] $*\" >> \"$LOGFILE\"
1172
+ }
1173
+
1174
+ log_error() {
1175
+ echo \"[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] $*\" >> \"$LOGFILE\"
1176
+ }
1177
+
1178
+ record_cpu() {
1179
+ local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
1180
+ local cpu_usage=$(top -l 1 | grep 'CPU usage' | awk '{print $3}' | tr -d '%')
1181
+ log_info \"CPU Usage: $cpu_usage%\"
1182
+ }
1183
+
1184
+ record_cpu",
1185
+ "schedule": "600",
1186
+ "description": "Record CPU usage every 10 minutes",
1187
+ "name": "record_cpu_usage"
1188
+ }
1189
+
1190
+ Your response must be valid json with the following keys:
1191
+ - script: The shell script content with proper functions and error handling. special characters must be escaped to ensure python json.loads will work correctly.
1192
+ - schedule: Interval in seconds (e.g. 600 for 10 minutes)
1193
+ - description: A human readable description
1194
+ - name: A unique name for the job
1195
+
1196
+ Do not include any additional markdown formatting in your response or leading ```json tags."""
1197
+
1198
+ windows_request = f"""Convert this scheduling request into a PowerShell script with Task Scheduler parameters:
1199
+ Request: {request}
1200
+
1201
+ """
1202
+
1203
+ windows_prompt_static = """Example for "record CPU usage every 10 minutes":
1204
+ {
1205
+ "script": "$ErrorActionPreference = 'Stop'
1206
+
1207
+ $LogFile = \"$HOME\\.npcsh\\logs\\cpu_usage.log\"
1208
+
1209
+ function Write-Log {
1210
+ param($Message, $Type = 'INFO')
1211
+ $timestamp = Get-Date -Format 'yyyy-MM-dd HH:mm:ss'
1212
+ \"[$timestamp] [$Type] $Message\" | Out-File -FilePath $LogFile -Append
1213
+ }
1214
+
1215
+ function Get-CpuUsage {
1216
+ try {
1217
+ $cpu = (Get-Counter '\\Processor(_Total)\\% Processor Time').CounterSamples.CookedValue
1218
+ Write-Log \"CPU Usage: $($cpu)%\"
1219
+ } catch {
1220
+ Write-Log $_.Exception.Message 'ERROR'
1221
+ throw
1222
+ }
1223
+ }
1224
+
1225
+ Get-CpuUsage",
1226
+ "schedule": "/sc minute /mo 10",
1227
+ "description": "Record CPU usage every 10 minutes",
1228
+ "name": "record_cpu_usage"
1229
+ }
1230
+
1231
+ Your response must be valid json with the following keys:
1232
+ - script: The PowerShell script content with proper functions and error handling. special characters must be escaped to ensure python json.loads will work correctly.
1233
+ - schedule: Task Scheduler parameters (e.g. /sc minute /mo 10)
1234
+ - description: A human readable description
1235
+ - name: A unique name for the job
1236
+
1237
+ Do not include any additional markdown formatting in your response or leading ```json tags."""
1238
+
1239
+ prompts = {
1240
+ "Linux": linux_request + linux_prompt_static,
1241
+ "Darwin": mac_request + mac_prompt_static,
1242
+ "Windows": windows_request + windows_prompt_static,
1243
+ }
1244
+
1245
+ prompt = prompts[platform_system]
1246
+ response = get_llm_response(
1247
+ prompt, npc=npc, model=model, provider=provider, format="json"
1248
+ )
1249
+ schedule_info = response.get("response")
1250
+ print("Received schedule info:", schedule_info)
1251
+
1252
+ job_name = f"job_{schedule_info['name']}"
1253
+
1254
+ if platform_system == "Windows":
1255
+ script_path = os.path.join(jobs_dir, f"{job_name}.ps1")
1256
+ else:
1257
+ script_path = os.path.join(jobs_dir, f"{job_name}.sh")
1258
+
1259
+ log_path = os.path.join(logs_dir, f"{job_name}.log")
1260
+
1261
+ # Write the script
1262
+ with open(script_path, "w") as f:
1263
+ f.write(schedule_info["script"])
1264
+ os.chmod(script_path, 0o755)
1265
+
1266
+ if platform_system == "Linux":
1267
+ try:
1268
+ current_crontab = subprocess.check_output(["crontab", "-l"], text=True)
1269
+ except subprocess.CalledProcessError:
1270
+ current_crontab = ""
1271
+
1272
+ crontab_line = f"{schedule_info['schedule']} {script_path} >> {log_path} 2>&1"
1273
+ new_crontab = current_crontab.strip() + "\n" + crontab_line + "\n"
1274
+
1275
+ with tempfile.NamedTemporaryFile(mode="w") as tmp:
1276
+ tmp.write(new_crontab)
1277
+ tmp.flush()
1278
+ subprocess.run(["crontab", tmp.name], check=True)
1279
+
1280
+ output = f"""Job created successfully:
1281
+ - Description: {schedule_info['description']}
1282
+ - Schedule: {schedule_info['schedule']}
1283
+ - Script: {script_path}
1284
+ - Log: {log_path}
1285
+ - Crontab entry: {crontab_line}"""
1286
+
1287
+ elif platform_system == "Darwin":
1288
+ plist_dir = os.path.expanduser("~/Library/LaunchAgents")
1289
+ os.makedirs(plist_dir, exist_ok=True)
1290
+ plist_path = os.path.join(plist_dir, f"com.npcsh.{job_name}.plist")
1291
+
1292
+ plist_content = f"""<?xml version="1.0" encoding="UTF-8"?>
1293
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
1294
+ <plist version="1.0">
1295
+ <dict>
1296
+ <key>Label</key>
1297
+ <string>com.npcsh.{job_name}</string>
1298
+ <key>ProgramArguments</key>
1299
+ <array>
1300
+ <string>{script_path}</string>
1301
+ </array>
1302
+ <key>StartInterval</key>
1303
+ <integer>{schedule_info['schedule']}</integer>
1304
+ <key>StandardOutPath</key>
1305
+ <string>{log_path}</string>
1306
+ <key>StandardErrorPath</key>
1307
+ <string>{log_path}</string>
1308
+ <key>RunAtLoad</key>
1309
+ <true/>
1310
+ </dict>
1311
+ </plist>"""
1312
+
1313
+ with open(plist_path, "w") as f:
1314
+ f.write(plist_content)
1315
+
1316
+ subprocess.run(["launchctl", "unload", plist_path], check=False)
1317
+ subprocess.run(["launchctl", "load", plist_path], check=True)
1318
+
1319
+ output = f"""Job created successfully:
1320
+ - Description: {schedule_info['description']}
1321
+ - Schedule: Every {schedule_info['schedule']} seconds
1322
+ - Script: {script_path}
1323
+ - Log: {log_path}
1324
+ - Launchd plist: {plist_path}"""
1325
+
1326
+ elif platform_system == "Windows":
1327
+ task_name = f"NPCSH_{job_name}"
1328
+
1329
+ # Parse schedule_info['schedule'] into individual parameters
1330
+ schedule_params = schedule_info["schedule"].split()
1331
+
1332
+ cmd = (
1333
+ [
1334
+ "schtasks",
1335
+ "/create",
1336
+ "/tn",
1337
+ task_name,
1338
+ "/tr",
1339
+ f"powershell -NoProfile -ExecutionPolicy Bypass -File {script_path}",
1340
+ ]
1341
+ + schedule_params
1342
+ + ["/f"]
1343
+ ) # /f forces creation if task exists
1344
+
1345
+ subprocess.run(cmd, check=True)
1346
+
1347
+ output = f"""Job created successfully:
1348
+ - Description: {schedule_info['description']}
1349
+ - Schedule: {schedule_info['schedule']}
1350
+ - Script: {script_path}
1351
+ - Log: {log_path}
1352
+ - Task name: {task_name}"""
1353
+
1354
+ return {"messages": messages, "output": output}
1355
+
1356
+
1357
+ def execute_trigger_command(
1358
+ command, npc=None, model=None, provider=None, messages=None, api_url=None
1359
+ ):
1360
+ parts = command.split(maxsplit=1)
1361
+ if len(parts) < 2:
1362
+ return {
1363
+ "messages": messages,
1364
+ "output": "Usage: /trigger <trigger condition and action description>",
1365
+ }
1366
+
1367
+ request = parts[1]
1368
+ platform_system = platform.system()
1369
+
1370
+ linux_request = f"""Convert this trigger request into a single event-monitoring daemon script:
1371
+ Request: {request}
1372
+
1373
+ """
1374
+
1375
+ linux_prompt_static = """Example for "Move PDFs from Downloads to Documents/PDFs":
1376
+ {
1377
+ "script": "#!/bin/bash\\nset -euo pipefail\\nIFS=$'\\n\\t'\\n\\nLOGFILE=\\\"$HOME/.npcsh/logs/pdf_mover.log\\\"\\nSOURCE=\\\"$HOME/Downloads\\\"\\nTARGET=\\\"$HOME/Documents/PDFs\\\"\\n\\nlog_info() {\\n echo \\\"[$(date '+%Y-%m-%d %H:%M:%S')] [INFO] $*\\\" >> \\\"$LOGFILE\\\"\\n}\\n\\nlog_error() {\\n echo \\\"[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] $*\\\" >> \\\"$LOGFILE\\\"\\n}\\n\\ninotifywait -m -q -e create --format '%w%f' \\\"$SOURCE\\\" | while read filepath; do\\n if [[ \\\"$filepath\\\" =~ \\\\.pdf$ ]]; then\\n mv \\\"$filepath\\\" \\\"$TARGET/\\\" && log_info \\\"Moved $filepath to $TARGET\\\" || log_error \\\"Failed to move $filepath\\\"\\n fi\\ndone",
1378
+ "name": "pdf_mover",
1379
+ "description": "Move PDF files from Downloads to Documents/PDFs folder"
1380
+ }
1381
+
1382
+ The script MUST:
1383
+ - Use inotifywait -m -q -e create --format '%w%f' to get full paths
1384
+ - Double quote ALL file operations: "$SOURCE/$FILE"
1385
+ - Use $HOME for absolute paths
1386
+ - Echo both success and failure messages to log
1387
+
1388
+ Your response must be valid json with the following keys:
1389
+ - script: The shell script content with proper functions and error handling
1390
+ - name: A unique name for the trigger
1391
+ - description: A human readable description
1392
+
1393
+ Do not include any additional markdown formatting in your response."""
1394
+
1395
+ mac_request = f"""Convert this trigger request into a single event-monitoring daemon script:
1396
+ Request: {request}
1397
+
1398
+ """
1399
+
1400
+ mac_prompt_static = """Example for "Move PDFs from Downloads to Documents/PDFs":
1401
+ {
1402
+ "script": "#!/bin/bash\\nset -euo pipefail\\nIFS=$'\\n\\t'\\n\\nLOGFILE=\\\"$HOME/.npcsh/logs/pdf_mover.log\\\"\\nSOURCE=\\\"$HOME/Downloads\\\"\\nTARGET=\\\"$HOME/Documents/PDFs\\\"\\n\\nlog_info() {\\n echo \\\"[$(date '+%Y-%m-%d %H:%M:%S')] [INFO] $*\\\" >> \\\"$LOGFILE\\\"\\n}\\n\\nlog_error() {\\n echo \\\"[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] $*\\\" >> \\\"$LOGFILE\\\"\\n}\\n\\nfswatch -0 -r -e '.*' --event Created --format '%p' \\\"$SOURCE\\\" | while read -d '' filepath; do\\n if [[ \\\"$filepath\\\" =~ \\\\.pdf$ ]]; then\\n mv \\\"$filepath\\\" \\\"$TARGET/\\\" && log_info \\\"Moved $filepath to $TARGET\\\" || log_error \\\"Failed to move $filepath\\\"\\n fi\\ndone",
1403
+ "name": "pdf_mover",
1404
+ "description": "Move PDF files from Downloads to Documents/PDFs folder"
1405
+ }
1406
+
1407
+ The script MUST:
1408
+ - Use fswatch -0 -r -e '.*' --event Created --format '%p' to get full paths
1409
+ - Double quote ALL file operations: "$SOURCE/$FILE"
1410
+ - Use $HOME for absolute paths
1411
+ - Echo both success and failure messages to log
1412
+
1413
+ Your response must be valid json with the following keys:
1414
+ - script: The shell script content with proper functions and error handling
1415
+ - name: A unique name for the trigger
1416
+ - description: A human readable description
1417
+
1418
+ Do not include any additional markdown formatting in your response."""
1419
+
1420
+ windows_request = f"""Convert this trigger request into a single event-monitoring daemon script:
1421
+ Request: {request}
1422
+
1423
+ """
1424
+
1425
+ windows_prompt_static = """Example for "Move PDFs from Downloads to Documents/PDFs":
1426
+ {
1427
+ "script": "$ErrorActionPreference = 'Stop'\\n\\n$LogFile = \\\"$HOME\\.npcsh\\logs\\pdf_mover.log\\\"\\n$Source = \\\"$HOME\\Downloads\\\"\\n$Target = \\\"$HOME\\Documents\\PDFs\\\"\\n\\nfunction Write-Log {\\n param($Message, $Type = 'INFO')\\n $timestamp = Get-Date -Format 'yyyy-MM-dd HH:mm:ss'\\n \\\"[$timestamp] [$Type] $Message\\\" | Out-File -FilePath $LogFile -Append\\n}\\n\\n$watcher = New-Object System.IO.FileSystemWatcher\\n$watcher.Path = $Source\\n$watcher.Filter = \\\"*.pdf\\\"\\n$watcher.IncludeSubdirectories = $true\\n$watcher.EnableRaisingEvents = $true\\n\\n$action = {\\n $path = $Event.SourceEventArgs.FullPath\\n try {\\n Move-Item -Path $path -Destination $Target\\n Write-Log \\\"Moved $path to $Target\\\"\\n } catch {\\n Write-Log $_.Exception.Message 'ERROR'\\n }\\n}\\n\\nRegister-ObjectEvent $watcher 'Created' -Action $action\\n\\nwhile ($true) { Start-Sleep 1 }",
1428
+ "name": "pdf_mover",
1429
+ "description": "Move PDF files from Downloads to Documents/PDFs folder"
1430
+ }
1431
+
1432
+ The script MUST:
1433
+ - Use FileSystemWatcher for monitoring
1434
+ - Double quote ALL file operations: "$Source\\$File"
1435
+ - Use $HOME for absolute paths
1436
+ - Echo both success and failure messages to log
1437
+
1438
+ Your response must be valid json with the following keys:
1439
+ - script: The PowerShell script content with proper functions and error handling
1440
+ - name: A unique name for the trigger
1441
+ - description: A human readable description
1442
+
1443
+ Do not include any additional markdown formatting in your response."""
1444
+
1445
+ prompts = {
1446
+ "Linux": linux_request + linux_prompt_static,
1447
+ "Darwin": mac_request + mac_prompt_static,
1448
+ "Windows": windows_request + windows_prompt_static,
1449
+ }
1450
+
1451
+ prompt = prompts[platform_system]
1452
+ response = get_llm_response(
1453
+ prompt, npc=npc, model=model, provider=provider, format="json"
1454
+ )
1455
+ trigger_info = response.get("response")
1456
+ print("Trigger info:", trigger_info)
1457
+
1458
+ triggers_dir = os.path.expanduser("~/.npcsh/triggers")
1459
+ logs_dir = os.path.expanduser("~/.npcsh/logs")
1460
+ os.makedirs(triggers_dir, exist_ok=True)
1461
+ os.makedirs(logs_dir, exist_ok=True)
1462
+
1463
+ trigger_name = f"trigger_{trigger_info['name']}"
1464
+ log_path = os.path.join(logs_dir, f"{trigger_name}.log")
1465
+
1466
+ if platform_system == "Linux":
1467
+ script_path = os.path.join(triggers_dir, f"{trigger_name}.sh")
1468
+
1469
+ with open(script_path, "w") as f:
1470
+ f.write(trigger_info["script"])
1471
+ os.chmod(script_path, 0o755)
1472
+
1473
+ service_dir = os.path.expanduser("~/.config/systemd/user")
1474
+ os.makedirs(service_dir, exist_ok=True)
1475
+ service_path = os.path.join(service_dir, f"npcsh-{trigger_name}.service")
1476
+
1477
+ service_content = f"""[Unit]
1478
+ Description={trigger_info['description']}
1479
+ After=network.target
1480
+
1481
+ [Service]
1482
+ Type=simple
1483
+ ExecStart={script_path}
1484
+ Restart=always
1485
+ StandardOutput=append:{log_path}
1486
+ StandardError=append:{log_path}
1487
+
1488
+ [Install]
1489
+ WantedBy=default.target
1490
+ """
1491
+
1492
+ with open(service_path, "w") as f:
1493
+ f.write(service_content)
1494
+
1495
+ subprocess.run(["systemctl", "--user", "daemon-reload"])
1496
+ subprocess.run(
1497
+ ["systemctl", "--user", "enable", f"npcsh-{trigger_name}.service"]
1498
+ )
1499
+ subprocess.run(
1500
+ ["systemctl", "--user", "start", f"npcsh-{trigger_name}.service"]
1501
+ )
1502
+
1503
+ status = subprocess.run(
1504
+ ["systemctl", "--user", "status", f"npcsh-{trigger_name}.service"],
1505
+ capture_output=True,
1506
+ text=True,
1507
+ )
1508
+
1509
+ output = f"""Trigger service created:
1510
+ - Description: {trigger_info['description']}
1511
+ - Script: {script_path}
1512
+ - Service: {service_path}
1513
+ - Log: {log_path}
1514
+
1515
+ Status:
1516
+ {status.stdout}"""
1517
+
1518
+ elif platform_system == "Darwin":
1519
+ script_path = os.path.join(triggers_dir, f"{trigger_name}.sh")
1520
+
1521
+ with open(script_path, "w") as f:
1522
+ f.write(trigger_info["script"])
1523
+ os.chmod(script_path, 0o755)
1524
+
1525
+ plist_dir = os.path.expanduser("~/Library/LaunchAgents")
1526
+ os.makedirs(plist_dir, exist_ok=True)
1527
+ plist_path = os.path.join(plist_dir, f"com.npcsh.{trigger_name}.plist")
1528
+
1529
+ plist_content = f"""<?xml version="1.0" encoding="UTF-8"?>
1530
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
1531
+ <plist version="1.0">
1532
+ <dict>
1533
+ <key>Label</key>
1534
+ <string>com.npcsh.{trigger_name}</string>
1535
+ <key>ProgramArguments</key>
1536
+ <array>
1537
+ <string>{script_path}</string>
1538
+ </array>
1539
+ <key>RunAtLoad</key>
1540
+ <true/>
1541
+ <key>KeepAlive</key>
1542
+ <true/>
1543
+ <key>StandardOutPath</key>
1544
+ <string>{log_path}</string>
1545
+ <key>StandardErrorPath</key>
1546
+ <string>{log_path}</string>
1547
+ </dict>
1548
+ </plist>"""
1549
+
1550
+ with open(plist_path, "w") as f:
1551
+ f.write(plist_content)
1552
+
1553
+ subprocess.run(["launchctl", "unload", plist_path], check=False)
1554
+ subprocess.run(["launchctl", "load", plist_path], check=True)
1555
+
1556
+ output = f"""Trigger service created:
1557
+ - Description: {trigger_info['description']}
1558
+ - Script: {script_path}
1559
+ - Launchd plist: {plist_path}
1560
+ - Log: {log_path}"""
1561
+
1562
+ elif platform_system == "Windows":
1563
+ script_path = os.path.join(triggers_dir, f"{trigger_name}.ps1")
1564
+
1565
+ with open(script_path, "w") as f:
1566
+ f.write(trigger_info["script"])
1567
+
1568
+ task_name = f"NPCSH_{trigger_name}"
1569
+
1570
+ # Create a scheduled task that runs at startup
1571
+ cmd = [
1572
+ "schtasks",
1573
+ "/create",
1574
+ "/tn",
1575
+ task_name,
1576
+ "/tr",
1577
+ f"powershell -NoProfile -ExecutionPolicy Bypass -File {script_path}",
1578
+ "/sc",
1579
+ "onstart",
1580
+ "/ru",
1581
+ "System",
1582
+ "/f", # Force creation
1583
+ ]
1584
+
1585
+ subprocess.run(cmd, check=True)
1586
+
1587
+ # Start the task immediately
1588
+ subprocess.run(["schtasks", "/run", "/tn", task_name])
1589
+
1590
+ output = f"""Trigger service created:
1591
+ - Description: {trigger_info['description']}
1592
+ - Script: {script_path}
1593
+ - Task name: {task_name}
1594
+ - Log: {log_path}"""
1595
+
1596
+ return {"messages": messages, "output": output}
1597
+
1598
+
1093
1599
  def enter_wander_mode(args, messages, npc_compiler, npc, model, provider):
1094
1600
  """
1095
1601
  Wander mode is an exploratory mode where an LLM is given a task and they begin to wander through space.
@@ -1334,6 +1840,25 @@ def execute_slash_command(
1334
1840
  print(output)
1335
1841
  elif command_name == "tools":
1336
1842
  return {"messages": messages, "output": print_tools(tools)}
1843
+ elif command_name == "plan":
1844
+ return execute_plan_command(
1845
+ command,
1846
+ npc=npc,
1847
+ model=model,
1848
+ provider=provider,
1849
+ api_url=api_url,
1850
+ messages=messages,
1851
+ )
1852
+ elif command_name == "trigger":
1853
+ return execute_trigger_command(
1854
+ command,
1855
+ npc=npc,
1856
+ model=model,
1857
+ provider=provider,
1858
+ api_url=api_url,
1859
+ messages=messages,
1860
+ )
1861
+
1337
1862
  elif command_name == "plonk":
1338
1863
  request = " ".join(args)
1339
1864
  plonk_call = plonk(
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: npcsh
3
- Version: 0.3.27.7
3
+ Version: 0.3.29
4
4
  Summary: npcsh is a command line tool for integrating LLMs into everyday workflows and for orchestrating teams of NPCs.
5
5
  Home-page: https://github.com/cagostino/npcsh
6
6
  Author: Christopher Agostino
@@ -32,6 +32,7 @@ Requires-Dist: duckduckgo-search
32
32
  Requires-Dist: flask
33
33
  Requires-Dist: flask_cors
34
34
  Requires-Dist: redis
35
+ Requires-Dist: psycopg2-binary
35
36
  Requires-Dist: flask_sse
36
37
  Provides-Extra: lite
37
38
  Requires-Dist: anthropic; extra == "lite"
@@ -468,6 +469,7 @@ if __name__ == "__main__":
468
469
  ### Linux install
469
470
  ```bash
470
471
 
472
+ # for audio primarily
471
473
  sudo apt-get install espeak
472
474
  sudo apt-get install portaudio19-dev python3-pyaudio
473
475
  sudo apt-get install alsa-base alsa-utils
@@ -475,6 +477,10 @@ sudo apt-get install libcairo2-dev
475
477
  sudo apt-get install libgirepository1.0-dev
476
478
  sudo apt-get install ffmpeg
477
479
 
480
+ # for triggers
481
+ sudo apt install inotify-tools
482
+
483
+
478
484
  #And if you don't have ollama installed, use this:
479
485
  curl -fsSL https://ollama.com/install.sh | sh
480
486
 
@@ -482,25 +488,46 @@ ollama pull llama3.2
482
488
  ollama pull llava:7b
483
489
  ollama pull nomic-embed-text
484
490
  pip install npcsh
485
- ```
486
- If you'd like to install the abilities to use STT and TTS, additionall install the following
487
- ```
488
- pip install openai-whisper pyaudio gtts playsound
489
- ```
491
+ # if you want to install with the API libraries
492
+ pip install npcsh[lite]
493
+ # if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
494
+ pip install npcsh[local]
495
+ # if you want to use tts/stt
496
+ pip install npcsh[whisper]
497
+
498
+ # if you want everything:
499
+ pip install npcsh[all]
500
+
490
501
 
491
502
 
492
503
 
493
504
  ### Mac install
494
505
  ```bash
506
+ #mainly for audio
495
507
  brew install portaudio
496
508
  brew install ffmpeg
509
+ brew install pygobject3
510
+
511
+ # for triggers
512
+ brew install fswatch
513
+
514
+
497
515
  brew install ollama
498
516
  brew services start ollama
499
- brew install pygobject3
500
517
  ollama pull llama3.2
501
518
  ollama pull llava:7b
502
519
  ollama pull nomic-embed-text
503
520
  pip install npcsh
521
+ # if you want to install with the API libraries
522
+ pip install npcsh[lite]
523
+ # if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
524
+ pip install npcsh[local]
525
+ # if you want to use tts/stt
526
+ pip install npcsh[whisper]
527
+
528
+ # if you want everything:
529
+ pip install npcsh[all]
530
+
504
531
  ```
505
532
  ### Windows Install
506
533
 
@@ -513,6 +540,16 @@ ollama pull llama3.2
513
540
  ollama pull llava:7b
514
541
  ollama pull nomic-embed-text
515
542
  pip install npcsh
543
+ # if you want to install with the API libraries
544
+ pip install npcsh[lite]
545
+ # if you want the full local package set up (ollama, diffusers, transformers, cuda etc.)
546
+ pip install npcsh[local]
547
+ # if you want to use tts/stt
548
+ pip install npcsh[whisper]
549
+
550
+ # if you want everything:
551
+ pip install npcsh[all]
552
+
516
553
  ```
517
554
  As of now, npcsh appears to work well with some of the core functionalities like /ots and /whisper.
518
555
 
@@ -1050,13 +1087,30 @@ npc ots -f test_data/catfight.PNG
1050
1087
  ### Plan : Schedule tasks to be run at regular intervals (under construction)
1051
1088
  Use the /plan macro to schedule tasks to be run at regular intervals.
1052
1089
  ```npcsh
1053
- npcsh> /plan run a rag search on the files in the current directory every 5 minutes
1090
+ npcsh> /plan run a rag search for 'moonbeam' on the files in the current directory every 5 minutes
1091
+ ```
1092
+
1093
+ ```npcsh
1094
+ npcsh> /plan record the cpu usage every 5 minutes
1054
1095
  ```
1055
1096
 
1097
+ ```npcsh
1098
+ npcsh> /plan record the apps that are using the most ram every 5 minutes
1099
+ ```
1100
+
1101
+
1102
+
1103
+
1056
1104
  ```bash
1057
1105
  npc plan -f 30m -t 'task'
1058
1106
  ```
1059
1107
 
1108
+ Plan will use platform-specific scheduling tools. In particular, it uses crontab on Linux, launchd on macOS, and Task Scheduler on Windows.
1109
+
1110
+ Implementations have been provided for Mac and Windows, but as of 3/23/2025 they have only been tested on Linux.
1111
+
1112
+
1113
+
1060
1114
  ### Plonk : Computer Control
1061
1115
  Use the /plonk macro to allow the LLM to control your computer.
1062
1116
  ```npcsh
@@ -1342,6 +1396,18 @@ npcsh> /spool model=llama3.3
1342
1396
  npc spool -n npc.npc
1343
1397
  ```
1344
1398
 
1399
+ ### Trigger
1400
+ Use the /trigger macro to execute specific actions based on certain conditions.
1401
+
1402
+ ```npcsh
1403
+ npcsh> /trigger watch for new PDF downloads in the ~/Downloads directory and move them
1404
+ to the ~/Documents/PDFs directory. Ensure that the directory exists or create it if it does not.
1405
+ ```
1406
+
1407
+ On Linux, trigger makes use of inotify-tools to watch for file system events. On macOS, it uses fswatch, and on Windows, it uses Watch-Command.
1408
+
1409
+
1410
+
1345
1411
 
1346
1412
 
1347
1413
  ### Vixynt: Image Generation
@@ -10,18 +10,18 @@ npcsh/helpers.py,sha256=aiK6kzf1jEYTHFssSxMOhPWDIcRvE0jXyrxWqh0coek,18992
10
10
  npcsh/image.py,sha256=nVnVsd0yqmUoyoyJWqbPLlEmjtFctNnpkiKZLdTSVIg,10533
11
11
  npcsh/image_gen.py,sha256=PZQfFBtyXQuxfs1x2mJGL4RkudnKdzZp5Xem9gSevG0,2047
12
12
  npcsh/knowledge_graph.py,sha256=YIE0SJmUUfYrn1GqG6L7lWG0dIsQBun7A5CeA86PO6o,31442
13
- npcsh/llm_funcs.py,sha256=ZZIVm6gXTCSxP8AUNe4kHuOTlhxqFtUtLdroHxJC7gE,66360
13
+ npcsh/llm_funcs.py,sha256=U10KSPqsJp4oy2edNh7mjAjYbIuxjGO3S-zaSi-rX0o,69030
14
14
  npcsh/load_data.py,sha256=Vh6YGxFVGWBMcn4cDrIgy8sC7QGCrWk0niJyR3l-k9U,1967
15
15
  npcsh/main.py,sha256=rpf_2ysx3cR3eHsrvZApprJ-3D3-OrWcJ15bM1bc97I,81
16
16
  npcsh/model_runner.py,sha256=riS6Hx3M7mj5erMm7OwBA8yufXYseVEbMYTRSfaDh2Y,6427
17
- npcsh/npc_compiler.py,sha256=Sij79Fs40WKpMgHiDuBpgY7IWrAlX127p2WXxg9H2os,102026
17
+ npcsh/npc_compiler.py,sha256=2l3-B2czt_uZARJg_5PObFjjkr8WbmBY2rBJBTq3w0k,104268
18
18
  npcsh/npc_sysenv.py,sha256=g5LPYo8g6jlKd22fWVGghanLCr7tqjxytKvFbu0k-C0,11505
19
19
  npcsh/plonk.py,sha256=ewdkX6K1V9kLFkrNsQ5oGXLiuFaLev9mxXBY-B0PKus,9926
20
20
  npcsh/response.py,sha256=DFh6uoIw06SqPqKGv9DGHzoltprUHJSz6ZxgFBZRzX8,22478
21
21
  npcsh/search.py,sha256=K3AcYlefm_YNWQO_Yq5kTIRFKIIN3xX70CC3z038ox0,8605
22
22
  npcsh/serve.py,sha256=Zy47IdaGCt5ziGnQ5KHTQH1K7xXyo7trwUMVDOLNxvU,48246
23
23
  npcsh/shell.py,sha256=EFsvNJCW5EYdikkbm0KHQRu0tNFh0HcHB_62k5gqcUI,18218
24
- npcsh/shell_helpers.py,sha256=Y0ZSR5AK5QovlJUF5WcqOwaIeCwZxM-LDo2ukPlDA8c,96185
24
+ npcsh/shell_helpers.py,sha256=79bOwxfUKpgGzcYLLAlNb0Hx3WwBm8Snk35PBhyXuPY,115158
25
25
  npcsh/stream.py,sha256=gEwanrb5g4Fmu10fVN-3Gu_i434GWtE6zNZrhjQJ6EA,21995
26
26
  npcsh/video.py,sha256=Fv9Sii6LIEOI6D_1eoEu7DA8Es3mUYxEPcX3dac2Lt0,1935
27
27
  npcsh/npc_team/corca.npc,sha256=9qs7922thBESU4r5GRygqAjvg9zvhQyMYqqyb4wwSew,662
@@ -43,28 +43,28 @@ npcsh/npc_team/tools/image_generation.tool,sha256=CaTkdjxWLFtMAghPvZnToMSwHbMQVu
43
43
  npcsh/npc_team/tools/local_search.tool,sha256=g9Hc_Xx08fc6lu1IHvdwYU_18KxcOoHDwvIAfrGmn3U,6852
44
44
  npcsh/npc_team/tools/screen_cap.tool,sha256=v7l54PWWdgszdd-n_DFqI9nyMlBSeYeNIfQsdFYSZ_4,1389
45
45
  npcsh/npc_team/tools/sql_executor.tool,sha256=laEOBoyX2p3dbQpY2HoWnj5IXvxBoZ4kghGMkBwymQA,825
46
- npcsh-0.3.27.7.data/data/npcsh/npc_team/calculator.tool,sha256=ZKmQ0VusuJj8Ib5MDqC8MTDaqWIZCxzpazWPVkYZqYc,197
47
- npcsh-0.3.27.7.data/data/npcsh/npc_team/celona.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
48
- npcsh-0.3.27.7.data/data/npcsh/npc_team/corca.npc,sha256=9qs7922thBESU4r5GRygqAjvg9zvhQyMYqqyb4wwSew,662
49
- npcsh-0.3.27.7.data/data/npcsh/npc_team/eriane.npc,sha256=5z6L-RjEouEp06SLOzkQoOCEi0eb1K-CxVnvyIbNK3g,299
50
- npcsh-0.3.27.7.data/data/npcsh/npc_team/foreman.npc,sha256=WqB8jLfBToGmr8c1vip1KOnTHxfXlGXwDUGnZoDMQr0,327
51
- npcsh-0.3.27.7.data/data/npcsh/npc_team/generic_search.tool,sha256=sYA4aUuZm84vpx-5tNi6ADq9ywHaj_YfLt6PI0ZblUo,745
52
- npcsh-0.3.27.7.data/data/npcsh/npc_team/image_generation.tool,sha256=CaTkdjxWLFtMAghPvZnToMSwHbMQVusojhd9R9jybmI,577
53
- npcsh-0.3.27.7.data/data/npcsh/npc_team/lineru.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
54
- npcsh-0.3.27.7.data/data/npcsh/npc_team/local_search.tool,sha256=g9Hc_Xx08fc6lu1IHvdwYU_18KxcOoHDwvIAfrGmn3U,6852
55
- npcsh-0.3.27.7.data/data/npcsh/npc_team/maurawa.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
56
- npcsh-0.3.27.7.data/data/npcsh/npc_team/npcsh.ctx,sha256=VOd7omCBo_764gKCYuHxJcyVc61oC5YjB7rnCbDhnYU,275
57
- npcsh-0.3.27.7.data/data/npcsh/npc_team/raone.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
58
- npcsh-0.3.27.7.data/data/npcsh/npc_team/screen_cap.tool,sha256=v7l54PWWdgszdd-n_DFqI9nyMlBSeYeNIfQsdFYSZ_4,1389
59
- npcsh-0.3.27.7.data/data/npcsh/npc_team/sibiji.npc,sha256=MJZHU9xXmvUbZvwpX1wWinvkrwYiKm1J63t37l0EYGE,202
60
- npcsh-0.3.27.7.data/data/npcsh/npc_team/slean.npc,sha256=to3-d7qWMzhrYAeXalzNqVdt_3wROFGEfhprVXDttos,326
61
- npcsh-0.3.27.7.data/data/npcsh/npc_team/sql_executor.tool,sha256=laEOBoyX2p3dbQpY2HoWnj5IXvxBoZ4kghGMkBwymQA,825
62
- npcsh-0.3.27.7.data/data/npcsh/npc_team/test_pipeline.py,sha256=GKIcqw0fXDHsUDfMvu7GTj5cfPnqs7aX5xxiKPs2xCc,5657
63
- npcsh-0.3.27.7.data/data/npcsh/npc_team/turnic.npc,sha256=odTFzQTN01-xaWXvjoC698htaFi6mvrzfKMAob8Yqb0,335
64
- npcsh-0.3.27.7.data/data/npcsh/npc_team/welxor.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
65
- npcsh-0.3.27.7.dist-info/licenses/LICENSE,sha256=j0YPvce7Ng9e32zYOu0EmXjXeJ0Nwawd0RA3uSGGH4E,1070
66
- npcsh-0.3.27.7.dist-info/METADATA,sha256=7FXydz8qTl1Z8rXG9bN9cvWjEcplss0gGllu7uYzEiw,81443
67
- npcsh-0.3.27.7.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
68
- npcsh-0.3.27.7.dist-info/entry_points.txt,sha256=Y2rAM_m1er_Effxc0DXtGh36sC1FOUfefqGAt6vEte0,64
69
- npcsh-0.3.27.7.dist-info/top_level.txt,sha256=kHSNgKMCkfjV95-DH0YSp1LLBi0HXdF3w57j7MQON3E,6
70
- npcsh-0.3.27.7.dist-info/RECORD,,
46
+ npcsh-0.3.29.data/data/npcsh/npc_team/calculator.tool,sha256=ZKmQ0VusuJj8Ib5MDqC8MTDaqWIZCxzpazWPVkYZqYc,197
47
+ npcsh-0.3.29.data/data/npcsh/npc_team/celona.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
48
+ npcsh-0.3.29.data/data/npcsh/npc_team/corca.npc,sha256=9qs7922thBESU4r5GRygqAjvg9zvhQyMYqqyb4wwSew,662
49
+ npcsh-0.3.29.data/data/npcsh/npc_team/eriane.npc,sha256=5z6L-RjEouEp06SLOzkQoOCEi0eb1K-CxVnvyIbNK3g,299
50
+ npcsh-0.3.29.data/data/npcsh/npc_team/foreman.npc,sha256=WqB8jLfBToGmr8c1vip1KOnTHxfXlGXwDUGnZoDMQr0,327
51
+ npcsh-0.3.29.data/data/npcsh/npc_team/generic_search.tool,sha256=sYA4aUuZm84vpx-5tNi6ADq9ywHaj_YfLt6PI0ZblUo,745
52
+ npcsh-0.3.29.data/data/npcsh/npc_team/image_generation.tool,sha256=CaTkdjxWLFtMAghPvZnToMSwHbMQVusojhd9R9jybmI,577
53
+ npcsh-0.3.29.data/data/npcsh/npc_team/lineru.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
54
+ npcsh-0.3.29.data/data/npcsh/npc_team/local_search.tool,sha256=g9Hc_Xx08fc6lu1IHvdwYU_18KxcOoHDwvIAfrGmn3U,6852
55
+ npcsh-0.3.29.data/data/npcsh/npc_team/maurawa.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
56
+ npcsh-0.3.29.data/data/npcsh/npc_team/npcsh.ctx,sha256=VOd7omCBo_764gKCYuHxJcyVc61oC5YjB7rnCbDhnYU,275
57
+ npcsh-0.3.29.data/data/npcsh/npc_team/raone.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
58
+ npcsh-0.3.29.data/data/npcsh/npc_team/screen_cap.tool,sha256=v7l54PWWdgszdd-n_DFqI9nyMlBSeYeNIfQsdFYSZ_4,1389
59
+ npcsh-0.3.29.data/data/npcsh/npc_team/sibiji.npc,sha256=MJZHU9xXmvUbZvwpX1wWinvkrwYiKm1J63t37l0EYGE,202
60
+ npcsh-0.3.29.data/data/npcsh/npc_team/slean.npc,sha256=to3-d7qWMzhrYAeXalzNqVdt_3wROFGEfhprVXDttos,326
61
+ npcsh-0.3.29.data/data/npcsh/npc_team/sql_executor.tool,sha256=laEOBoyX2p3dbQpY2HoWnj5IXvxBoZ4kghGMkBwymQA,825
62
+ npcsh-0.3.29.data/data/npcsh/npc_team/test_pipeline.py,sha256=GKIcqw0fXDHsUDfMvu7GTj5cfPnqs7aX5xxiKPs2xCc,5657
63
+ npcsh-0.3.29.data/data/npcsh/npc_team/turnic.npc,sha256=odTFzQTN01-xaWXvjoC698htaFi6mvrzfKMAob8Yqb0,335
64
+ npcsh-0.3.29.data/data/npcsh/npc_team/welxor.npc,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
65
+ npcsh-0.3.29.dist-info/licenses/LICENSE,sha256=j0YPvce7Ng9e32zYOu0EmXjXeJ0Nwawd0RA3uSGGH4E,1070
66
+ npcsh-0.3.29.dist-info/METADATA,sha256=5hc3pzglBFrBZya_Yi-8Fb9FReCmjEmGDJ2LAr8mUlI,83180
67
+ npcsh-0.3.29.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
68
+ npcsh-0.3.29.dist-info/entry_points.txt,sha256=Y2rAM_m1er_Effxc0DXtGh36sC1FOUfefqGAt6vEte0,64
69
+ npcsh-0.3.29.dist-info/top_level.txt,sha256=kHSNgKMCkfjV95-DH0YSp1LLBi0HXdF3w57j7MQON3E,6
70
+ npcsh-0.3.29.dist-info/RECORD,,