npcsh 0.3.32__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. npcsh/_state.py +942 -0
  2. npcsh/alicanto.py +1074 -0
  3. npcsh/guac.py +785 -0
  4. npcsh/mcp_helpers.py +357 -0
  5. npcsh/mcp_npcsh.py +822 -0
  6. npcsh/mcp_server.py +184 -0
  7. npcsh/npc.py +218 -0
  8. npcsh/npcsh.py +1161 -0
  9. npcsh/plonk.py +387 -269
  10. npcsh/pti.py +234 -0
  11. npcsh/routes.py +958 -0
  12. npcsh/spool.py +315 -0
  13. npcsh/wander.py +550 -0
  14. npcsh/yap.py +573 -0
  15. npcsh-1.0.0.dist-info/METADATA +596 -0
  16. npcsh-1.0.0.dist-info/RECORD +21 -0
  17. {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/WHEEL +1 -1
  18. npcsh-1.0.0.dist-info/entry_points.txt +9 -0
  19. {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/licenses/LICENSE +1 -1
  20. npcsh/audio.py +0 -569
  21. npcsh/audio_gen.py +0 -1
  22. npcsh/cli.py +0 -543
  23. npcsh/command_history.py +0 -566
  24. npcsh/conversation.py +0 -54
  25. npcsh/data_models.py +0 -46
  26. npcsh/dataframes.py +0 -171
  27. npcsh/embeddings.py +0 -168
  28. npcsh/helpers.py +0 -646
  29. npcsh/image.py +0 -298
  30. npcsh/image_gen.py +0 -79
  31. npcsh/knowledge_graph.py +0 -1006
  32. npcsh/llm_funcs.py +0 -2195
  33. npcsh/load_data.py +0 -83
  34. npcsh/main.py +0 -5
  35. npcsh/model_runner.py +0 -189
  36. npcsh/npc_compiler.py +0 -2879
  37. npcsh/npc_sysenv.py +0 -388
  38. npcsh/npc_team/assembly_lines/test_pipeline.py +0 -181
  39. npcsh/npc_team/corca.npc +0 -13
  40. npcsh/npc_team/foreman.npc +0 -7
  41. npcsh/npc_team/npcsh.ctx +0 -11
  42. npcsh/npc_team/sibiji.npc +0 -4
  43. npcsh/npc_team/templates/analytics/celona.npc +0 -0
  44. npcsh/npc_team/templates/hr_support/raone.npc +0 -0
  45. npcsh/npc_team/templates/humanities/eriane.npc +0 -4
  46. npcsh/npc_team/templates/it_support/lineru.npc +0 -0
  47. npcsh/npc_team/templates/marketing/slean.npc +0 -4
  48. npcsh/npc_team/templates/philosophy/maurawa.npc +0 -0
  49. npcsh/npc_team/templates/sales/turnic.npc +0 -4
  50. npcsh/npc_team/templates/software/welxor.npc +0 -0
  51. npcsh/npc_team/tools/bash_executer.tool +0 -32
  52. npcsh/npc_team/tools/calculator.tool +0 -8
  53. npcsh/npc_team/tools/code_executor.tool +0 -16
  54. npcsh/npc_team/tools/generic_search.tool +0 -27
  55. npcsh/npc_team/tools/image_generation.tool +0 -25
  56. npcsh/npc_team/tools/local_search.tool +0 -149
  57. npcsh/npc_team/tools/npcsh_executor.tool +0 -9
  58. npcsh/npc_team/tools/screen_cap.tool +0 -27
  59. npcsh/npc_team/tools/sql_executor.tool +0 -26
  60. npcsh/response.py +0 -272
  61. npcsh/search.py +0 -252
  62. npcsh/serve.py +0 -1467
  63. npcsh/shell.py +0 -524
  64. npcsh/shell_helpers.py +0 -3919
  65. npcsh/stream.py +0 -233
  66. npcsh/video.py +0 -52
  67. npcsh/video_gen.py +0 -69
  68. npcsh-0.3.32.data/data/npcsh/npc_team/bash_executer.tool +0 -32
  69. npcsh-0.3.32.data/data/npcsh/npc_team/calculator.tool +0 -8
  70. npcsh-0.3.32.data/data/npcsh/npc_team/celona.npc +0 -0
  71. npcsh-0.3.32.data/data/npcsh/npc_team/code_executor.tool +0 -16
  72. npcsh-0.3.32.data/data/npcsh/npc_team/corca.npc +0 -13
  73. npcsh-0.3.32.data/data/npcsh/npc_team/eriane.npc +0 -4
  74. npcsh-0.3.32.data/data/npcsh/npc_team/foreman.npc +0 -7
  75. npcsh-0.3.32.data/data/npcsh/npc_team/generic_search.tool +0 -27
  76. npcsh-0.3.32.data/data/npcsh/npc_team/image_generation.tool +0 -25
  77. npcsh-0.3.32.data/data/npcsh/npc_team/lineru.npc +0 -0
  78. npcsh-0.3.32.data/data/npcsh/npc_team/local_search.tool +0 -149
  79. npcsh-0.3.32.data/data/npcsh/npc_team/maurawa.npc +0 -0
  80. npcsh-0.3.32.data/data/npcsh/npc_team/npcsh.ctx +0 -11
  81. npcsh-0.3.32.data/data/npcsh/npc_team/npcsh_executor.tool +0 -9
  82. npcsh-0.3.32.data/data/npcsh/npc_team/raone.npc +0 -0
  83. npcsh-0.3.32.data/data/npcsh/npc_team/screen_cap.tool +0 -27
  84. npcsh-0.3.32.data/data/npcsh/npc_team/sibiji.npc +0 -4
  85. npcsh-0.3.32.data/data/npcsh/npc_team/slean.npc +0 -4
  86. npcsh-0.3.32.data/data/npcsh/npc_team/sql_executor.tool +0 -26
  87. npcsh-0.3.32.data/data/npcsh/npc_team/test_pipeline.py +0 -181
  88. npcsh-0.3.32.data/data/npcsh/npc_team/turnic.npc +0 -4
  89. npcsh-0.3.32.data/data/npcsh/npc_team/welxor.npc +0 -0
  90. npcsh-0.3.32.dist-info/METADATA +0 -779
  91. npcsh-0.3.32.dist-info/RECORD +0 -78
  92. npcsh-0.3.32.dist-info/entry_points.txt +0 -3
  93. {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/top_level.txt +0 -0
@@ -1,27 +0,0 @@
1
- tool_name: "screen_capture_analysis_tool"
2
- description: Captures the whole screen and sends the image for analysis
3
- inputs:
4
- - "prompt"
5
- steps:
6
- - engine: "python"
7
- code: |
8
- # Capture the screen
9
- import pyautogui
10
- import datetime
11
- import os
12
- from PIL import Image
13
- import time
14
- from npcsh.image import analyze_image_base, capture_screenshot
15
-
16
- out = capture_screenshot(npc = npc, full = True)
17
-
18
- llm_response = analyze_image_base( '{{prompt}}' + "\n\nAttached is a screenshot of my screen currently. Please use this to evaluate the situation. If the user asked for you to explain what's on their screen or something similar, they are referring to the details contained within the attached image. You do not need to actually view their screen. You do not need to mention that you cannot view or interpret images directly. You only need to answer the user's request based on the attached screenshot!",
19
- out['file_path'],
20
- out['filename'],
21
- npc=npc,
22
- **out['model_kwargs'])
23
- # To this:
24
- if isinstance(llm_response, dict):
25
- llm_response = llm_response.get('response', 'No response from image analysis')
26
- else:
27
- llm_response = 'No response from image analysis'
@@ -1,4 +0,0 @@
1
- name: sibiji
2
- primary_directive: You are a foundational AI assistant. Your role is to provide basic support and information. Respond to queries concisely and accurately.
3
- model: llama3.2
4
- provider: ollama
@@ -1,4 +0,0 @@
1
- name: slean
2
- primary_directive: Assist with marketing issues, challenges and questions. When responding, be careful to always think through the problems as if you are a marketing wiz who has launched and hyper-scaled companies through effective marketing by always thinking outside the box.
3
- model: gpt-4o-mini
4
- provider: openai
@@ -1,26 +0,0 @@
1
- tool_name: data_pull
2
- description: Execute queries on the ~/npcsh_history.db to pull data. The database contains only information about conversations and other user-provided data. It does not store any information about individual files.
3
- inputs:
4
- - sql_query
5
- - interpret: false # Note that this is not a boolean, but a string
6
-
7
- steps:
8
- - engine: python
9
- code: |
10
- import pandas as pd
11
- try:
12
- df = pd.read_sql_query('{{sql_query}}', npc.db_conn)
13
- except pandas.errors.DatabaseError as e:
14
- df = pd.DataFrame({'Error': [str(e)]})
15
-
16
-
17
- output = df.to_string()
18
-
19
- - engine: natural
20
- code: |
21
- {% if interpret %}
22
- Here is the result of the SQL query:
23
- ```
24
- {{ df.to_string() }} # Convert DataFrame to string for a nicer display
25
- ```
26
- {% endif %}
@@ -1,181 +0,0 @@
1
- import pandas as pd
2
- from sqlalchemy import create_engine
3
- import os
4
-
5
- # Sample market events data
6
- market_events_data = {
7
- "datetime": [
8
- "2023-10-15 09:00:00",
9
- "2023-10-16 10:30:00",
10
- "2023-10-17 11:45:00",
11
- "2023-10-18 13:15:00",
12
- "2023-10-19 14:30:00",
13
- ],
14
- "headline": [
15
- "Stock Market Rallies Amid Positive Economic Data",
16
- "Tech Giant Announces New Product Line",
17
- "Federal Reserve Hints at Interest Rate Pause",
18
- "Oil Prices Surge Following Supply Concerns",
19
- "Retail Sector Reports Record Q3 Earnings",
20
- ],
21
- }
22
-
23
- # Create a DataFrame
24
- market_events_df = pd.DataFrame(market_events_data)
25
-
26
- # Define database path relative to user's home directory
27
- db_path = os.path.expanduser("~/npcsh_history.db")
28
-
29
- # Create a connection to the SQLite database
30
- engine = create_engine(f"sqlite:///{db_path}")
31
- with engine.connect() as connection:
32
- # Write the data to a new table 'market_events', replacing existing data
33
- market_events_df.to_sql(
34
- "market_events", con=connection, if_exists="replace", index=False
35
- )
36
-
37
- print("Market events have been added to the database.")
38
-
39
- email_data = {
40
- "datetime": [
41
- "2023-10-10 10:00:00",
42
- "2023-10-11 11:00:00",
43
- "2023-10-12 12:00:00",
44
- "2023-10-13 13:00:00",
45
- "2023-10-14 14:00:00",
46
- ],
47
- "subject": [
48
- "Meeting Reminder",
49
- "Project Update",
50
- "Invoice Attached",
51
- "Weekly Report",
52
- "Holiday Notice",
53
- ],
54
- "sender": [
55
- "alice@example.com",
56
- "bob@example.com",
57
- "carol@example.com",
58
- "dave@example.com",
59
- "eve@example.com",
60
- ],
61
- "recipient": [
62
- "bob@example.com",
63
- "carol@example.com",
64
- "dave@example.com",
65
- "eve@example.com",
66
- "alice@example.com",
67
- ],
68
- "body": [
69
- "Don't forget the meeting tomorrow at 10 AM.",
70
- "The project is progressing well, see attached update.",
71
- "Please find your invoice attached.",
72
- "Here is the weekly report.",
73
- "The office will be closed on holidays, have a great time!",
74
- ],
75
- }
76
-
77
- # Create a DataFrame
78
- emails_df = pd.DataFrame(email_data)
79
-
80
- # Define database path relative to user's home directory
81
- db_path = os.path.expanduser("~/npcsh_history.db")
82
-
83
- # Create a connection to the SQLite database
84
- engine = create_engine(f"sqlite:///{db_path}")
85
- with engine.connect() as connection:
86
- # Write the data to a new table 'emails', replacing existing data
87
- emails_df.to_sql("emails", con=connection, if_exists="replace", index=False)
88
-
89
- print("Sample emails have been added to the database.")
90
-
91
-
92
- from npcsh.npc_compiler import PipelineRunner
93
- import os
94
-
95
- pipeline_runner = PipelineRunner(
96
- pipeline_file="morning_routine.pipe",
97
- npc_root_dir=os.path.abspath("."), # Use absolute path to parent directory
98
- db_path="~/npcsh_history.db",
99
- )
100
- pipeline_runner.execute_pipeline()
101
-
102
-
103
- import pandas as pd
104
- from sqlalchemy import create_engine
105
- import os
106
-
107
- # Sample data generation for news articles
108
- news_articles_data = {
109
- "news_article_id": list(range(1, 21)),
110
- "headline": [
111
- "Economy sees unexpected growth in Q4",
112
- "New tech gadget takes the world by storm",
113
- "Political debate heats up over new policy",
114
- "Health concerns rise amid new disease outbreak",
115
- "Sports team secures victory in last minute",
116
- "New economic policy introduced by government",
117
- "Breakthrough in AI technology announced",
118
- "Political leader delivers speech on reforms",
119
- "Healthcare systems pushed to limits",
120
- "Celebrated athlete breaks world record",
121
- "Controversial economic measures spark debate",
122
- "Innovative tech startup gains traction",
123
- "Political scandal shakes administration",
124
- "Healthcare workers protest for better pay",
125
- "Major sports event postponed due to weather",
126
- "Trade tensions impact global economy",
127
- "Tech company accused of data breach",
128
- "Election results lead to political upheaval",
129
- "Vaccine developments offer hope amid pandemic",
130
- "Sports league announces return to action",
131
- ],
132
- "content": ["Article content here..." for _ in range(20)],
133
- "publication_date": pd.date_range(start="1/1/2023", periods=20, freq="D"),
134
- }
135
-
136
- # Create a DataFrame
137
- news_df = pd.DataFrame(news_articles_data)
138
-
139
- # Define the database path
140
- db_path = os.path.expanduser("~/npcsh_history.db")
141
-
142
- # Create a connection to the SQLite database
143
- engine = create_engine(f"sqlite:///{db_path}")
144
- with engine.connect() as connection:
145
- # Write the data to a new table 'news_articles', replacing existing data
146
- news_df.to_sql("news_articles", con=connection, if_exists="replace", index=False)
147
-
148
- print("News articles have been added to the database.")
149
-
150
- from npcsh.npc_compiler import PipelineRunner
151
- import os
152
-
153
- runner = PipelineRunner(
154
- "./news_analysis.pipe",
155
- db_path=os.path.expanduser("~/npcsh_history.db"),
156
- npc_root_dir=os.path.abspath("."),
157
- )
158
- results = runner.execute_pipeline()
159
-
160
- print("\nResults:")
161
- print("\nClassifications (processed row by row):")
162
- print(results["classify_news"])
163
- print("\nAnalysis (processed in batch):")
164
- print(results["analyze_news"])
165
-
166
-
167
- from npcsh.npc_compiler import PipelineRunner
168
- import os
169
-
170
- runner = PipelineRunner(
171
- "./news_analysis_mixa.pipe",
172
- db_path=os.path.expanduser("~/npcsh_history.db"),
173
- npc_root_dir=os.path.abspath("."),
174
- )
175
- results = runner.execute_pipeline()
176
-
177
- print("\nResults:")
178
- print("\nClassifications (processed row by row):")
179
- print(results["classify_news"])
180
- print("\nAnalysis (processed in batch):")
181
- print(results["analyze_news"])
@@ -1,4 +0,0 @@
1
- name: turnic
2
- primary_directive: Assist with sales challenges and questions. When responding, keep in mind that sales professionals tend to be interested in achieving results quickly so you must ensure that you opt for simpler and more straightforward solutions and explanations without much fanfare.
3
- model: gpt-4o-mini
4
- provider: openai
File without changes