npcsh 0.3.25__py3-none-any.whl → 0.3.27__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npcsh/conversation.py +2 -2
- npcsh/embeddings.py +2 -1
- npcsh/image_gen.py +2 -1
- npcsh/knowledge_graph.py +9 -2
- npcsh/llm_funcs.py +40 -43
- npcsh/npc_sysenv.py +6 -2
- npcsh/response.py +2 -2
- npcsh/search.py +6 -2
- npcsh/shell_helpers.py +3 -4
- npcsh/stream.py +4 -2
- {npcsh-0.3.25.dist-info → npcsh-0.3.27.dist-info}/METADATA +690 -686
- {npcsh-0.3.25.dist-info → npcsh-0.3.27.dist-info}/RECORD +35 -35
- {npcsh-0.3.25.dist-info → npcsh-0.3.27.dist-info}/WHEEL +1 -1
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/calculator.tool +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/celona.npc +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/corca.npc +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/eriane.npc +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/foreman.npc +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/generic_search.tool +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/image_generation.tool +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/lineru.npc +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/local_search.tool +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/maurawa.npc +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/raone.npc +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/screen_cap.tool +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/sibiji.npc +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/slean.npc +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/sql_executor.tool +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/test_pipeline.py +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/turnic.npc +0 -0
- {npcsh-0.3.25.data → npcsh-0.3.27.data}/data/npcsh/npc_team/welxor.npc +0 -0
- {npcsh-0.3.25.dist-info → npcsh-0.3.27.dist-info}/entry_points.txt +0 -0
- {npcsh-0.3.25.dist-info → npcsh-0.3.27.dist-info}/licenses/LICENSE +0 -0
- {npcsh-0.3.25.dist-info → npcsh-0.3.27.dist-info}/top_level.txt +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: npcsh
|
|
3
|
-
Version: 0.3.
|
|
3
|
+
Version: 0.3.27
|
|
4
4
|
Summary: npcsh is a command line tool for integrating LLMs into everyday workflows and for orchestrating teams of NPCs.
|
|
5
5
|
Home-page: https://github.com/cagostino/npcsh
|
|
6
6
|
Author: Christopher Agostino
|
|
@@ -13,38 +13,18 @@ License-File: LICENSE
|
|
|
13
13
|
Requires-Dist: redis
|
|
14
14
|
Requires-Dist: flask_sse
|
|
15
15
|
Requires-Dist: anthropic
|
|
16
|
-
Requires-Dist: screeninfo
|
|
17
|
-
Requires-Dist: sentence_transformers
|
|
18
|
-
Requires-Dist: nltk
|
|
19
|
-
Requires-Dist: thefuzz
|
|
20
16
|
Requires-Dist: beautifulsoup4
|
|
21
17
|
Requires-Dist: google-generativeai
|
|
22
18
|
Requires-Dist: google-genai
|
|
23
19
|
Requires-Dist: duckduckgo-search
|
|
24
|
-
Requires-Dist: pypdf
|
|
25
|
-
Requires-Dist: PyMuPDF
|
|
26
|
-
Requires-Dist: opencv-python
|
|
27
|
-
Requires-Dist: librosa
|
|
28
20
|
Requires-Dist: openai
|
|
29
21
|
Requires-Dist: jinja2
|
|
30
|
-
Requires-Dist: pyautogui
|
|
31
|
-
Requires-Dist: pandas
|
|
32
|
-
Requires-Dist: matplotlib
|
|
33
|
-
Requires-Dist: IPython
|
|
34
|
-
Requires-Dist: ollama
|
|
35
22
|
Requires-Dist: requests
|
|
36
23
|
Requires-Dist: markdown
|
|
37
24
|
Requires-Dist: PyYAML
|
|
38
25
|
Requires-Dist: langchain
|
|
39
26
|
Requires-Dist: langchain_community
|
|
40
|
-
Requires-Dist: openai-whisper
|
|
41
|
-
Requires-Dist: pyaudio
|
|
42
27
|
Requires-Dist: pygments
|
|
43
|
-
Requires-Dist: pyttsx3
|
|
44
|
-
Requires-Dist: kuzu
|
|
45
|
-
Requires-Dist: chromadb
|
|
46
|
-
Requires-Dist: gtts
|
|
47
|
-
Requires-Dist: playsound==1.2.2
|
|
48
28
|
Requires-Dist: termcolor
|
|
49
29
|
Requires-Dist: colorama
|
|
50
30
|
Requires-Dist: python-dotenv
|
|
@@ -52,6 +32,21 @@ Requires-Dist: pytest
|
|
|
52
32
|
Requires-Dist: googlesearch-python
|
|
53
33
|
Requires-Dist: flask
|
|
54
34
|
Requires-Dist: flask_cors
|
|
35
|
+
Requires-Dist: librosa
|
|
36
|
+
Requires-Dist: pandas
|
|
37
|
+
Requires-Dist: matplotlib
|
|
38
|
+
Requires-Dist: IPython
|
|
39
|
+
Requires-Dist: pyautogui
|
|
40
|
+
Requires-Dist: nltk
|
|
41
|
+
Requires-Dist: thefuzz
|
|
42
|
+
Requires-Dist: pypdf
|
|
43
|
+
Requires-Dist: PyMuPDF
|
|
44
|
+
Requires-Dist: screeninfo
|
|
45
|
+
Requires-Dist: sentence_transformers
|
|
46
|
+
Requires-Dist: opencv-python
|
|
47
|
+
Requires-Dist: ollama
|
|
48
|
+
Requires-Dist: kuzu
|
|
49
|
+
Requires-Dist: chromadb
|
|
55
50
|
Requires-Dist: diffusers
|
|
56
51
|
Dynamic: author
|
|
57
52
|
Dynamic: author-email
|
|
@@ -72,7 +67,7 @@ Dynamic: summary
|
|
|
72
67
|
# npcsh
|
|
73
68
|
|
|
74
69
|
|
|
75
|
-
- `npcsh` is a python-based
|
|
70
|
+
- `npcsh` is a python-based AI Agent framework designed to integrate Large Language Models (LLMs) and Agents into one's daily workflow by making them available and easily configurable through a command line shell as well as an extensible python library.
|
|
76
71
|
|
|
77
72
|
- **Smart Interpreter**: `npcsh` leverages the power of LLMs to understand your natural language commands and questions, executing tasks, answering queries, and providing relevant information from local files and the web.
|
|
78
73
|
|
|
@@ -83,10 +78,9 @@ Dynamic: summary
|
|
|
83
78
|
|
|
84
79
|
* **Extensible with Python:** `npcsh`'s python package provides useful functions for interacting with LLMs, including explicit coverage for popular providers like ollama, anthropic, openai, gemini, deepseek, and openai-like providers. Each macro has a corresponding function and these can be used in python scripts. `npcsh`'s functions are purpose-built to simplify NPC interactions but NPCs are not required for them to work if you don't see the need.
|
|
85
80
|
|
|
86
|
-
* **Simple, Powerful CLI:** Use the `npc` CLI commands to
|
|
87
|
-
|
|
88
|
-
* **Shell Strengths:** Execute bash commands directly. Use your favorite command-line tools like VIM, Emacs, ipython, sqlite3, git. Pipe the output of these commands to LLMs or pass LLM results to bash commands.
|
|
81
|
+
* **Simple, Powerful CLI:** Use the `npc` CLI commands to run `npcsh` macros or commands from one's regular shell. Set up a flask server so you can expose your NPC team for use as a backend service. You can also use the `npc` CLI to run SQL models defined in your project, execute assembly lines, and verify the integrity of your NPC team's interrelations. `npcsh`'s NPCs take advantage of jinja templating to reference other NPCs and tools in their properties, and the `npc` CLI can be used to verify these references.
|
|
89
82
|
|
|
83
|
+
* **Powerful Tool integrations:** `npcsh` has built-in tools for users to have agents execute code, analyze data, generate images, search the web, and more. Tools can be defined in YAML files as part of project-specific `npc_team`s or in the global `~/.npcsh/npc_team` directory or simply in python scripts. Once compiled, the tools can be used as macros in the `npc` cli as well as `/{tool_name}` commands in the `npcsh` shell.
|
|
90
84
|
|
|
91
85
|
|
|
92
86
|
Interested to stay in the loop and to hear the latest and greatest about `npcsh` ? Be sure to sign up for the [npcsh newsletter](https://forms.gle/n1NzQmwjsV4xv1B2A)!
|
|
@@ -113,458 +107,790 @@ Users can take advantage of `npcsh` through its custom shell or through a comman
|
|
|
113
107
|
| Voice Chat | npc whisper 'npc_name' | /whisper |
|
|
114
108
|
|
|
115
109
|
|
|
116
|
-
##
|
|
110
|
+
## Python Examples
|
|
111
|
+
Integrate npcsh into your Python projects for additional flexibility. Below are a few examples of how to use the library programmatically.
|
|
117
112
|
|
|
118
|
-
[](https://star-history.com/#cagostino/npcsh&Date)
|
|
119
113
|
|
|
120
|
-
## Installation
|
|
121
|
-
`npcsh` is available on PyPI and can be installed using pip. Before installing, make sure you have the necessary dependencies installed on your system. Below are the instructions for installing such dependencies on Linux, Mac, and Windows. If you find any other dependencies that are needed, please let us know so we can update the installation instructions to be more accommodating.
|
|
122
114
|
|
|
123
|
-
###
|
|
115
|
+
### Example 1: Creating and Using an NPC
|
|
116
|
+
This example shows how to create and initialize an NPC and use it to answer a question.
|
|
124
117
|
```bash
|
|
118
|
+
import sqlite3
|
|
119
|
+
from npcsh.npc_compiler import NPC
|
|
125
120
|
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
sudo apt-get install libcairo2-dev
|
|
130
|
-
sudo apt-get install libgirepository1.0-dev
|
|
131
|
-
sudo apt-get install ffmpeg
|
|
121
|
+
# Set up database connection
|
|
122
|
+
db_path = '~/npcsh_history.db'
|
|
123
|
+
conn = sqlite3.connect(db_path)
|
|
132
124
|
|
|
133
|
-
#
|
|
134
|
-
|
|
125
|
+
# Load NPC from a file
|
|
126
|
+
npc = NPC(
|
|
127
|
+
name='Simon Bolivar',
|
|
128
|
+
db_conn=conn,
|
|
129
|
+
primary_directive='Liberate South America from the Spanish Royalists.',
|
|
130
|
+
model='gpt-4o-mini',
|
|
131
|
+
provider='openai',
|
|
132
|
+
)
|
|
135
133
|
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
134
|
+
response = npc.get_llm_response("What is the most important territory to retain in the Andes mountains?")
|
|
135
|
+
print(response['response'])
|
|
136
|
+
```
|
|
137
|
+
```bash
|
|
138
|
+
'The most important territory to retain in the Andes mountains for the cause of liberation in South America would be the region of Quito in present-day Ecuador. This area is strategically significant due to its location and access to key trade routes. It also acts as a vital link between the northern and southern parts of the continent, influencing both military movements and the morale of the independence struggle. Retaining control over Quito would bolster efforts to unite various factions in the fight against Spanish colonial rule across the Andean states.'
|
|
140
139
|
```
|
|
140
|
+
### Example 2: Using an NPC to Analyze Data
|
|
141
|
+
This example shows how to use an NPC to perform data analysis on a DataFrame using LLM commands.
|
|
142
|
+
```bash
|
|
143
|
+
from npcsh.npc_compiler import NPC
|
|
144
|
+
import sqlite3
|
|
145
|
+
import os
|
|
146
|
+
# Set up database connection
|
|
147
|
+
db_path = '~/npcsh_history.db'
|
|
148
|
+
conn = sqlite3.connect(os.path.expanduser(db_path))
|
|
141
149
|
|
|
150
|
+
# make a table to put into npcsh_history.db or change this example to use an existing table in a database you have
|
|
151
|
+
import pandas as pd
|
|
152
|
+
data = {
|
|
153
|
+
'customer_feedback': ['The product is great!', 'The service was terrible.', 'I love the new feature.'],
|
|
154
|
+
'customer_id': [1, 2, 3],
|
|
155
|
+
'customer_rating': [5, 1, 3],
|
|
156
|
+
'timestamp': ['2022-01-01', '2022-01-02', '2022-01-03']
|
|
157
|
+
}
|
|
142
158
|
|
|
143
159
|
|
|
160
|
+
df = pd.DataFrame(data)
|
|
161
|
+
df.to_sql('customer_feedback', conn, if_exists='replace', index=False)
|
|
144
162
|
|
|
145
|
-
### Mac install
|
|
146
|
-
```bash
|
|
147
|
-
brew install portaudio
|
|
148
|
-
brew install ffmpeg
|
|
149
|
-
brew install ollama
|
|
150
|
-
brew services start ollama
|
|
151
|
-
brew install pygobject3
|
|
152
|
-
ollama pull llama3.2
|
|
153
|
-
ollama pull llava:7b
|
|
154
|
-
ollama pull nomic-embed-text
|
|
155
|
-
pip install npcsh
|
|
156
|
-
```
|
|
157
|
-
### Windows Install
|
|
158
163
|
|
|
159
|
-
|
|
164
|
+
npc = NPC(
|
|
165
|
+
name='Felix',
|
|
166
|
+
db_conn=conn,
|
|
167
|
+
primary_directive='Analyze customer feedback for sentiment.',
|
|
168
|
+
model='gpt-4o-mini',
|
|
169
|
+
provider='openai',
|
|
170
|
+
)
|
|
171
|
+
response = npc.analyze_db_data('Provide a detailed report on the data contained in the `customer_feedback` table?')
|
|
160
172
|
|
|
161
|
-
Then, in a powershell. Download and install ffmpeg.
|
|
162
173
|
|
|
163
174
|
```
|
|
164
|
-
ollama pull llama3.2
|
|
165
|
-
ollama pull llava:7b
|
|
166
|
-
ollama pull nomic-embed-text
|
|
167
|
-
pip install npcsh
|
|
168
|
-
```
|
|
169
|
-
As of now, npcsh appears to work well with some of the core functionalities like /ots and /whisper.
|
|
170
175
|
|
|
171
176
|
|
|
172
|
-
###
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
xhost + (pyautogui)
|
|
176
|
-
python-tkinter (pyautogui)
|
|
177
|
+
### Example 3: Creating and Using a Tool
|
|
178
|
+
You can define a tool and execute it from within your Python script.
|
|
179
|
+
Here we'll create a tool that will take in a pdf file, extract the text, and then answer a user request about the text.
|
|
177
180
|
|
|
178
|
-
## Startup Configuration and Project Structure
|
|
179
|
-
After it has been pip installed, `npcsh` can be used as a command line tool. Start it by typing:
|
|
180
|
-
```bash
|
|
181
|
-
npcsh
|
|
182
|
-
```
|
|
183
|
-
When initialized, `npcsh` will generate a .npcshrc file in your home directory that stores your npcsh settings.
|
|
184
|
-
Here is an example of what the .npcshrc file might look like after this has been run.
|
|
185
181
|
```bash
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
export NPCSH_CHAT_MODEL='llama3.2'
|
|
190
|
-
export NPCSH_DB_PATH='~/npcsh_history.db'
|
|
191
|
-
```
|
|
192
|
-
`npcsh` also comes with a set of tools and NPCs that are used in processing. It will generate a folder at ~/.npcsh/ that contains the tools and NPCs that are used in the shell and these will be used in the absence of other project-specific ones. Additionally, `npcsh` records interactions and compiled information about npcs within a local SQLite database at the path specified in the .npcshrc file. This will default to ~/npcsh_history.db if not specified. When the data mode is used to load or analyze data in CSVs or PDFs, these data will be stored in the same database for future reference.
|
|
182
|
+
from npcsh.npc_compiler import Tool, NPC
|
|
183
|
+
import sqlite3
|
|
184
|
+
import os
|
|
193
185
|
|
|
194
|
-
|
|
186
|
+
from jinja2 import Environment, FileSystemLoader
|
|
195
187
|
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
if [ -f ~/.npcshrc ]; then
|
|
199
|
-
. ~/.npcshrc
|
|
200
|
-
fi
|
|
201
|
-
```
|
|
188
|
+
# Create a proper Jinja environment
|
|
189
|
+
jinja_env = Environment(loader=FileSystemLoader('.'))
|
|
202
190
|
|
|
203
|
-
We support inference via `openai`, `anthropic`, `ollama`,`gemini`, `deepseek`, and `openai-like` APIs. The default provider must be one of `['openai','anthropic','ollama', 'gemini', 'deepseek', 'openai-like']` and the model must be one available from those providers.
|
|
204
191
|
|
|
205
|
-
|
|
192
|
+
tool_data = {
|
|
193
|
+
"tool_name": "pdf_analyzer",
|
|
194
|
+
"inputs": ["request", "file"],
|
|
195
|
+
"steps": [{ # Make this a list with one dict inside
|
|
196
|
+
"engine": "python",
|
|
197
|
+
"code": """
|
|
198
|
+
try:
|
|
199
|
+
import fitz # PyMuPDF
|
|
206
200
|
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
export ANTHROPIC_API_KEY="your_anthropic_key"
|
|
210
|
-
export DEEPSEEK_API_KEY='your_deepseek_key'
|
|
211
|
-
export GEMINI_API_KEY='your_gemini_key'
|
|
212
|
-
export PERPLEXITY_API_KEY='your_perplexity_key'
|
|
213
|
-
```
|
|
201
|
+
shared_context = {}
|
|
202
|
+
shared_context['inputs'] = '{{request}}'
|
|
214
203
|
|
|
204
|
+
pdf_path = '{{file}}'
|
|
215
205
|
|
|
216
|
-
Individual npcs can also be set to use different models and providers by setting the `model` and `provider` keys in the npc files.
|
|
217
|
-
Once initialized and set up, you will find the following in your ~/.npcsh directory:
|
|
218
|
-
```bash
|
|
219
|
-
~/.npcsh/
|
|
220
|
-
├── npc_team/ # Global NPCs
|
|
221
|
-
│ ├── tools/ # Global tools
|
|
222
|
-
│ └── assembly_lines/ # Workflow pipelines
|
|
223
206
|
|
|
224
|
-
```
|
|
225
|
-
For cases where you wish to set up a project specific set of NPCs, tools, and assembly lines, add a `npc_team` directory to your project and `npcsh` should be able to pick up on its presence, like so:
|
|
226
|
-
```bash
|
|
227
|
-
./npc_team/ # Project-specific NPCs
|
|
228
|
-
├── tools/ # Project tools #example tool next
|
|
229
|
-
│ └── example.tool
|
|
230
|
-
└── assembly_lines/ # Project workflows
|
|
231
|
-
└── example.pipe
|
|
232
|
-
└── models/ # Project workflows
|
|
233
|
-
└── example.model
|
|
234
|
-
└── example1.npc # Example NPC
|
|
235
|
-
└── example2.npc # Example NPC
|
|
236
|
-
└── example1.ctx # Example NPC
|
|
237
|
-
└── example2.ctx # Example NPC
|
|
238
207
|
|
|
239
|
-
|
|
208
|
+
# Open the PDF
|
|
209
|
+
doc = fitz.open(pdf_path)
|
|
210
|
+
text = ""
|
|
240
211
|
|
|
241
|
-
|
|
212
|
+
# Extract text from each page
|
|
213
|
+
for page_num in range(len(doc)):
|
|
214
|
+
page = doc[page_num]
|
|
215
|
+
text += page.get_text()
|
|
242
216
|
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
```bash
|
|
246
|
-
rm ~/.npcsh/npc_team/tools/*.tool
|
|
247
|
-
```
|
|
248
|
-
and then
|
|
249
|
-
```bash
|
|
250
|
-
npcsh
|
|
251
|
-
```
|
|
252
|
-
and the updated tools will be copied over into the correct location.
|
|
217
|
+
# Close the document
|
|
218
|
+
doc.close()
|
|
253
219
|
|
|
254
|
-
|
|
255
|
-
|
|
220
|
+
print(f"Extracted text length: {len(text)}")
|
|
221
|
+
if len(text) > 100:
|
|
222
|
+
print(f"First 100 characters: {text[:100]}...")
|
|
256
223
|
|
|
224
|
+
shared_context['extracted_text'] = text
|
|
225
|
+
print("Text extraction completed successfully")
|
|
257
226
|
|
|
258
|
-
|
|
227
|
+
except Exception as e:
|
|
228
|
+
error_msg = f"Error processing PDF: {str(e)}"
|
|
229
|
+
print(error_msg)
|
|
230
|
+
shared_context['extracted_text'] = f"Error: {error_msg}"
|
|
231
|
+
"""
|
|
232
|
+
},
|
|
233
|
+
{
|
|
234
|
+
"engine": "natural",
|
|
235
|
+
"code": """
|
|
236
|
+
{% if shared_context and shared_context.extracted_text %}
|
|
237
|
+
{% if shared_context.extracted_text.startswith('Error:') %}
|
|
238
|
+
{{ shared_context.extracted_text }}
|
|
239
|
+
{% else %}
|
|
240
|
+
Here is the text extracted from the PDF:
|
|
259
241
|
|
|
260
|
-
|
|
261
|
-
to provide a more consistent naming scheme now that we have additionally introduced `NPCSH_VISION_MODEL` and `NPCSH_VISION_PROVIDER`, `NPCSH_EMBEDDING_MODEL`, `NPCSH_EMBEDDING_PROVIDER`, `NPCSH_REASONING_MODEL`, `NPCSH_REASONING_PROVIDER`, `NPCSH_IMAGE_GEN_MODEL`, and `NPCSH_IMAGE_GEN_PROVIDER`.
|
|
262
|
-
- In addition, we have added NPCSH_API_URL to better accommodate openai-like apis that require a specific url to be set as well as `NPCSH_STREAM_OUTPUT` to indicate whether or not to use streaming in one's responses. It will be set to 0 (false) by default as it has only been tested and verified for a small subset of the models and providers we have available (openai, anthropic, and ollama). If you try it and run into issues, please post them here so we can correct them as soon as possible !
|
|
242
|
+
{{ shared_context.extracted_text }}
|
|
263
243
|
|
|
244
|
+
Please provide a response to user request: {{ request }} using the information extracted above.
|
|
245
|
+
{% endif %}
|
|
246
|
+
{% else %}
|
|
247
|
+
Error: No text was extracted from the PDF.
|
|
248
|
+
{% endif %}
|
|
249
|
+
"""
|
|
250
|
+
},]
|
|
251
|
+
}
|
|
264
252
|
|
|
265
|
-
|
|
266
|
-
|
|
253
|
+
# Instantiate the tool
|
|
254
|
+
tool = Tool(tool_data)
|
|
267
255
|
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
256
|
+
# Create an NPC instance
|
|
257
|
+
npc = NPC(
|
|
258
|
+
name='starlana',
|
|
259
|
+
primary_directive='Analyze text from Astrophysics papers with a keen attention to theoretical machinations and mechanisms.',
|
|
260
|
+
model = 'llama3.2',
|
|
261
|
+
provider='ollama',
|
|
262
|
+
db_conn=sqlite3.connect(os.path.expanduser('~/npcsh_database.db'))
|
|
263
|
+
)
|
|
271
264
|
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
liberty, governance, and social issues continue to inspire movements for democracy and social justice in the region. Simón Bolívar died on December 17, 1830, but remains a pivotal figure
|
|
281
|
-
in the history of Latin America.
|
|
282
|
-
```
|
|
265
|
+
# Define input values dictionary
|
|
266
|
+
input_values = {
|
|
267
|
+
"request": "what is the point of the yuan and narayanan work?",
|
|
268
|
+
"file": os.path.abspath("test_data/yuan2004.pdf")
|
|
269
|
+
}
|
|
270
|
+
|
|
271
|
+
print(f"Attempting to read file: {input_values['file']}")
|
|
272
|
+
print(f"File exists: {os.path.exists(input_values['file'])}")
|
|
283
273
|
|
|
274
|
+
# Execute the tool
|
|
275
|
+
output = tool.execute(input_values, npc.tools_dict, jinja_env, 'Sample Command',model=npc.model, provider=npc.provider, npc=npc)
|
|
284
276
|
|
|
285
|
-
|
|
286
|
-
npcsh> What is the capital of France?
|
|
287
|
-
The capital of France is Paris. It is the largest city in the country and is known for its rich history, art, culture, and architecture, including famous landmarks such as the Eiffel Tower, Notre-Dame Cathedral, and the Louvre Museum.
|
|
277
|
+
print('Tool Output:', output)
|
|
288
278
|
```
|
|
289
279
|
|
|
290
|
-
|
|
291
|
-
npcsh> can you tell me a joke about my favorite city?
|
|
280
|
+
### Example 4: Orchestrating a team
|
|
292
281
|
|
|
293
|
-
Additional input needed: The user did not specify their favorite city, which is necessary to generate a relevant joke.
|
|
294
|
-
Please tell me your favorite city so I can share a joke about it!: boston
|
|
295
282
|
|
|
296
|
-
Sure! Here's a joke about Boston:
|
|
297
|
-
Why do Bostonians like to play hide and seek?
|
|
298
|
-
Because good luck hiding when everyone yells, "Wicked awesome, ya gotta be here!"
|
|
299
|
-
```
|
|
300
283
|
|
|
301
|
-
```
|
|
302
|
-
|
|
284
|
+
```python
|
|
285
|
+
import pandas as pd
|
|
286
|
+
import numpy as np
|
|
287
|
+
import os
|
|
288
|
+
from npcsh.npc_compiler import NPC, NPCTeam, Tool
|
|
303
289
|
|
|
304
|
-
handle_tool_call invoked with tool_name: generic_search_tool
|
|
305
290
|
|
|
306
|
-
|
|
291
|
+
# Create test data and save to CSV
|
|
292
|
+
def create_test_data(filepath="sales_data.csv"):
|
|
293
|
+
sales_data = pd.DataFrame(
|
|
294
|
+
{
|
|
295
|
+
"date": pd.date_range(start="2024-01-01", periods=90),
|
|
296
|
+
"revenue": np.random.normal(10000, 2000, 90),
|
|
297
|
+
"customer_count": np.random.poisson(100, 90),
|
|
298
|
+
"avg_ticket": np.random.normal(100, 20, 90),
|
|
299
|
+
"region": np.random.choice(["North", "South", "East", "West"], 90),
|
|
300
|
+
"channel": np.random.choice(["Online", "Store", "Mobile"], 90),
|
|
301
|
+
}
|
|
302
|
+
)
|
|
307
303
|
|
|
308
|
-
|
|
304
|
+
# Add patterns to make data more realistic
|
|
305
|
+
sales_data["revenue"] *= 1 + 0.3 * np.sin(
|
|
306
|
+
np.pi * np.arange(90) / 30
|
|
307
|
+
) # Seasonal pattern
|
|
308
|
+
sales_data.loc[sales_data["channel"] == "Mobile", "revenue"] *= 1.1 # Mobile growth
|
|
309
|
+
sales_data.loc[
|
|
310
|
+
sales_data["channel"] == "Online", "customer_count"
|
|
311
|
+
] *= 1.2 # Online customer growth
|
|
309
312
|
|
|
310
|
-
|
|
313
|
+
sales_data.to_csv(filepath, index=False)
|
|
314
|
+
return filepath, sales_data
|
|
311
315
|
|
|
312
|
-
• Winds: Generally from the northwest at 5 to 10 mph.
|
|
313
316
|
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
+
code_execution_tool = Tool(
|
|
318
|
+
{
|
|
319
|
+
"tool_name": "execute_code",
|
|
320
|
+
"description": """Executes a Python code block with access to pandas,
|
|
321
|
+
numpy, and matplotlib.
|
|
322
|
+
Results should be stored in the 'results' dict to be returned.
|
|
323
|
+
The only input should be a single code block with \n characters included.
|
|
324
|
+
The code block must use only the libraries or methods contained withen the
|
|
325
|
+
pandas, numpy, and matplotlib libraries or using builtin methods.
|
|
326
|
+
do not include any json formatting or markdown formatting.
|
|
317
327
|
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
```npcsh
|
|
321
|
-
npcsh> Can you explain what the code does in the currently open VS code window?
|
|
328
|
+
When generating your script, the final output must be encoded in a variable
|
|
329
|
+
named "output". e.g.
|
|
322
330
|
|
|
323
|
-
|
|
331
|
+
output = some_analysis_function(inputs, derived_data_from_inputs)
|
|
332
|
+
Adapt accordingly based on the scope of the analysis
|
|
324
333
|
|
|
325
|
-
|
|
334
|
+
""",
|
|
335
|
+
"inputs": ["script"],
|
|
336
|
+
"steps": [
|
|
337
|
+
{
|
|
338
|
+
"engine": "python",
|
|
339
|
+
"code": """{{script}}""",
|
|
340
|
+
}
|
|
341
|
+
],
|
|
342
|
+
}
|
|
343
|
+
)
|
|
326
344
|
|
|
327
|
-
|
|
345
|
+
# Analytics team definition
|
|
346
|
+
analytics_team = [
|
|
347
|
+
{
|
|
348
|
+
"name": "analyst",
|
|
349
|
+
"primary_directive": "You analyze sales performance data, focusing on revenue trends, customer behavior metrics, and market indicators. Your expertise is in extracting actionable insights from complex datasets.",
|
|
350
|
+
"model": "gpt-4o-mini",
|
|
351
|
+
"provider": "openai",
|
|
352
|
+
"tools": [code_execution_tool], # Only the code execution tool
|
|
353
|
+
},
|
|
354
|
+
{
|
|
355
|
+
"name": "researcher",
|
|
356
|
+
"primary_directive": "You specialize in causal analysis and experimental design. Given data insights, you determine what factors drive observed patterns and design tests to validate hypotheses.",
|
|
357
|
+
"model": "gpt-4o-mini",
|
|
358
|
+
"provider": "openai",
|
|
359
|
+
"tools": [code_execution_tool], # Only the code execution tool
|
|
360
|
+
},
|
|
361
|
+
{
|
|
362
|
+
"name": "engineer",
|
|
363
|
+
"primary_directive": "You implement data pipelines and optimize data processing. When given analysis requirements, you create efficient workflows to automate insights generation.",
|
|
364
|
+
"model": "gpt-4o-mini",
|
|
365
|
+
"provider": "openai",
|
|
366
|
+
"tools": [code_execution_tool], # Only the code execution tool
|
|
367
|
+
},
|
|
368
|
+
]
|
|
328
369
|
|
|
329
|
-
1 Import Necessary Libraries: It imports required libraries like system, datetime, and pyautogui, which are essential for capturing screenshots and handling date-time operations.
|
|
330
370
|
|
|
331
|
-
|
|
371
|
+
def create_analytics_team():
|
|
372
|
+
# Initialize NPCs with just the code execution tool
|
|
373
|
+
npcs = []
|
|
374
|
+
for npc_data in analytics_team:
|
|
375
|
+
npc = NPC(
|
|
376
|
+
name=npc_data["name"],
|
|
377
|
+
primary_directive=npc_data["primary_directive"],
|
|
378
|
+
model=npc_data["model"],
|
|
379
|
+
provider=npc_data["provider"],
|
|
380
|
+
tools=[code_execution_tool], # Only code execution tool
|
|
381
|
+
)
|
|
382
|
+
npcs.append(npc)
|
|
332
383
|
|
|
333
|
-
|
|
384
|
+
# Create coordinator with just code execution tool
|
|
385
|
+
coordinator = NPC(
|
|
386
|
+
name="coordinator",
|
|
387
|
+
primary_directive="You coordinate the analytics team, ensuring each specialist contributes their expertise effectively. You synthesize insights and manage the workflow.",
|
|
388
|
+
model="gpt-4o-mini",
|
|
389
|
+
provider="openai",
|
|
390
|
+
tools=[code_execution_tool], # Only code execution tool
|
|
391
|
+
)
|
|
334
392
|
|
|
335
|
-
|
|
393
|
+
# Create team
|
|
394
|
+
team = NPCTeam(npcs=npcs, foreman=coordinator)
|
|
395
|
+
return team
|
|
336
396
|
|
|
337
|
-
5 Image Analysis: It likely imports a functionality (analyze_image_base) to analyze the screenshot for specific content or information.
|
|
338
397
|
|
|
339
|
-
|
|
340
|
-
|
|
398
|
+
def main():
|
|
399
|
+
# Create and save test data
|
|
400
|
+
data_path, sales_data = create_test_data()
|
|
341
401
|
|
|
402
|
+
# Initialize team
|
|
403
|
+
team = create_analytics_team()
|
|
342
404
|
|
|
343
|
-
|
|
405
|
+
# Run analysis - updated prompt to reflect code execution approach
|
|
406
|
+
results = team.orchestrate(
|
|
407
|
+
f"""
|
|
408
|
+
Analyze the sales data at {data_path} to:
|
|
409
|
+
1. Identify key performance drivers
|
|
410
|
+
2. Determine if mobile channel growth is significant
|
|
411
|
+
3. Recommend tests to validate growth hypotheses
|
|
344
412
|
|
|
345
|
-
|
|
346
|
-
|
|
413
|
+
Here is a header for the data file at {data_path}:
|
|
414
|
+
{sales_data.head()}
|
|
347
415
|
|
|
348
|
-
|
|
416
|
+
When working with dates, ensure that date columns are converted from raw strings. e.g. use the pd.to_datetime function.
|
|
349
417
|
|
|
350
|
-
Running command: ls -S | head -n 1
|
|
351
418
|
|
|
352
|
-
|
|
419
|
+
When working with potentially messy data, handle null values by using nan versions of numpy functions or
|
|
420
|
+
by filtering them with a mask .
|
|
353
421
|
|
|
354
|
-
|
|
422
|
+
Use Python code execution to perform the analysis - load the data and perform statistical analysis directly.
|
|
423
|
+
"""
|
|
424
|
+
)
|
|
355
425
|
|
|
356
|
-
|
|
426
|
+
print(results)
|
|
357
427
|
|
|
358
|
-
|
|
359
|
-
|
|
428
|
+
# Cleanup
|
|
429
|
+
os.remove(data_path)
|
|
360
430
|
|
|
361
|
-
The best way to implement a linked list in Python is to define a Node class for the individual elements and a LinkedList class to manage the nodes. Here's a basic implementation:
|
|
362
|
-
class Node:
|
|
363
|
-
def __init__(self, data):
|
|
364
|
-
self.data = data # Store the data
|
|
365
|
-
self.next = None # Initialize the next node as None
|
|
366
431
|
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
self.head = None # Initialize the head of the list
|
|
432
|
+
if __name__ == "__main__":
|
|
433
|
+
main()
|
|
370
434
|
|
|
371
|
-
|
|
372
|
-
"""Inserts a new node at the end of the linked list."""
|
|
373
|
-
new_node = Node(data)
|
|
374
|
-
if not self.head: # If list is empty, set new node as head
|
|
375
|
-
self.head = new_node
|
|
376
|
-
return
|
|
377
|
-
last_node = self.head
|
|
378
|
-
while last_node.next: # Traverse to the last node
|
|
379
|
-
last_node = last_node.next
|
|
380
|
-
last_node.next = new_node # Link the new node
|
|
435
|
+
```
|
|
381
436
|
|
|
382
|
-
def display(self):
|
|
383
|
-
"""Prints the linked list."""
|
|
384
|
-
current = self.head
|
|
385
|
-
while current:
|
|
386
|
-
print(current.data, end=" -> ")
|
|
387
|
-
current = current.next
|
|
388
|
-
print("None")
|
|
389
437
|
|
|
390
|
-
# Example usage:
|
|
391
|
-
llist = LinkedList()
|
|
392
|
-
llist.insert(1)
|
|
393
|
-
llist.insert(2)
|
|
394
|
-
llist.insert(3)
|
|
395
|
-
llist.display() # Output: 1 -> 2 -> 3 -> None
|
|
396
|
-
Explanation:
|
|
397
438
|
|
|
398
|
-
|
|
439
|
+
## Star History
|
|
399
440
|
|
|
400
|
-
|
|
441
|
+
[](https://star-history.com/#cagostino/npcsh&Date)
|
|
401
442
|
|
|
402
|
-
|
|
443
|
+
## Installation
|
|
444
|
+
`npcsh` is available on PyPI and can be installed using pip. Before installing, make sure you have the necessary dependencies installed on your system. Below are the instructions for installing such dependencies on Linux, Mac, and Windows. If you find any other dependencies that are needed, please let us know so we can update the installation instructions to be more accommodating.
|
|
403
445
|
|
|
404
|
-
|
|
405
|
-
|
|
446
|
+
### Linux install
|
|
447
|
+
```bash
|
|
448
|
+
|
|
449
|
+
sudo apt-get install espeak
|
|
450
|
+
sudo apt-get install portaudio19-dev python3-pyaudio
|
|
451
|
+
sudo apt-get install alsa-base alsa-utils
|
|
452
|
+
sudo apt-get install libcairo2-dev
|
|
453
|
+
sudo apt-get install libgirepository1.0-dev
|
|
454
|
+
sudo apt-get install ffmpeg
|
|
455
|
+
|
|
456
|
+
#And if you don't have ollama installed, use this:
|
|
457
|
+
curl -fsSL https://ollama.com/install.sh | sh
|
|
406
458
|
|
|
459
|
+
ollama pull llama3.2
|
|
460
|
+
ollama pull llava:7b
|
|
461
|
+
ollama pull nomic-embed-text
|
|
462
|
+
pip install npcsh
|
|
463
|
+
```
|
|
464
|
+
If you'd like to install the abilities to use STT and TTS, additionall install the following
|
|
465
|
+
```
|
|
466
|
+
pip install openai-whisper pyaudio gtts playsound
|
|
407
467
|
```
|
|
408
468
|
|
|
409
|
-
```npcsh
|
|
410
|
-
npcsh> In which of the files in the current directory is "Simon Bolivar" mentioned?
|
|
411
469
|
|
|
412
|
-
The user's query was about "Simon Bolivar." Based on the relevant context from the codebase, I found specific mentions and relevant information regarding Simon Bolívar in the documentation provided
|
|
413
|
-
in the README.md file.
|
|
414
|
-
From the relevant context, specifically in the section of README.md, it shared a code example that demonstrates how to load an NPC (Non-Player Character) named "Simon Bolivar" using SQLite and
|
|
415
|
-
answer a question about project updates. This code snippet indicates that "Simon Bolivar" is represented in the NPC system, allowing for interaction through questions to retrieve information.
|
|
416
|
-
Additionally, there is another passage that directly answers the query. It reads:
|
|
417
470
|
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
471
|
+
### Mac install
|
|
472
|
+
```bash
|
|
473
|
+
brew install portaudio
|
|
474
|
+
brew install ffmpeg
|
|
475
|
+
brew install ollama
|
|
476
|
+
brew services start ollama
|
|
477
|
+
brew install pygobject3
|
|
478
|
+
ollama pull llama3.2
|
|
479
|
+
ollama pull llava:7b
|
|
480
|
+
ollama pull nomic-embed-text
|
|
481
|
+
pip install npcsh
|
|
482
|
+
```
|
|
483
|
+
### Windows Install
|
|
484
|
+
|
|
485
|
+
Download and install ollama exe.
|
|
424
486
|
|
|
487
|
+
Then, in a powershell. Download and install ffmpeg.
|
|
425
488
|
|
|
426
489
|
```
|
|
490
|
+
ollama pull llama3.2
|
|
491
|
+
ollama pull llava:7b
|
|
492
|
+
ollama pull nomic-embed-text
|
|
493
|
+
pip install npcsh
|
|
494
|
+
```
|
|
495
|
+
As of now, npcsh appears to work well with some of the core functionalities like /ots and /whisper.
|
|
427
496
|
|
|
428
497
|
|
|
429
|
-
|
|
430
|
-
npcsh>what process is using the most ram on my computer right now?
|
|
498
|
+
### Fedora Install (under construction)
|
|
431
499
|
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
caug 1040046 1.1 2.7 1226193464 1598980 ? SNl Dec22 23:30 /opt/vivaldi/vivaldi-bin --type=renderer --crashpad-handler-pid=4529 --enable-crash-reporter=,stable --change-stack-guard-on-fork=
|
|
436
|
-
caug 683148 0.0 1.9 2047464 1137232 ? SNl Dec13 10:11 io.elementary.appcenter
|
|
437
|
-
ollama 1884 0.0 1.8 3683920 1051020 ? SNsl Dec02 0:30 /usr/local/bin/ollama serve
|
|
438
|
-
caug 1191836 5.3 1.6 7346052 964988 pts/17 Sl+ 22:57 0:22 /home/caug/.pyenv/versions/npcsh/bin/python /home/caug/.pyenv/versions/3.11.0/envs/npcsh/bin/npcsh
|
|
439
|
-
caug 1145749 0.0 1.5 7068128 892500 pts/13 Sl+ Dec22 0:15 /home/caug/.pyenv/versions/3.11.0/bin/python3.11 /home/caug/.pyenv/versions/3.11.0/bin/npcsh
|
|
440
|
-
caug 493912 0.0 1.5 7050968 890012 pts/11 Sl+ Dec10 0:14 /home/caug/.pyenv/versions/npcsh/bin/python /home/caug/.pyenv/versions/npcsh/bin/npcsh
|
|
441
|
-
caug 4522 0.9 1.5 34391236 874756 ? SNLl Dec02 295:06 /opt/vivaldi/vivaldi-bin
|
|
442
|
-
caug 3995 0.7 1.1 6051812 683916 ? R<sl Dec02 221:19 /usr/bin/gnome-shell
|
|
443
|
-
caug 501721 0.3 1.0 1212584660 626248 ? SNl Dec10 60:22 /opt/vivaldi/vivaldi-bin --type=renderer --crashpad-handler-pid=4529 --enable-crash-reporter=,stable --change-stack-guard-on-fork=
|
|
500
|
+
python3-dev (fixes hnswlib issues with chroma db)
|
|
501
|
+
xhost + (pyautogui)
|
|
502
|
+
python-tkinter (pyautogui)
|
|
444
503
|
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
504
|
+
## Startup Configuration and Project Structure
|
|
505
|
+
After it has been pip installed, `npcsh` can be used as a command line tool. Start it by typing:
|
|
506
|
+
```bash
|
|
507
|
+
npcsh
|
|
508
|
+
```
|
|
509
|
+
When initialized, `npcsh` will generate a .npcshrc file in your home directory that stores your npcsh settings.
|
|
510
|
+
Here is an example of what the .npcshrc file might look like after this has been run.
|
|
511
|
+
```bash
|
|
512
|
+
# NPCSH Configuration File
|
|
513
|
+
export NPCSH_INITIALIZED=1
|
|
514
|
+
export NPCSH_CHAT_PROVIDER='ollama'
|
|
515
|
+
export NPCSH_CHAT_MODEL='llama3.2'
|
|
516
|
+
export NPCSH_DB_PATH='~/npcsh_history.db'
|
|
448
517
|
```
|
|
518
|
+
`npcsh` also comes with a set of tools and NPCs that are used in processing. It will generate a folder at ~/.npcsh/ that contains the tools and NPCs that are used in the shell and these will be used in the absence of other project-specific ones. Additionally, `npcsh` records interactions and compiled information about npcs within a local SQLite database at the path specified in the .npcshrc file. This will default to ~/npcsh_history.db if not specified. When the data mode is used to load or analyze data in CSVs or PDFs, these data will be stored in the same database for future reference.
|
|
449
519
|
|
|
450
|
-
|
|
451
|
-
npcsh> can you generate an image of a calico cat and an orange tabby cat in the style of a japanese woodblock print where the two cats are engaged in a samurai battle but instead of samurai swords they are holding cat toys that are like wands with felt birds on the ends of them on strings
|
|
520
|
+
The installer will automatically add this file to your shell config, but if it does not do so successfully for whatever reason you can add the following to your .bashrc or .zshrc:
|
|
452
521
|
|
|
453
|
-
|
|
522
|
+
```bash
|
|
523
|
+
# Source NPCSH configuration
|
|
524
|
+
if [ -f ~/.npcshrc ]; then
|
|
525
|
+
. ~/.npcshrc
|
|
526
|
+
fi
|
|
454
527
|
```
|
|
455
|
-

|
|
456
528
|
|
|
529
|
+
We support inference via `openai`, `anthropic`, `ollama`,`gemini`, `deepseek`, and `openai-like` APIs. The default provider must be one of `['openai','anthropic','ollama', 'gemini', 'deepseek', 'openai-like']` and the model must be one available from those providers.
|
|
457
530
|
|
|
458
|
-
|
|
459
|
-
the ability to generate tool call sequences when asked:
|
|
460
|
-
```npcsh
|
|
531
|
+
To use tools that require API keys, create an `.env` file up in the folder where you are working or place relevant API keys as env variables in your ~/.npcshrc. If you already have these API keys set in a ~/.bashrc or a ~/.zshrc or similar files, you need not additionally add them to ~/.npcshrc or to an `.env` file. Here is an example of what an `.env` file might look like:
|
|
461
532
|
|
|
462
|
-
|
|
533
|
+
```bash
|
|
534
|
+
export OPENAI_API_KEY="your_openai_key"
|
|
535
|
+
export ANTHROPIC_API_KEY="your_anthropic_key"
|
|
536
|
+
export DEEPSEEK_API_KEY='your_deepseek_key'
|
|
537
|
+
export GEMINI_API_KEY='your_gemini_key'
|
|
538
|
+
export PERPLEXITY_API_KEY='your_perplexity_key'
|
|
539
|
+
```
|
|
463
540
|
|
|
464
|
-
handle_tool_call invoked with tool_name: generic_search_tool
|
|
465
|
-
Tool found: generic_search_tool
|
|
466
|
-
handle_tool_call invoked with tool_name: image_generation_tool
|
|
467
|
-
Tool found: image_generation_tool
|
|
468
541
|
|
|
469
|
-
|
|
470
|
-
|
|
542
|
+
Individual npcs can also be set to use different models and providers by setting the `model` and `provider` keys in the npc files.
|
|
543
|
+
Once initialized and set up, you will find the following in your ~/.npcsh directory:
|
|
544
|
+
```bash
|
|
545
|
+
~/.npcsh/
|
|
546
|
+
├── npc_team/ # Global NPCs
|
|
547
|
+
│ ├── tools/ # Global tools
|
|
548
|
+
│ └── assembly_lines/ # Workflow pipelines
|
|
471
549
|
|
|
472
|
-
It seems that you've generated an image. If you have any questions or need assistance related to this image, please let me know how I can help!
|
|
473
550
|
```
|
|
474
|
-
and
|
|
475
|
-
|
|
551
|
+
For cases where you wish to set up a project specific set of NPCs, tools, and assembly lines, add a `npc_team` directory to your project and `npcsh` should be able to pick up on its presence, like so:
|
|
552
|
+
```bash
|
|
553
|
+
./npc_team/ # Project-specific NPCs
|
|
554
|
+
├── tools/ # Project tools #example tool next
|
|
555
|
+
│ └── example.tool
|
|
556
|
+
└── assembly_lines/ # Project workflows
|
|
557
|
+
└── example.pipe
|
|
558
|
+
└── models/ # Project workflows
|
|
559
|
+
└── example.model
|
|
560
|
+
└── example1.npc # Example NPC
|
|
561
|
+
└── example2.npc # Example NPC
|
|
562
|
+
└── example1.ctx # Example NPC
|
|
563
|
+
└── example2.ctx # Example NPC
|
|
564
|
+
|
|
565
|
+
```
|
|
566
|
+
|
|
567
|
+
## IMPORTANT: migrations and deprecations
|
|
568
|
+
|
|
569
|
+
### v0.3.4
|
|
570
|
+
-In v0.3.4, the structure for tools was adjusted. If you have made custom tools please refer to the structure within npc_compiler to ensure that they are in the correct format. Otherwise, do the following
|
|
571
|
+
```bash
|
|
572
|
+
rm ~/.npcsh/npc_team/tools/*.tool
|
|
573
|
+
```
|
|
574
|
+
and then
|
|
575
|
+
```bash
|
|
576
|
+
npcsh
|
|
577
|
+
```
|
|
578
|
+
and the updated tools will be copied over into the correct location.
|
|
476
579
|
|
|
580
|
+
### v0.3.5
|
|
581
|
+
-Version 0.3.5 included a complete overhaul and refactoring of the llm_funcs module. This was done to make it not as horribly long and to make it easier to add new models and providers
|
|
477
582
|
|
|
478
583
|
|
|
584
|
+
-in version 0.3.5, a change was introduced to the database schema for messages to add npcs, models, providers, and associated attachments to data. If you have used `npcsh` before this version, you will need to run this migration script to update your database schema: [migrate_conversation_history_v0.3.5.py](https://github.com/cagostino/npcsh/blob/cfb9dc226e227b3e888f3abab53585693e77f43d/npcsh/migrations/migrate_conversation_history_%3Cv0.3.4-%3Ev0.3.5.py)
|
|
479
585
|
|
|
586
|
+
-additionally, NPCSH_MODEL and NPCSH_PROVIDER have been renamed to NPCSH_CHAT_MODEL and NPCSH_CHAT_PROVIDER
|
|
587
|
+
to provide a more consistent naming scheme now that we have additionally introduced `NPCSH_VISION_MODEL` and `NPCSH_VISION_PROVIDER`, `NPCSH_EMBEDDING_MODEL`, `NPCSH_EMBEDDING_PROVIDER`, `NPCSH_REASONING_MODEL`, `NPCSH_REASONING_PROVIDER`, `NPCSH_IMAGE_GEN_MODEL`, and `NPCSH_IMAGE_GEN_PROVIDER`.
|
|
588
|
+
- In addition, we have added NPCSH_API_URL to better accommodate openai-like apis that require a specific url to be set as well as `NPCSH_STREAM_OUTPUT` to indicate whether or not to use streaming in one's responses. It will be set to 0 (false) by default as it has only been tested and verified for a small subset of the models and providers we have available (openai, anthropic, and ollama). If you try it and run into issues, please post them here so we can correct them as soon as possible !
|
|
480
589
|
|
|
481
590
|
|
|
591
|
+
## npcsh usage
|
|
592
|
+
In the `npcsh` shell, users can ask LLMs questions, have LLMLs execute commands or use tools, or utilize macros that provide additional functionality. When a user does not invoke a specific macro, the shell will automatically decide which tool to use based on the user's input. Here are some examples of things one might ask the npcsh shell.
|
|
482
593
|
|
|
483
|
-
|
|
484
|
-
An important facet that makes `npcsh` so powerful is the ability to pipe outputs from one tool call to another. This allows for the chaining of commands and the creation of complex workflows. For example, you can use the output of a search to generate an image, or you can use the output of an image analysis to generate a report. Here is an example of how this might look in practice:
|
|
594
|
+
Here are some examples of how you can use npcsh
|
|
485
595
|
```npcsh
|
|
486
|
-
npcsh>
|
|
596
|
+
npcsh>Who was Simon Bolivar?
|
|
597
|
+
|
|
598
|
+
Simón Bolívar, often referred to as "El Libertador," was a Venezuelan military and political leader who played a key role in the Latin American wars of independence against Spanish
|
|
599
|
+
colonial rule in the early 19th century. He was born on July 24, 1783, in Caracas, Venezuela, into a wealthy Creole family.
|
|
600
|
+
Bolívar's vision extended beyond merely liberating territories; he aspired to unify the newly independent nations of South America into a single federation, which he called "Gran
|
|
601
|
+
Colombia," consisting of present-day Colombia, Venezuela, Ecuador, and Panama. He was known for his exceptional military strategies and leadership, which led to successful campaigns in
|
|
602
|
+
various regions, including the battles of Boyacá, Carabobo, and Ayacucho.
|
|
603
|
+
He faced numerous challenges, including political fragmentation, regional rivalries, and personal betrayals. Despite his initial successes, the unity he sought for Latin America proved
|
|
604
|
+
difficult to achieve, and Gran Colombia eventually disintegrated in the early 1830s.
|
|
605
|
+
Bolívar's influence and legacy extend far beyond his lifetime. He is celebrated in various countries across Latin America as a national hero and symbol of independence. His thoughts on
|
|
606
|
+
liberty, governance, and social issues continue to inspire movements for democracy and social justice in the region. Simón Bolívar died on December 17, 1830, but remains a pivotal figure
|
|
607
|
+
in the history of Latin America.
|
|
608
|
+
```
|
|
609
|
+
|
|
487
610
|
|
|
488
|
-
### Executing Bash Commands
|
|
489
|
-
You can execute bash commands directly within npcsh. The LLM can also generate and execute bash commands based on your natural language requests.
|
|
490
|
-
For example:
|
|
491
611
|
```npcsh
|
|
492
|
-
npcsh>
|
|
612
|
+
npcsh> What is the capital of France?
|
|
613
|
+
The capital of France is Paris. It is the largest city in the country and is known for its rich history, art, culture, and architecture, including famous landmarks such as the Eiffel Tower, Notre-Dame Cathedral, and the Louvre Museum.
|
|
614
|
+
```
|
|
493
615
|
|
|
494
|
-
npcsh
|
|
495
|
-
npcsh>
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
616
|
+
```npcsh
|
|
617
|
+
npcsh> can you tell me a joke about my favorite city?
|
|
618
|
+
|
|
619
|
+
Additional input needed: The user did not specify their favorite city, which is necessary to generate a relevant joke.
|
|
620
|
+
Please tell me your favorite city so I can share a joke about it!: boston
|
|
499
621
|
|
|
622
|
+
Sure! Here's a joke about Boston:
|
|
623
|
+
Why do Bostonians like to play hide and seek?
|
|
624
|
+
Because good luck hiding when everyone yells, "Wicked awesome, ya gotta be here!"
|
|
500
625
|
```
|
|
501
626
|
|
|
502
|
-
|
|
503
|
-
|
|
627
|
+
```npcsh
|
|
628
|
+
npcsh> What's the weather in Tokyo?
|
|
504
629
|
|
|
505
|
-
|
|
506
|
-
```bash
|
|
507
|
-
$ npc 'whats the biggest filei n my computer'
|
|
508
|
-
Loaded .env file from /home/caug/npcww/npcsh
|
|
509
|
-
action chosen: request_input
|
|
510
|
-
explanation given: The user needs to provide more context about their operating system or specify which directory to search for the biggest file.
|
|
630
|
+
handle_tool_call invoked with tool_name: generic_search_tool
|
|
511
631
|
|
|
512
|
-
|
|
513
|
-
Please specify your operating system (e.g., Windows, macOS, Linux) and the directory you want to search in.: linux and root
|
|
514
|
-
action chosen: execute_command
|
|
515
|
-
explanation given: The user is asking for the biggest file on their computer, which can be accomplished with a simple bash command that searches for the largest files.
|
|
516
|
-
sibiji generating command
|
|
517
|
-
LLM suggests the following bash command: sudo find / -type f -exec du -h {} + | sort -rh | head -n 1
|
|
518
|
-
Running command: sudo find / -type f -exec du -h {} + | sort -rh | head -n 1
|
|
519
|
-
Command executed with output: 11G /home/caug/.cache/huggingface/hub/models--state-spaces--mamba-2.8b/blobs/39911a8470a2b256016b57cc71c68e0f96751cba5b229216ab1f4f9d82096a46
|
|
632
|
+
The weather in Tokyo, Japan, is expected to be mixed with sun and clouds. Here are some details from the recent forecasts:
|
|
520
633
|
|
|
521
|
-
|
|
634
|
+
• Highs: Around 53°F to 58°F with a few variations depending on the day.
|
|
522
635
|
|
|
523
|
-
|
|
524
|
-
2. **Disk Usage**: For each file found, it calculates its disk usage in a human-readable format (`du -h`).
|
|
525
|
-
3. **Sort**: It sorts the results in reverse order based on size (`sort -rh`), so the largest files appear first.
|
|
526
|
-
4. **Head**: Finally, it retrieves just the largest file using `head -n 1`.
|
|
636
|
+
• Lows: Approximately 35°F to 40°F.
|
|
527
637
|
|
|
528
|
-
|
|
638
|
+
• Winds: Generally from the northwest at 5 to 10 mph.
|
|
639
|
+
|
|
640
|
+
• Condition: Mainly sunny, but there may be periods of clouds and some overcast conditions throughout the week.
|
|
641
|
+
For more detailed information, you can refer to sources like The Weather Channel or AccuWeather.
|
|
642
|
+
/home/caug/npcww/npcsh:npcsh>
|
|
529
643
|
|
|
530
644
|
```
|
|
645
|
+
In the below example, the code that was open was the screen capture analysis tool itself.
|
|
646
|
+
```npcsh
|
|
647
|
+
npcsh> Can you explain what the code does in the currently open VS code window?
|
|
531
648
|
|
|
532
|
-
|
|
533
|
-
$ npc 'whats the weather in tokyo'
|
|
534
|
-
Loaded .env file from /home/caug/npcww/npcsh
|
|
535
|
-
action chosen: invoke_tool
|
|
536
|
-
explanation given: The user's request for the current weather in Tokyo requires up-to-date information, which can be best obtained through an internet search.
|
|
537
|
-
Tool found: internet_search
|
|
538
|
-
Executing tool with input values: {'query': 'whats the weather in tokyo'}
|
|
539
|
-
QUERY in tool whats the weather in tokyo
|
|
540
|
-
[{'title': 'Tokyo, Tokyo, Japan Weather Forecast | AccuWeather', 'href': 'https://www.accuweather.com/en/jp/tokyo/226396/weather-forecast/226396', 'body': 'Tokyo, Tokyo, Japan Weather Forecast, with current conditions, wind, air quality, and what to expect for the next 3 days.'}, {'title': 'Tokyo, Japan 14 day weather forecast - timeanddate.com', 'href': 'https://www.timeanddate.com/weather/japan/tokyo/ext', 'body': 'Tokyo Extended Forecast with high and low temperatures. °F. Last 2 weeks of weather'}, {'title': 'Tokyo, Tokyo, Japan Current Weather | AccuWeather', 'href': 'https://www.accuweather.com/en/jp/tokyo/226396/current-weather/226396', 'body': 'Current weather in Tokyo, Tokyo, Japan. Check current conditions in Tokyo, Tokyo, Japan with radar, hourly, and more.'}, {'title': 'Weather in Tokyo, Japan - timeanddate.com', 'href': 'https://www.timeanddate.com/weather/japan/tokyo', 'body': 'Current weather in Tokyo and forecast for today, tomorrow, and next 14 days'}, {'title': 'Tokyo Weather Forecast Today', 'href': 'https://japanweather.org/tokyo', 'body': "For today's mild weather in Tokyo, with temperatures between 13ºC to 16ºC (55.4ºF to 60.8ºF), consider wearing: - Comfortable jeans or slacks - Sun hat (if spending time outdoors) - Lightweight sweater or cardigan - Long-sleeve shirt or blouse. Temperature. Day. 14°C. Night. 10°C. Morning. 10°C. Afternoon."}] <class 'list'>
|
|
541
|
-
RESULTS in tool ["Tokyo, Tokyo, Japan Weather Forecast, with current conditions, wind, air quality, and what to expect for the next 3 days.\n Citation: https://www.accuweather.com/en/jp/tokyo/226396/weather-forecast/226396\n\n\n\nTokyo Extended Forecast with high and low temperatures. °F. Last 2 weeks of weather\n Citation: https://www.timeanddate.com/weather/japan/tokyo/ext\n\n\n\nCurrent weather in Tokyo, Tokyo, Japan. Check current conditions in Tokyo, Tokyo, Japan with radar, hourly, and more.\n Citation: https://www.accuweather.com/en/jp/tokyo/226396/current-weather/226396\n\n\n\nCurrent weather in Tokyo and forecast for today, tomorrow, and next 14 days\n Citation: https://www.timeanddate.com/weather/japan/tokyo\n\n\n\nFor today's mild weather in Tokyo, with temperatures between 13ºC to 16ºC (55.4ºF to 60.8ºF), consider wearing: - Comfortable jeans or slacks - Sun hat (if spending time outdoors) - Lightweight sweater or cardigan - Long-sleeve shirt or blouse. Temperature. Day. 14°C. Night. 10°C. Morning. 10°C. Afternoon.\n Citation: https://japanweather.org/tokyo\n\n\n", 'https://www.accuweather.com/en/jp/tokyo/226396/weather-forecast/226396\n\nhttps://www.timeanddate.com/weather/japan/tokyo/ext\n\nhttps://www.accuweather.com/en/jp/tokyo/226396/current-weather/226396\n\nhttps://www.timeanddate.com/weather/japan/tokyo\n\nhttps://japanweather.org/tokyo\n']
|
|
542
|
-
The current weather in Tokyo, Japan is mild, with temperatures ranging from 13°C to 16°C (approximately 55.4°F to 60.8°F). For today's conditions, it is suggested to wear comfortable jeans or slacks, a lightweight sweater or cardigan, and a long-sleeve shirt or blouse, especially if spending time outdoors. The temperature today is expected to reach a high of 14°C (57.2°F) during the day and a low of 10°C (50°F) at night.
|
|
649
|
+
handle_tool_call invoked with tool_name: screen_capture_analysis_tool
|
|
543
650
|
|
|
544
|
-
|
|
545
|
-
- [AccuWeather Forecast](https://www.accuweather.com/en/jp/tokyo/226396/weather-forecast/226396)
|
|
546
|
-
- [Time and Date Extended Forecast](https://www.timeanddate.com/weather/japan/tokyo/ext)
|
|
547
|
-
- [Current Weather on AccuWeather](https://www.accuweather.com/en/jp/tokyo/226396/current-weather/226396)
|
|
548
|
-
- [More on Time and Date](https://www.timeanddate.com/weather/japan/tokyo)
|
|
549
|
-
- [Japan Weather](https://japanweather.org/tokyo)
|
|
550
|
-
```
|
|
651
|
+
Screenshot saved as screenshot_20241223_225815.png
|
|
551
652
|
|
|
653
|
+
The code in the visible section of your VS Code window appears to be a script for capturing and analyzing screenshots. Here's a breakdown of what the code does:
|
|
552
654
|
|
|
553
|
-
|
|
554
|
-
|
|
655
|
+
1 Import Necessary Libraries: It imports required libraries like system, datetime, and pyautogui, which are essential for capturing screenshots and handling date-time operations.
|
|
656
|
+
|
|
657
|
+
2 Capture the Screen: The code captures the current screen using pyautogui.screenshot(), taking a screenshot of the entire screen.
|
|
658
|
+
|
|
659
|
+
3 File Management: It generates a unique filename for the screenshot using the current date and time. This ensures that each screenshot file is distinct.
|
|
660
|
+
|
|
661
|
+
4 Save the Screenshot: The captured screenshot is saved to a file with the generated filename.
|
|
662
|
+
|
|
663
|
+
5 Image Analysis: It likely imports a functionality (analyze_image_base) to analyze the screenshot for specific content or information.
|
|
664
|
+
|
|
665
|
+
6 Run the Analysis: While not explicitly shown, it appears to call this analysis function to process the saved screenshot.
|
|
666
|
+
In summary, this code automates the process of capturing a screenshot, saving it with a unique filename, and analyzing that image for specific content or patterns.
|
|
555
667
|
|
|
556
|
-
on Ubuntu:
|
|
557
|
-
```bash
|
|
558
|
-
sudo apt update && sudo apt install redis-server
|
|
559
|
-
redis-server
|
|
560
|
-
```
|
|
561
668
|
|
|
562
|
-
on macOS:
|
|
563
|
-
```bash
|
|
564
|
-
brew install redis
|
|
565
|
-
redis-server
|
|
566
669
|
```
|
|
567
|
-
|
|
670
|
+
|
|
671
|
+
```npcsh
|
|
672
|
+
npcsh> What is the biggest file in my current folder?
|
|
673
|
+
|
|
674
|
+
LLM suggests the following bash command: ls -S | head -n 1
|
|
675
|
+
|
|
676
|
+
Running command: ls -S | head -n 1
|
|
677
|
+
|
|
678
|
+
Command executed with output: image_20241111_000033.png
|
|
679
|
+
|
|
680
|
+
I ran the command ls -S | head -n 1 in your current folder. This command sorts all files by size in descending order and then selects the first entry, which represents the largest file. The result of this operation shows that the biggest file in your current folder is image_20241111_000033.png.
|
|
681
|
+
|
|
682
|
+
```
|
|
683
|
+
|
|
684
|
+
```npcsh
|
|
685
|
+
npcsh>What is the best way to implement a linked list in Python?
|
|
686
|
+
|
|
687
|
+
The best way to implement a linked list in Python is to define a Node class for the individual elements and a LinkedList class to manage the nodes. Here's a basic implementation:
|
|
688
|
+
class Node:
|
|
689
|
+
def __init__(self, data):
|
|
690
|
+
self.data = data # Store the data
|
|
691
|
+
self.next = None # Initialize the next node as None
|
|
692
|
+
|
|
693
|
+
class LinkedList:
|
|
694
|
+
def __init__(self):
|
|
695
|
+
self.head = None # Initialize the head of the list
|
|
696
|
+
|
|
697
|
+
def insert(self, data):
|
|
698
|
+
"""Inserts a new node at the end of the linked list."""
|
|
699
|
+
new_node = Node(data)
|
|
700
|
+
if not self.head: # If list is empty, set new node as head
|
|
701
|
+
self.head = new_node
|
|
702
|
+
return
|
|
703
|
+
last_node = self.head
|
|
704
|
+
while last_node.next: # Traverse to the last node
|
|
705
|
+
last_node = last_node.next
|
|
706
|
+
last_node.next = new_node # Link the new node
|
|
707
|
+
|
|
708
|
+
def display(self):
|
|
709
|
+
"""Prints the linked list."""
|
|
710
|
+
current = self.head
|
|
711
|
+
while current:
|
|
712
|
+
print(current.data, end=" -> ")
|
|
713
|
+
current = current.next
|
|
714
|
+
print("None")
|
|
715
|
+
|
|
716
|
+
# Example usage:
|
|
717
|
+
llist = LinkedList()
|
|
718
|
+
llist.insert(1)
|
|
719
|
+
llist.insert(2)
|
|
720
|
+
llist.insert(3)
|
|
721
|
+
llist.display() # Output: 1 -> 2 -> 3 -> None
|
|
722
|
+
Explanation:
|
|
723
|
+
|
|
724
|
+
1 Node Class: Represents each node in the linked list. It has two attributes: data (to store the value) and next (to point to the next node).
|
|
725
|
+
|
|
726
|
+
2 LinkedList Class: Manages the linked list. It has a head attribute pointing to the first node.
|
|
727
|
+
|
|
728
|
+
3 Insert Method: Adds a new node to the end of the list.
|
|
729
|
+
|
|
730
|
+
4 Display Method: Prints the elements of the list in order.
|
|
731
|
+
This implementation is simple and can be extended with more methods (e.g., deletion, searching, etc.) as needed.
|
|
732
|
+
|
|
733
|
+
```
|
|
734
|
+
|
|
735
|
+
```npcsh
|
|
736
|
+
npcsh> In which of the files in the current directory is "Simon Bolivar" mentioned?
|
|
737
|
+
|
|
738
|
+
The user's query was about "Simon Bolivar." Based on the relevant context from the codebase, I found specific mentions and relevant information regarding Simon Bolívar in the documentation provided
|
|
739
|
+
in the README.md file.
|
|
740
|
+
From the relevant context, specifically in the section of README.md, it shared a code example that demonstrates how to load an NPC (Non-Player Character) named "Simon Bolivar" using SQLite and
|
|
741
|
+
answer a question about project updates. This code snippet indicates that "Simon Bolivar" is represented in the NPC system, allowing for interaction through questions to retrieve information.
|
|
742
|
+
Additionally, there is another passage that directly answers the query. It reads:
|
|
743
|
+
|
|
744
|
+
▌ "Simón Bolívar, often referred to as 'El Libertador,' was a Venezuelan military and political leader who played a key role in the Latin American wars of independence against Spanish colonial
|
|
745
|
+
▌ rule in the early 19th century."
|
|
746
|
+
This information highlights Bolívar's significant historical role and achievements, answering the query regarding who Simon Bolivar was, thus making it clear that he was a prominent leader in Latin
|
|
747
|
+
America's fight for independence.
|
|
748
|
+
To summarize, I arrived at this answer by examining the provided context in the README.md file that includes both the coding example that initializes an NPC named "Simon Bolivar" and a historical
|
|
749
|
+
overview of his contributions, which directly addresses the user's inquiry.
|
|
750
|
+
|
|
751
|
+
|
|
752
|
+
```
|
|
753
|
+
|
|
754
|
+
|
|
755
|
+
```npcsh
|
|
756
|
+
npcsh>what process is using the most ram on my computer right now?
|
|
757
|
+
|
|
758
|
+
LLM suggests the following bash command: ps aux --sort=-%mem | head -n 10
|
|
759
|
+
Running command: ps aux --sort=-%mem | head -n 10
|
|
760
|
+
Command executed with output: USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND
|
|
761
|
+
caug 1040046 1.1 2.7 1226193464 1598980 ? SNl Dec22 23:30 /opt/vivaldi/vivaldi-bin --type=renderer --crashpad-handler-pid=4529 --enable-crash-reporter=,stable --change-stack-guard-on-fork=
|
|
762
|
+
caug 683148 0.0 1.9 2047464 1137232 ? SNl Dec13 10:11 io.elementary.appcenter
|
|
763
|
+
ollama 1884 0.0 1.8 3683920 1051020 ? SNsl Dec02 0:30 /usr/local/bin/ollama serve
|
|
764
|
+
caug 1191836 5.3 1.6 7346052 964988 pts/17 Sl+ 22:57 0:22 /home/caug/.pyenv/versions/npcsh/bin/python /home/caug/.pyenv/versions/3.11.0/envs/npcsh/bin/npcsh
|
|
765
|
+
caug 1145749 0.0 1.5 7068128 892500 pts/13 Sl+ Dec22 0:15 /home/caug/.pyenv/versions/3.11.0/bin/python3.11 /home/caug/.pyenv/versions/3.11.0/bin/npcsh
|
|
766
|
+
caug 493912 0.0 1.5 7050968 890012 pts/11 Sl+ Dec10 0:14 /home/caug/.pyenv/versions/npcsh/bin/python /home/caug/.pyenv/versions/npcsh/bin/npcsh
|
|
767
|
+
caug 4522 0.9 1.5 34391236 874756 ? SNLl Dec02 295:06 /opt/vivaldi/vivaldi-bin
|
|
768
|
+
caug 3995 0.7 1.1 6051812 683916 ? R<sl Dec02 221:19 /usr/bin/gnome-shell
|
|
769
|
+
caug 501721 0.3 1.0 1212584660 626248 ? SNl Dec10 60:22 /opt/vivaldi/vivaldi-bin --type=renderer --crashpad-handler-pid=4529 --enable-crash-reporter=,stable --change-stack-guard-on-fork=
|
|
770
|
+
|
|
771
|
+
To determine which processes are using the most RAM on your computer, I executed the command ps aux --sort=-%mem | head -n 10.
|
|
772
|
+
This command lists all running processes (ps aux), sorts them by memory usage in descending order (--sort=-%mem), and then retrieves the top 10 entries (head -n 10).
|
|
773
|
+
The output shows the processes currently consuming the most memory. In the results, the process using the most RAM is Vivaldi's renderer, followed by other applications such as the Elementary App Center, Ollama server, and several instances of Python related to your NPC shell.
|
|
774
|
+
```
|
|
775
|
+
|
|
776
|
+
```npcsh
|
|
777
|
+
npcsh> can you generate an image of a calico cat and an orange tabby cat in the style of a japanese woodblock print where the two cats are engaged in a samurai battle but instead of samurai swords they are holding cat toys that are like wands with felt birds on the ends of them on strings
|
|
778
|
+
|
|
779
|
+
handle_tool_call invoked with tool_name: image_generation_tool
|
|
780
|
+
```
|
|
781
|
+

|
|
782
|
+
|
|
783
|
+
|
|
784
|
+
In addition to its ability to execute one-off tasks or tool calls, npcsh also has
|
|
785
|
+
the ability to generate tool call sequences when asked:
|
|
786
|
+
```npcsh
|
|
787
|
+
|
|
788
|
+
npcsh> can you use a tool sequence to find the gdp of russia in 2024 and then to use that information to generate an image?
|
|
789
|
+
|
|
790
|
+
handle_tool_call invoked with tool_name: generic_search_tool
|
|
791
|
+
Tool found: generic_search_tool
|
|
792
|
+
handle_tool_call invoked with tool_name: image_generation_tool
|
|
793
|
+
Tool found: image_generation_tool
|
|
794
|
+
|
|
795
|
+
The Gross Domestic Product (GDP) of Russia is estimated to be $8.311 trillion by the end of 2024, according to World Economics.
|
|
796
|
+
This figure is significantly larger than the official estimate of $5.804 trillion published by the World Bank for the end of 2023.
|
|
797
|
+
|
|
798
|
+
It seems that you've generated an image. If you have any questions or need assistance related to this image, please let me know how I can help!
|
|
799
|
+
```
|
|
800
|
+
and then the associated image :
|
|
801
|
+

|
|
802
|
+
|
|
803
|
+
|
|
804
|
+
|
|
805
|
+
|
|
806
|
+
|
|
807
|
+
|
|
808
|
+
|
|
809
|
+
### Piping outputs
|
|
810
|
+
An important facet that makes `npcsh` so powerful is the ability to pipe outputs from one tool call to another. This allows for the chaining of commands and the creation of complex workflows. For example, you can use the output of a search to generate an image, or you can use the output of an image analysis to generate a report. Here is an example of how this might look in practice:
|
|
811
|
+
```npcsh
|
|
812
|
+
npcsh> what is the gdp of russia in 2024? | /vixynt 'generate an image that contains {0}'
|
|
813
|
+
|
|
814
|
+
### Executing Bash Commands
|
|
815
|
+
You can execute bash commands directly within npcsh. The LLM can also generate and execute bash commands based on your natural language requests.
|
|
816
|
+
For example:
|
|
817
|
+
```npcsh
|
|
818
|
+
npcsh> ls -l
|
|
819
|
+
|
|
820
|
+
npcsh> cp file1.txt file2.txt
|
|
821
|
+
npcsh> mv file1.txt file2.txt
|
|
822
|
+
npcsh> mkdir new_directory
|
|
823
|
+
npcsh> git status
|
|
824
|
+
npcsh> vim file.txt
|
|
825
|
+
|
|
826
|
+
```
|
|
827
|
+
|
|
828
|
+
### NPC CLI
|
|
829
|
+
When npcsh is installed, it comes with the `npc` cli as well. The `npc` cli has various commands to make initializing and serving NPC projects easier.
|
|
830
|
+
|
|
831
|
+
Users can make queries like so:
|
|
832
|
+
```bash
|
|
833
|
+
$ npc 'whats the biggest filei n my computer'
|
|
834
|
+
Loaded .env file from /home/caug/npcww/npcsh
|
|
835
|
+
action chosen: request_input
|
|
836
|
+
explanation given: The user needs to provide more context about their operating system or specify which directory to search for the biggest file.
|
|
837
|
+
|
|
838
|
+
Additional input needed: The user did not specify their operating system or the directory to search for the biggest file, making it unclear how to execute the command.
|
|
839
|
+
Please specify your operating system (e.g., Windows, macOS, Linux) and the directory you want to search in.: linux and root
|
|
840
|
+
action chosen: execute_command
|
|
841
|
+
explanation given: The user is asking for the biggest file on their computer, which can be accomplished with a simple bash command that searches for the largest files.
|
|
842
|
+
sibiji generating command
|
|
843
|
+
LLM suggests the following bash command: sudo find / -type f -exec du -h {} + | sort -rh | head -n 1
|
|
844
|
+
Running command: sudo find / -type f -exec du -h {} + | sort -rh | head -n 1
|
|
845
|
+
Command executed with output: 11G /home/caug/.cache/huggingface/hub/models--state-spaces--mamba-2.8b/blobs/39911a8470a2b256016b57cc71c68e0f96751cba5b229216ab1f4f9d82096a46
|
|
846
|
+
|
|
847
|
+
I ran a command on your Linux system that searches for the largest files on your computer. The command `sudo find / -type f -exec du -h {} + | sort -rh | head -n 1` performs the following steps:
|
|
848
|
+
|
|
849
|
+
1. **Find Command**: It searches for all files (`-type f`) starting from the root directory (`/`).
|
|
850
|
+
2. **Disk Usage**: For each file found, it calculates its disk usage in a human-readable format (`du -h`).
|
|
851
|
+
3. **Sort**: It sorts the results in reverse order based on size (`sort -rh`), so the largest files appear first.
|
|
852
|
+
4. **Head**: Finally, it retrieves just the largest file using `head -n 1`.
|
|
853
|
+
|
|
854
|
+
The output indicates that the biggest file on your system is located at `/home/caug/.cache/huggingface/hub/models--state-spaces--mamba-2.8b/blobs/39911a8470a2b256016b57cc71c68e0f96751cba5b229216ab1f4f9d82096a46` and is 11GB in size.
|
|
855
|
+
|
|
856
|
+
```
|
|
857
|
+
|
|
858
|
+
```bash
|
|
859
|
+
$ npc 'whats the weather in tokyo'
|
|
860
|
+
Loaded .env file from /home/caug/npcww/npcsh
|
|
861
|
+
action chosen: invoke_tool
|
|
862
|
+
explanation given: The user's request for the current weather in Tokyo requires up-to-date information, which can be best obtained through an internet search.
|
|
863
|
+
Tool found: internet_search
|
|
864
|
+
Executing tool with input values: {'query': 'whats the weather in tokyo'}
|
|
865
|
+
QUERY in tool whats the weather in tokyo
|
|
866
|
+
[{'title': 'Tokyo, Tokyo, Japan Weather Forecast | AccuWeather', 'href': 'https://www.accuweather.com/en/jp/tokyo/226396/weather-forecast/226396', 'body': 'Tokyo, Tokyo, Japan Weather Forecast, with current conditions, wind, air quality, and what to expect for the next 3 days.'}, {'title': 'Tokyo, Japan 14 day weather forecast - timeanddate.com', 'href': 'https://www.timeanddate.com/weather/japan/tokyo/ext', 'body': 'Tokyo Extended Forecast with high and low temperatures. °F. Last 2 weeks of weather'}, {'title': 'Tokyo, Tokyo, Japan Current Weather | AccuWeather', 'href': 'https://www.accuweather.com/en/jp/tokyo/226396/current-weather/226396', 'body': 'Current weather in Tokyo, Tokyo, Japan. Check current conditions in Tokyo, Tokyo, Japan with radar, hourly, and more.'}, {'title': 'Weather in Tokyo, Japan - timeanddate.com', 'href': 'https://www.timeanddate.com/weather/japan/tokyo', 'body': 'Current weather in Tokyo and forecast for today, tomorrow, and next 14 days'}, {'title': 'Tokyo Weather Forecast Today', 'href': 'https://japanweather.org/tokyo', 'body': "For today's mild weather in Tokyo, with temperatures between 13ºC to 16ºC (55.4ºF to 60.8ºF), consider wearing: - Comfortable jeans or slacks - Sun hat (if spending time outdoors) - Lightweight sweater or cardigan - Long-sleeve shirt or blouse. Temperature. Day. 14°C. Night. 10°C. Morning. 10°C. Afternoon."}] <class 'list'>
|
|
867
|
+
RESULTS in tool ["Tokyo, Tokyo, Japan Weather Forecast, with current conditions, wind, air quality, and what to expect for the next 3 days.\n Citation: https://www.accuweather.com/en/jp/tokyo/226396/weather-forecast/226396\n\n\n\nTokyo Extended Forecast with high and low temperatures. °F. Last 2 weeks of weather\n Citation: https://www.timeanddate.com/weather/japan/tokyo/ext\n\n\n\nCurrent weather in Tokyo, Tokyo, Japan. Check current conditions in Tokyo, Tokyo, Japan with radar, hourly, and more.\n Citation: https://www.accuweather.com/en/jp/tokyo/226396/current-weather/226396\n\n\n\nCurrent weather in Tokyo and forecast for today, tomorrow, and next 14 days\n Citation: https://www.timeanddate.com/weather/japan/tokyo\n\n\n\nFor today's mild weather in Tokyo, with temperatures between 13ºC to 16ºC (55.4ºF to 60.8ºF), consider wearing: - Comfortable jeans or slacks - Sun hat (if spending time outdoors) - Lightweight sweater or cardigan - Long-sleeve shirt or blouse. Temperature. Day. 14°C. Night. 10°C. Morning. 10°C. Afternoon.\n Citation: https://japanweather.org/tokyo\n\n\n", 'https://www.accuweather.com/en/jp/tokyo/226396/weather-forecast/226396\n\nhttps://www.timeanddate.com/weather/japan/tokyo/ext\n\nhttps://www.accuweather.com/en/jp/tokyo/226396/current-weather/226396\n\nhttps://www.timeanddate.com/weather/japan/tokyo\n\nhttps://japanweather.org/tokyo\n']
|
|
868
|
+
The current weather in Tokyo, Japan is mild, with temperatures ranging from 13°C to 16°C (approximately 55.4°F to 60.8°F). For today's conditions, it is suggested to wear comfortable jeans or slacks, a lightweight sweater or cardigan, and a long-sleeve shirt or blouse, especially if spending time outdoors. The temperature today is expected to reach a high of 14°C (57.2°F) during the day and a low of 10°C (50°F) at night.
|
|
869
|
+
|
|
870
|
+
For more detailed weather information, you can check out the following sources:
|
|
871
|
+
- [AccuWeather Forecast](https://www.accuweather.com/en/jp/tokyo/226396/weather-forecast/226396)
|
|
872
|
+
- [Time and Date Extended Forecast](https://www.timeanddate.com/weather/japan/tokyo/ext)
|
|
873
|
+
- [Current Weather on AccuWeather](https://www.accuweather.com/en/jp/tokyo/226396/current-weather/226396)
|
|
874
|
+
- [More on Time and Date](https://www.timeanddate.com/weather/japan/tokyo)
|
|
875
|
+
- [Japan Weather](https://japanweather.org/tokyo)
|
|
876
|
+
```
|
|
877
|
+
|
|
878
|
+
|
|
879
|
+
### Serving
|
|
880
|
+
To serve an NPC project, first install redis-server and start it
|
|
881
|
+
|
|
882
|
+
on Ubuntu:
|
|
883
|
+
```bash
|
|
884
|
+
sudo apt update && sudo apt install redis-server
|
|
885
|
+
redis-server
|
|
886
|
+
```
|
|
887
|
+
|
|
888
|
+
on macOS:
|
|
889
|
+
```bash
|
|
890
|
+
brew install redis
|
|
891
|
+
redis-server
|
|
892
|
+
```
|
|
893
|
+
Then navigate to the project directory and run:
|
|
568
894
|
|
|
569
895
|
```bash
|
|
570
896
|
npc serve
|
|
@@ -1071,9 +1397,10 @@ Tools are defined as YAMLs with `.tool` extension within the npc_team/tools dire
|
|
|
1071
1397
|
Here is an example of a tool file:
|
|
1072
1398
|
```yaml
|
|
1073
1399
|
tool_name: "screen_capture_analysis_tool"
|
|
1400
|
+
description: Captures the whole screen and sends the image for analysis
|
|
1074
1401
|
inputs:
|
|
1075
1402
|
- "prompt"
|
|
1076
|
-
|
|
1403
|
+
steps:
|
|
1077
1404
|
- engine: "python"
|
|
1078
1405
|
code: |
|
|
1079
1406
|
# Capture the screen
|
|
@@ -1081,30 +1408,22 @@ preprocess:
|
|
|
1081
1408
|
import datetime
|
|
1082
1409
|
import os
|
|
1083
1410
|
from PIL import Image
|
|
1084
|
-
|
|
1085
|
-
|
|
1086
|
-
|
|
1087
|
-
|
|
1088
|
-
|
|
1089
|
-
screenshot.
|
|
1090
|
-
|
|
1091
|
-
|
|
1092
|
-
|
|
1093
|
-
|
|
1094
|
-
|
|
1095
|
-
|
|
1096
|
-
|
|
1097
|
-
|
|
1098
|
-
|
|
1099
|
-
|
|
1100
|
-
prompt:
|
|
1101
|
-
engine: "natural"
|
|
1102
|
-
code: ""
|
|
1103
|
-
postprocess:
|
|
1104
|
-
- engine: "natural"
|
|
1105
|
-
code: |
|
|
1106
|
-
Screenshot captured and saved as {{ filename }}.
|
|
1107
|
-
Analysis Result: {{ llm_output }}
|
|
1411
|
+
import time
|
|
1412
|
+
from npcsh.image import analyze_image_base, capture_screenshot
|
|
1413
|
+
|
|
1414
|
+
out = capture_screenshot(npc = npc, full = True)
|
|
1415
|
+
|
|
1416
|
+
llm_response = analyze_image_base( '{{prompt}}' + "\n\nAttached is a screenshot of my screen currently. Please use this to evaluate the situation. If the user asked for you to explain what's on their screen or something similar, they are referring to the details contained within the attached image. You do not need to actually view their screen. You do not need to mention that you cannot view or interpret images directly. You only need to answer the user's request based on the attached screenshot!",
|
|
1417
|
+
out['file_path'],
|
|
1418
|
+
out['filename'],
|
|
1419
|
+
npc=npc,
|
|
1420
|
+
**out['model_kwargs'])
|
|
1421
|
+
# To this:
|
|
1422
|
+
if isinstance(llm_response, dict):
|
|
1423
|
+
llm_response = llm_response.get('response', 'No response from image analysis')
|
|
1424
|
+
else:
|
|
1425
|
+
llm_response = 'No response from image analysis'
|
|
1426
|
+
|
|
1108
1427
|
```
|
|
1109
1428
|
|
|
1110
1429
|
|
|
@@ -1417,321 +1736,6 @@ results = runner.execute_pipeline()
|
|
|
1417
1736
|
Note, in the future we will aim to separate compilation and running so that we will have a compilation step that is more like a jinja rendering of the relevant information so that it can be more easily audited.
|
|
1418
1737
|
|
|
1419
1738
|
|
|
1420
|
-
## Python Examples
|
|
1421
|
-
Integrate npcsh into your Python projects for additional flexibility. Below are a few examples of how to use the library programmatically.
|
|
1422
|
-
|
|
1423
|
-
|
|
1424
|
-
|
|
1425
|
-
### Example 1: Creating and Using an NPC
|
|
1426
|
-
This example shows how to create and initialize an NPC and use it to answer a question.
|
|
1427
|
-
```bash
|
|
1428
|
-
import sqlite3
|
|
1429
|
-
from npcsh.npc_compiler import NPC
|
|
1430
|
-
|
|
1431
|
-
# Set up database connection
|
|
1432
|
-
db_path = '~/npcsh_history.db'
|
|
1433
|
-
conn = sqlite3.connect(db_path)
|
|
1434
|
-
|
|
1435
|
-
# Load NPC from a file
|
|
1436
|
-
npc = NPC(db_conn=conn,
|
|
1437
|
-
name='Simon Bolivar',
|
|
1438
|
-
primary_directive='Liberate South America from the Spanish Royalists.',
|
|
1439
|
-
model='gpt-4o-mini',
|
|
1440
|
-
provider='openai',
|
|
1441
|
-
)
|
|
1442
|
-
|
|
1443
|
-
response = npc.get_llm_response("What is the most important territory to retain in the Andes mountains?")
|
|
1444
|
-
print(response['response'])
|
|
1445
|
-
```
|
|
1446
|
-
```bash
|
|
1447
|
-
'The most important territory to retain in the Andes mountains for the cause of liberation in South America would be the region of Quito in present-day Ecuador. This area is strategically significant due to its location and access to key trade routes. It also acts as a vital link between the northern and southern parts of the continent, influencing both military movements and the morale of the independence struggle. Retaining control over Quito would bolster efforts to unite various factions in the fight against Spanish colonial rule across the Andean states.'
|
|
1448
|
-
```
|
|
1449
|
-
### Example 2: Using an NPC to Analyze Data
|
|
1450
|
-
This example shows how to use an NPC to perform data analysis on a DataFrame using LLM commands.
|
|
1451
|
-
```bash
|
|
1452
|
-
from npcsh.npc_compiler import NPC
|
|
1453
|
-
import sqlite3
|
|
1454
|
-
import os
|
|
1455
|
-
# Set up database connection
|
|
1456
|
-
db_path = '~/npcsh_history.db'
|
|
1457
|
-
conn = sqlite3.connect(os.path.expanduser(db_path))
|
|
1458
|
-
|
|
1459
|
-
# make a table to put into npcsh_history.db or change this example to use an existing table in a database you have
|
|
1460
|
-
import pandas as pd
|
|
1461
|
-
data = {
|
|
1462
|
-
'customer_feedback': ['The product is great!', 'The service was terrible.', 'I love the new feature.'],
|
|
1463
|
-
'customer_id': [1, 2, 3],
|
|
1464
|
-
'customer_rating': [5, 1, 3],
|
|
1465
|
-
'timestamp': ['2022-01-01', '2022-01-02', '2022-01-03']
|
|
1466
|
-
}
|
|
1467
|
-
|
|
1468
|
-
|
|
1469
|
-
df = pd.DataFrame(data)
|
|
1470
|
-
df.to_sql('customer_feedback', conn, if_exists='replace', index=False)
|
|
1471
|
-
|
|
1472
|
-
|
|
1473
|
-
npc = NPC(db_conn=conn,
|
|
1474
|
-
name='Felix',
|
|
1475
|
-
primary_directive='Analyze customer feedback for sentiment.',
|
|
1476
|
-
model='llama3.2',
|
|
1477
|
-
provider='ollama',
|
|
1478
|
-
)
|
|
1479
|
-
response = npc.analyze_db_data('Provide a detailed report on the data contained in the `customer_feedback` table?')
|
|
1480
|
-
|
|
1481
|
-
|
|
1482
|
-
```
|
|
1483
|
-
|
|
1484
|
-
|
|
1485
|
-
### Example 3: Creating and Using a Tool
|
|
1486
|
-
You can define a tool and execute it from within your Python script.
|
|
1487
|
-
Here we'll create a tool that will take in a pdf file, extract the text, and then answer a user request about the text.
|
|
1488
|
-
|
|
1489
|
-
```bash
|
|
1490
|
-
from npcsh.npc_compiler import Tool, NPC
|
|
1491
|
-
import sqlite3
|
|
1492
|
-
import os
|
|
1493
|
-
|
|
1494
|
-
tool_data = {
|
|
1495
|
-
"tool_name": "pdf_analyzer",
|
|
1496
|
-
"inputs": ["request", "file"],
|
|
1497
|
-
"steps": [{ # Make this a list with one dict inside
|
|
1498
|
-
"engine": "python",
|
|
1499
|
-
"code": """
|
|
1500
|
-
try:
|
|
1501
|
-
import fitz # PyMuPDF
|
|
1502
|
-
|
|
1503
|
-
shared_context = {}
|
|
1504
|
-
shared_context['inputs'] = inputs
|
|
1505
|
-
|
|
1506
|
-
pdf_path = inputs['file']
|
|
1507
|
-
print(f"Processing PDF file: {pdf_path}")
|
|
1508
|
-
|
|
1509
|
-
# Open the PDF
|
|
1510
|
-
doc = fitz.open(pdf_path)
|
|
1511
|
-
text = ""
|
|
1512
|
-
|
|
1513
|
-
# Extract text from each page
|
|
1514
|
-
for page_num in range(len(doc)):
|
|
1515
|
-
page = doc[page_num]
|
|
1516
|
-
text += page.get_text()
|
|
1517
|
-
|
|
1518
|
-
# Close the document
|
|
1519
|
-
doc.close()
|
|
1520
|
-
|
|
1521
|
-
print(f"Extracted text length: {len(text)}")
|
|
1522
|
-
if len(text) > 100:
|
|
1523
|
-
print(f"First 100 characters: {text[:100]}...")
|
|
1524
|
-
|
|
1525
|
-
shared_context['extracted_text'] = text
|
|
1526
|
-
print("Text extraction completed successfully")
|
|
1527
|
-
|
|
1528
|
-
except Exception as e:
|
|
1529
|
-
error_msg = f"Error processing PDF: {str(e)}"
|
|
1530
|
-
print(error_msg)
|
|
1531
|
-
shared_context['extracted_text'] = f"Error: {error_msg}"
|
|
1532
|
-
"""
|
|
1533
|
-
},
|
|
1534
|
-
{
|
|
1535
|
-
"engine": "natural",
|
|
1536
|
-
"code": """
|
|
1537
|
-
{% if shared_context and shared_context.extracted_text %}
|
|
1538
|
-
{% if shared_context.extracted_text.startswith('Error:') %}
|
|
1539
|
-
{{ shared_context.extracted_text }}
|
|
1540
|
-
{% else %}
|
|
1541
|
-
Here is the text extracted from the PDF:
|
|
1542
|
-
|
|
1543
|
-
{{ shared_context.extracted_text }}
|
|
1544
|
-
|
|
1545
|
-
Please provide a response to user request: {{ inputs.request }} using the information extracted above.
|
|
1546
|
-
{% endif %}
|
|
1547
|
-
{% else %}
|
|
1548
|
-
Error: No text was extracted from the PDF.
|
|
1549
|
-
{% endif %}
|
|
1550
|
-
"""
|
|
1551
|
-
},]
|
|
1552
|
-
|
|
1553
|
-
# Instantiate the tool
|
|
1554
|
-
tool = Tool(tool_data)
|
|
1555
|
-
|
|
1556
|
-
# Create an NPC instance
|
|
1557
|
-
npc = NPC(
|
|
1558
|
-
name='starlana',
|
|
1559
|
-
primary_directive='Analyze text from Astrophysics papers with a keen attention to theoretical machinations and mechanisms.',
|
|
1560
|
-
db_conn=sqlite3.connect(os.path.expanduser('~/npcsh_database.db'))
|
|
1561
|
-
)
|
|
1562
|
-
|
|
1563
|
-
# Define input values dictionary
|
|
1564
|
-
input_values = {
|
|
1565
|
-
"request": "what is the point of the yuan and narayanan work?",
|
|
1566
|
-
"file": os.path.abspath("test_data/yuan2004.pdf")
|
|
1567
|
-
}
|
|
1568
|
-
|
|
1569
|
-
print(f"Attempting to read file: {input_values['file']}")
|
|
1570
|
-
print(f"File exists: {os.path.exists(input_values['file'])}")
|
|
1571
|
-
|
|
1572
|
-
# Execute the tool
|
|
1573
|
-
output = tool.execute(input_values, npc.tools_dict, None, 'Sample Command', npc)
|
|
1574
|
-
|
|
1575
|
-
print('Tool Output:', output)
|
|
1576
|
-
```
|
|
1577
|
-
|
|
1578
|
-
### Example 4: Orchestrating a team
|
|
1579
|
-
|
|
1580
|
-
|
|
1581
|
-
|
|
1582
|
-
```python
|
|
1583
|
-
import pandas as pd
|
|
1584
|
-
import numpy as np
|
|
1585
|
-
import os
|
|
1586
|
-
from npcsh.npc_compiler import NPC, NPCTeam, Tool
|
|
1587
|
-
|
|
1588
|
-
|
|
1589
|
-
# Create test data and save to CSV
|
|
1590
|
-
def create_test_data(filepath="sales_data.csv"):
|
|
1591
|
-
sales_data = pd.DataFrame(
|
|
1592
|
-
{
|
|
1593
|
-
"date": pd.date_range(start="2024-01-01", periods=90),
|
|
1594
|
-
"revenue": np.random.normal(10000, 2000, 90),
|
|
1595
|
-
"customer_count": np.random.poisson(100, 90),
|
|
1596
|
-
"avg_ticket": np.random.normal(100, 20, 90),
|
|
1597
|
-
"region": np.random.choice(["North", "South", "East", "West"], 90),
|
|
1598
|
-
"channel": np.random.choice(["Online", "Store", "Mobile"], 90),
|
|
1599
|
-
}
|
|
1600
|
-
)
|
|
1601
|
-
|
|
1602
|
-
# Add patterns to make data more realistic
|
|
1603
|
-
sales_data["revenue"] *= 1 + 0.3 * np.sin(
|
|
1604
|
-
np.pi * np.arange(90) / 30
|
|
1605
|
-
) # Seasonal pattern
|
|
1606
|
-
sales_data.loc[sales_data["channel"] == "Mobile", "revenue"] *= 1.1 # Mobile growth
|
|
1607
|
-
sales_data.loc[
|
|
1608
|
-
sales_data["channel"] == "Online", "customer_count"
|
|
1609
|
-
] *= 1.2 # Online customer growth
|
|
1610
|
-
|
|
1611
|
-
sales_data.to_csv(filepath, index=False)
|
|
1612
|
-
return filepath, sales_data
|
|
1613
|
-
|
|
1614
|
-
|
|
1615
|
-
code_execution_tool = Tool(
|
|
1616
|
-
{
|
|
1617
|
-
"tool_name": "execute_code",
|
|
1618
|
-
"description": """Executes a Python code block with access to pandas,
|
|
1619
|
-
numpy, and matplotlib.
|
|
1620
|
-
Results should be stored in the 'results' dict to be returned.
|
|
1621
|
-
The only input should be a single code block with \n characters included.
|
|
1622
|
-
The code block must use only the libraries or methods contained within the
|
|
1623
|
-
pandas, numpy, and matplotlib libraries or using builtin methods.
|
|
1624
|
-
do not include any json formatting or markdown formatting.
|
|
1625
|
-
|
|
1626
|
-
When generating your script, the final output must be encoded in a variable
|
|
1627
|
-
named "output". e.g.
|
|
1628
|
-
|
|
1629
|
-
output = some_analysis_function(inputs, derived_data_from_inputs)
|
|
1630
|
-
Adapt accordingly based on the scope of the analysis
|
|
1631
|
-
|
|
1632
|
-
""",
|
|
1633
|
-
"inputs": ["script"],
|
|
1634
|
-
"steps": [
|
|
1635
|
-
{
|
|
1636
|
-
"engine": "python",
|
|
1637
|
-
"code": """{{script}}""",
|
|
1638
|
-
}
|
|
1639
|
-
],
|
|
1640
|
-
}
|
|
1641
|
-
)
|
|
1642
|
-
|
|
1643
|
-
# Analytics team definition
|
|
1644
|
-
analytics_team = [
|
|
1645
|
-
{
|
|
1646
|
-
"name": "analyst",
|
|
1647
|
-
"primary_directive": "You analyze sales performance data, focusing on revenue trends, customer behavior metrics, and market indicators. Your expertise is in extracting actionable insights from complex datasets.",
|
|
1648
|
-
"model": "gpt-4o-mini",
|
|
1649
|
-
"provider": "openai",
|
|
1650
|
-
"tools": [code_execution_tool], # Only the code execution tool
|
|
1651
|
-
},
|
|
1652
|
-
{
|
|
1653
|
-
"name": "researcher",
|
|
1654
|
-
"primary_directive": "You specialize in causal analysis and experimental design. Given data insights, you determine what factors drive observed patterns and design tests to validate hypotheses.",
|
|
1655
|
-
"model": "gpt-4o-mini",
|
|
1656
|
-
"provider": "openai",
|
|
1657
|
-
"tools": [code_execution_tool], # Only the code execution tool
|
|
1658
|
-
},
|
|
1659
|
-
{
|
|
1660
|
-
"name": "engineer",
|
|
1661
|
-
"primary_directive": "You implement data pipelines and optimize data processing. When given analysis requirements, you create efficient workflows to automate insights generation.",
|
|
1662
|
-
"model": "gpt-4o-mini",
|
|
1663
|
-
"provider": "openai",
|
|
1664
|
-
"tools": [code_execution_tool], # Only the code execution tool
|
|
1665
|
-
},
|
|
1666
|
-
]
|
|
1667
|
-
|
|
1668
|
-
|
|
1669
|
-
def create_analytics_team():
|
|
1670
|
-
# Initialize NPCs with just the code execution tool
|
|
1671
|
-
npcs = []
|
|
1672
|
-
for npc_data in analytics_team:
|
|
1673
|
-
npc = NPC(
|
|
1674
|
-
name=npc_data["name"],
|
|
1675
|
-
primary_directive=npc_data["primary_directive"],
|
|
1676
|
-
model=npc_data["model"],
|
|
1677
|
-
provider=npc_data["provider"],
|
|
1678
|
-
tools=[code_execution_tool], # Only code execution tool
|
|
1679
|
-
)
|
|
1680
|
-
npcs.append(npc)
|
|
1681
|
-
|
|
1682
|
-
# Create coordinator with just code execution tool
|
|
1683
|
-
coordinator = NPC(
|
|
1684
|
-
name="coordinator",
|
|
1685
|
-
primary_directive="You coordinate the analytics team, ensuring each specialist contributes their expertise effectively. You synthesize insights and manage the workflow.",
|
|
1686
|
-
model="gpt-4o-mini",
|
|
1687
|
-
provider="openai",
|
|
1688
|
-
tools=[code_execution_tool], # Only code execution tool
|
|
1689
|
-
)
|
|
1690
|
-
|
|
1691
|
-
# Create team
|
|
1692
|
-
team = NPCTeam(npcs=npcs, foreman=coordinator)
|
|
1693
|
-
return team
|
|
1694
|
-
|
|
1695
|
-
|
|
1696
|
-
def main():
|
|
1697
|
-
# Create and save test data
|
|
1698
|
-
data_path, sales_data = create_test_data()
|
|
1699
|
-
|
|
1700
|
-
# Initialize team
|
|
1701
|
-
team = create_analytics_team()
|
|
1702
|
-
|
|
1703
|
-
# Run analysis - updated prompt to reflect code execution approach
|
|
1704
|
-
results = team.orchestrate(
|
|
1705
|
-
f"""
|
|
1706
|
-
Analyze the sales data at {data_path} to:
|
|
1707
|
-
1. Identify key performance drivers
|
|
1708
|
-
2. Determine if mobile channel growth is significant
|
|
1709
|
-
3. Recommend tests to validate growth hypotheses
|
|
1710
|
-
|
|
1711
|
-
Here is a header for the data file at {data_path}:
|
|
1712
|
-
{sales_data.head()}
|
|
1713
|
-
|
|
1714
|
-
When working with dates, ensure that date columns are converted from raw strings. e.g. use the pd.to_datetime function.
|
|
1715
|
-
|
|
1716
|
-
|
|
1717
|
-
When working with potentially messy data, handle null values by using nan versions of numpy functions or
|
|
1718
|
-
by filtering them with a mask .
|
|
1719
|
-
|
|
1720
|
-
Use Python code execution to perform the analysis - load the data and perform statistical analysis directly.
|
|
1721
|
-
"""
|
|
1722
|
-
)
|
|
1723
|
-
|
|
1724
|
-
print(results)
|
|
1725
|
-
|
|
1726
|
-
# Cleanup
|
|
1727
|
-
os.remove(data_path)
|
|
1728
|
-
|
|
1729
|
-
|
|
1730
|
-
if __name__ == "__main__":
|
|
1731
|
-
main()
|
|
1732
|
-
|
|
1733
|
-
```
|
|
1734
|
-
|
|
1735
1739
|
## npcsql: SQL Integration and pipelines (UNDER CONSTRUCTION)
|
|
1736
1740
|
|
|
1737
1741
|
|