npcsh-0.3.32-py3-none-any.whl → npcsh-1.0.0-py3-none-any.whl
- npcsh/_state.py +942 -0
- npcsh/alicanto.py +1074 -0
- npcsh/guac.py +785 -0
- npcsh/mcp_helpers.py +357 -0
- npcsh/mcp_npcsh.py +822 -0
- npcsh/mcp_server.py +184 -0
- npcsh/npc.py +218 -0
- npcsh/npcsh.py +1161 -0
- npcsh/plonk.py +387 -269
- npcsh/pti.py +234 -0
- npcsh/routes.py +958 -0
- npcsh/spool.py +315 -0
- npcsh/wander.py +550 -0
- npcsh/yap.py +573 -0
- npcsh-1.0.0.dist-info/METADATA +596 -0
- npcsh-1.0.0.dist-info/RECORD +21 -0
- {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/WHEEL +1 -1
- npcsh-1.0.0.dist-info/entry_points.txt +9 -0
- {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/licenses/LICENSE +1 -1
- npcsh/audio.py +0 -569
- npcsh/audio_gen.py +0 -1
- npcsh/cli.py +0 -543
- npcsh/command_history.py +0 -566
- npcsh/conversation.py +0 -54
- npcsh/data_models.py +0 -46
- npcsh/dataframes.py +0 -171
- npcsh/embeddings.py +0 -168
- npcsh/helpers.py +0 -646
- npcsh/image.py +0 -298
- npcsh/image_gen.py +0 -79
- npcsh/knowledge_graph.py +0 -1006
- npcsh/llm_funcs.py +0 -2195
- npcsh/load_data.py +0 -83
- npcsh/main.py +0 -5
- npcsh/model_runner.py +0 -189
- npcsh/npc_compiler.py +0 -2879
- npcsh/npc_sysenv.py +0 -388
- npcsh/npc_team/assembly_lines/test_pipeline.py +0 -181
- npcsh/npc_team/corca.npc +0 -13
- npcsh/npc_team/foreman.npc +0 -7
- npcsh/npc_team/npcsh.ctx +0 -11
- npcsh/npc_team/sibiji.npc +0 -4
- npcsh/npc_team/templates/analytics/celona.npc +0 -0
- npcsh/npc_team/templates/hr_support/raone.npc +0 -0
- npcsh/npc_team/templates/humanities/eriane.npc +0 -4
- npcsh/npc_team/templates/it_support/lineru.npc +0 -0
- npcsh/npc_team/templates/marketing/slean.npc +0 -4
- npcsh/npc_team/templates/philosophy/maurawa.npc +0 -0
- npcsh/npc_team/templates/sales/turnic.npc +0 -4
- npcsh/npc_team/templates/software/welxor.npc +0 -0
- npcsh/npc_team/tools/bash_executer.tool +0 -32
- npcsh/npc_team/tools/calculator.tool +0 -8
- npcsh/npc_team/tools/code_executor.tool +0 -16
- npcsh/npc_team/tools/generic_search.tool +0 -27
- npcsh/npc_team/tools/image_generation.tool +0 -25
- npcsh/npc_team/tools/local_search.tool +0 -149
- npcsh/npc_team/tools/npcsh_executor.tool +0 -9
- npcsh/npc_team/tools/screen_cap.tool +0 -27
- npcsh/npc_team/tools/sql_executor.tool +0 -26
- npcsh/response.py +0 -272
- npcsh/search.py +0 -252
- npcsh/serve.py +0 -1467
- npcsh/shell.py +0 -524
- npcsh/shell_helpers.py +0 -3919
- npcsh/stream.py +0 -233
- npcsh/video.py +0 -52
- npcsh/video_gen.py +0 -69
- npcsh-0.3.32.data/data/npcsh/npc_team/bash_executer.tool +0 -32
- npcsh-0.3.32.data/data/npcsh/npc_team/calculator.tool +0 -8
- npcsh-0.3.32.data/data/npcsh/npc_team/celona.npc +0 -0
- npcsh-0.3.32.data/data/npcsh/npc_team/code_executor.tool +0 -16
- npcsh-0.3.32.data/data/npcsh/npc_team/corca.npc +0 -13
- npcsh-0.3.32.data/data/npcsh/npc_team/eriane.npc +0 -4
- npcsh-0.3.32.data/data/npcsh/npc_team/foreman.npc +0 -7
- npcsh-0.3.32.data/data/npcsh/npc_team/generic_search.tool +0 -27
- npcsh-0.3.32.data/data/npcsh/npc_team/image_generation.tool +0 -25
- npcsh-0.3.32.data/data/npcsh/npc_team/lineru.npc +0 -0
- npcsh-0.3.32.data/data/npcsh/npc_team/local_search.tool +0 -149
- npcsh-0.3.32.data/data/npcsh/npc_team/maurawa.npc +0 -0
- npcsh-0.3.32.data/data/npcsh/npc_team/npcsh.ctx +0 -11
- npcsh-0.3.32.data/data/npcsh/npc_team/npcsh_executor.tool +0 -9
- npcsh-0.3.32.data/data/npcsh/npc_team/raone.npc +0 -0
- npcsh-0.3.32.data/data/npcsh/npc_team/screen_cap.tool +0 -27
- npcsh-0.3.32.data/data/npcsh/npc_team/sibiji.npc +0 -4
- npcsh-0.3.32.data/data/npcsh/npc_team/slean.npc +0 -4
- npcsh-0.3.32.data/data/npcsh/npc_team/sql_executor.tool +0 -26
- npcsh-0.3.32.data/data/npcsh/npc_team/test_pipeline.py +0 -181
- npcsh-0.3.32.data/data/npcsh/npc_team/turnic.npc +0 -4
- npcsh-0.3.32.data/data/npcsh/npc_team/welxor.npc +0 -0
- npcsh-0.3.32.dist-info/METADATA +0 -779
- npcsh-0.3.32.dist-info/RECORD +0 -78
- npcsh-0.3.32.dist-info/entry_points.txt +0 -3
- {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/top_level.txt +0 -0
npcsh/npc_sysenv.py
DELETED
@@ -1,388 +0,0 @@
-import re
-from datetime import datetime
-from typing import Any
-import os
-import io
-import sqlite3
-from dotenv import load_dotenv
-from PIL import Image
-
-
-def get_model_and_provider(command: str, available_models: list) -> tuple:
-    """
-    Function Description:
-        Extracts model and provider from command and autocompletes if possible.
-    Args:
-        command : str : Command string
-        available_models : list : List of available models
-    Keyword Args:
-        None
-    Returns:
-        model_name : str : Model name
-        provider : str : Provider
-        cleaned_command : str : Clean
-
-
-    """
-
-    model_match = re.search(r"@(\S+)", command)
-    if model_match:
-        model_name = model_match.group(1)
-        # Autocomplete model name
-        matches = [m for m in available_models if m.startswith(model_name)]
-        if matches:
-            if len(matches) == 1:
-                model_name = matches[0]  # Complete the name if only one match
-            # Find provider for the (potentially autocompleted) model
-            provider = lookup_provider(model_name)
-            if provider:
-                # Remove the model tag from the command
-                cleaned_command = command.replace(
-                    f"@{model_match.group(1)}", ""
-                ).strip()
-                # print(cleaned_command, 'cleaned_command')
-                return model_name, provider, cleaned_command
-            else:
-                return None, None, command  # Provider not found
-        else:
-            return None, None, command  # No matching model
-    else:
-        return None, None, command  # No model specified
-
-
-def get_available_models() -> list:
-    """
-    Function Description:
-        Fetches available models from Ollama, OpenAI, and Anthropic.
-    Args:
-        None
-    Keyword Args:
-        None
-    Returns:
-        available_models : list : List of available models
-
-    """
-    available_chat_models = []
-    available_reasoning_models = []
-
-    ollama_chat_models = [
-        "gemma3",
-        "llama3.3",
-        "llama3.2",
-        "llama3.1" "phi4",
-        "phi3.5",
-        "mistral",
-        "llama3",
-        "gemma",
-        "qwen",
-        "qwen2",
-        "qwen2.5",
-        "phi3",
-        "llava",
-        "codellama",
-        "qwen2.5-coder",
-        "tinyllama",
-        "mistral-nemo",
-        "llama3.2-vesion",
-        "starcoder2",
-        "mixtral",
-        "dolphin-mixtral",
-        "deepseek-coder-v2",
-        "codegemma",
-        "phi",
-        "deepseek-coder",
-        "wizardlm2",
-        "llava-llama3",
-    ]
-    available_chat_models.extend(ollama_chat_models)
-
-    ollama_reasoning_models = ["deepseek-r1", "qwq"]
-    available_reasoning_models.extend(ollama_reasoning_models)
-
-    # OpenAI models
-    openai_chat_models = [
-        "gpt-4-turbo",
-        "gpt-4o",
-        "gpt-4o-mini",
-        "dall-e-3",
-        "dall-e-2",
-    ]
-    openai_reasoning_models = [
-        "o1-mini",
-        "o1",
-        "o1-preview",
-        "o3-mini",
-        "o3-preview",
-    ]
-    available_reasoning_models.extend(openai_reasoning_models)
-
-    available_chat_models.extend(openai_chat_models)
-
-    # Anthropic models
-    anthropic_chat_models = [
-        "claude-3-opus-20240229",
-        "claude-3-sonnet-20240229",
-        "claude-3-5-sonnet-20241022",
-        "claude-3-haiku-20240307",
-        "claude-2.1",
-        "claude-2.0",
-        "claude-instant-1.2",
-    ]
-    available_chat_models.extend(anthropic_chat_models)
-    diffusers_models = [
-        "runwayml/stable-diffusion-v1-5",
-    ]
-    available_chat_models.extend(diffusers_models)
-
-    deepseek_chat_models = [
-        "deepseek-chat",
-    ]
-
-    deepseek_reasoning_models = [
-        "deepseek-reasoner",
-    ]
-
-    available_chat_models.extend(deepseek_chat_models)
-    available_reasoning_models.extend(deepseek_reasoning_models)
-    return available_chat_models, available_reasoning_models
-
-
-def get_system_message(npc: Any) -> str:
-    """
-    Function Description:
-        This function generates a system message for the NPC.
-    Args:
-        npc (Any): The NPC object.
-    Keyword Args:
-        None
-    Returns:
-        str: The system message for the NPC.
-    """
-    # print(npc, type(npc))
-
-    system_message = f"""
-    .
-    ..
-    ...
-    ....
-    .....
-    ......
-    .......
-    ........
-    .........
-    ..........
-    Hello!
-    Welcome to the team.
-    You are an NPC working as part of our team.
-    You are the {npc.name} NPC with the following primary directive: {npc.primary_directive}.
-    Users may refer to you by your assistant name, {npc.name} and you should
-    consider this to be your core identity.
-
-    The current date and time are : {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
-
-
-    In some cases, users may request insights into data contained in a local database.
-    For these purposes, you may use any data contained within these sql tables
-    {npc.tables}
-
-    which are contained in the database at {NPCSH_DB_PATH}.
-
-    If you ever need to produce markdown texts for the user, please do so
-    with less than 80 characters width for each line.
-    """
-
-    # need to move this to the check_llm_command or move that one here
-
-    if npc.tools:
-        tool_descriptions = "\n".join(
-            [
-                f"Tool Name: {tool.tool_name}\n"
-                f"Inputs: {tool.inputs}\n"
-                f"Steps: {tool.steps}\n"
-                for tool in npc.all_tools
-            ]
-        )
-        system_message += f"\n\nAvailable Tools:\n{tool_descriptions}"
-    system_message += """\n\nSome users may attach images to their request.
-    Please process them accordingly.
-
-    If the user asked for you to explain what's on their screen or something similar,
-    they are referring to the details contained within the attached image(s).
-    You do not need to actually view their screen.
-    You do not need to mention that you cannot view or interpret images directly.
-    They understand that you can view them multimodally.
-    You only need to answer the user's request based on the attached image(s).
-    """
-    return system_message
-
-
-available_chat_models, available_reasoning_models = get_available_models()
-
-
-EMBEDDINGS_DB_PATH = os.path.expanduser("~/npcsh_chroma.db")
-
-try:
-    import chromadb
-
-    chroma_client = chromadb.PersistentClient(path=EMBEDDINGS_DB_PATH)
-except:
-    chroma_client = None
-
-
-# Load environment variables from .env file
-def load_env_from_execution_dir() -> None:
-    """
-    Function Description:
-        This function loads environment variables from a .env file in the current execution directory.
-    Args:
-        None
-    Keyword Args:
-        None
-    Returns:
-        None
-    """
-
-    # Get the directory where the script is being executed
-    execution_dir = os.path.abspath(os.getcwd())
-    # print(f"Execution directory: {execution_dir}")
-    # Construct the path to the .env file
-    env_path = os.path.join(execution_dir, ".env")
-
-    # Load the .env file if it exists
-    if os.path.exists(env_path):
-        load_dotenv(dotenv_path=env_path)
-        print(f"Loaded .env file from {execution_dir}")
-    else:
-        print(f"Warning: No .env file found in {execution_dir}")
-
-
-def get_available_tables(db_path: str) -> str:
-    """
-    Function Description:
-        This function gets the available tables in the database.
-    Args:
-        db_path (str): The database path.
-    Keyword Args:
-        None
-    Returns:
-        str: The available tables in the database.
-    """
-
-    try:
-        with sqlite3.connect(db_path) as conn:
-            cursor = conn.cursor()
-            cursor.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' AND name != 'command_history'"
-            )
-            tables = cursor.fetchall()
-
-            return tables
-    except Exception as e:
-        print(f"Error getting available tables: {e}")
-        return ""
-
-
-def lookup_provider(model: str) -> str:
-    """
-    Function Description:
-        This function determines the provider based on the model name.
-    Args:
-        model (str): The model name.
-    Keyword Args:
-        None
-    Returns:
-        str: The provider based on the model name.
-    """
-    if model == "deepseek-chat" or model == "deepseek-reasoner":
-        return "deepseek"
-    ollama_prefixes = [
-        "llama",
-        "deepseek",
-        "qwen",
-        "llava",
-        "phi",
-        "mistral",
-        "mixtral",
-        "dolphin",
-        "codellama",
-        "gemma",
-    ]
-    if any(model.startswith(prefix) for prefix in ollama_prefixes):
-        return "ollama"
-
-    # OpenAI models
-    openai_prefixes = ["gpt-", "dall-e-", "whisper-", "o1"]
-    if any(model.startswith(prefix) for prefix in openai_prefixes):
-        return "openai"
-
-    # Anthropic models
-    if model.startswith("claude"):
-        return "anthropic"
-    if model.startswith("gemini"):
-        return "gemini"
-    if "diffusion" in model:
-        return "diffusers"
-    return None
-
-
-def compress_image(image_bytes, max_size=(800, 600)):
-    # Create a copy of the bytes in memory
-    buffer = io.BytesIO(image_bytes)
-    img = Image.open(buffer)
-
-    # Force loading of image data
-    img.load()
-
-    # Convert RGBA to RGB if necessary
-    if img.mode == "RGBA":
-        background = Image.new("RGB", img.size, (255, 255, 255))
-        background.paste(img, mask=img.split()[3])
-        img = background
-
-    # Resize if needed
-    if img.size[0] > max_size[0] or img.size[1] > max_size[1]:
-        img.thumbnail(max_size)
-
-    # Save with minimal compression
-    out_buffer = io.BytesIO()
-    img.save(out_buffer, format="JPEG", quality=95, optimize=False)
-    return out_buffer.getvalue()
-
-
-load_env_from_execution_dir()
-deepseek_api_key = os.getenv("DEEPSEEK_API_KEY", None)
-gemini_api_key = os.getenv("GEMINI_API_KEY", None)
-
-anthropic_api_key = os.getenv("ANTHROPIC_API_KEY", None)
-openai_api_key = os.getenv("OPENAI_API_KEY", None)
-
-NPCSH_CHAT_MODEL = os.environ.get("NPCSH_CHAT_MODEL", "llama3.2")
-# print("NPCSH_CHAT_MODEL", NPCSH_CHAT_MODEL)
-NPCSH_CHAT_PROVIDER = os.environ.get("NPCSH_CHAT_PROVIDER", "ollama")
-# print("NPCSH_CHAT_PROVIDER", NPCSH_CHAT_PROVIDER)
-NPCSH_DB_PATH = os.path.expanduser(
-    os.environ.get("NPCSH_DB_PATH", "~/npcsh_history.db")
-)
-NPCSH_VECTOR_DB_PATH = os.path.expanduser(
-    os.environ.get("NPCSH_VECTOR_DB_PATH", "~/npcsh_chroma.db")
-)
-NPCSH_DEFAULT_MODE = os.path.expanduser(os.environ.get("NPCSH_DEFAULT_MODE", "chat"))
-
-NPCSH_VISION_MODEL = os.environ.get("NPCSH_VISION_MODEL", "llava7b")
-NPCSH_VISION_PROVIDER = os.environ.get("NPCSH_VISION_PROVIDER", "ollama")
-NPCSH_IMAGE_GEN_MODEL = os.environ.get(
-    "NPCSH_IMAGE_GEN_MODEL", "runwayml/stable-diffusion-v1-5"
-)
-NPCSH_IMAGE_GEN_PROVIDER = os.environ.get("NPCSH_IMAGE_GEN_PROVIDER", "diffusers")
-NPCSH_VIDEO_GEN_MODEL = os.environ.get(
-    "NPCSH_VIDEO_GEN_MODEL", "damo-vilab/text-to-video-ms-1.7b"
-)
-NPCSH_VIDEO_GEN_PROVIDER = os.environ.get("NPCSH_VIDEO_GEN_PROVIDER", "diffusers")
-
-NPCSH_EMBEDDING_MODEL = os.environ.get("NPCSH_EMBEDDING_MODEL", "nomic-embed-text")
-NPCSH_EMBEDDING_PROVIDER = os.environ.get("NPCSH_EMBEDDING_PROVIDER", "ollama")
-NPCSH_REASONING_MODEL = os.environ.get("NPCSH_REASONING_MODEL", "deepseek-r1")
-NPCSH_REASONING_PROVIDER = os.environ.get("NPCSH_REASONING_PROVIDER", "ollama")
-NPCSH_STREAM_OUTPUT = eval(os.environ.get("NPCSH_STREAM_OUTPUT", "0")) == 1
-NPCSH_API_URL = os.environ.get("NPCSH_API_URL", None)
-NPCSH_SEARCH_PROVIDER = os.environ.get("NPCSH_SEARCH_PROVIDER", "duckduckgo")
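For anyone auditing this removal, a minimal sketch of how the deleted `@model` routing above behaved, assuming npcsh 0.3.32 is still installed; the command string and model list here are invented for illustration:

```python
# Sketch only: requires npcsh==0.3.32; npcsh/npc_sysenv.py was removed in 1.0.0.
from npcsh.npc_sysenv import get_model_and_provider

# Hypothetical command and model list.
model, provider, cleaned = get_model_and_provider(
    "@gpt-4o summarize this file", ["gpt-4o", "llama3.2"]
)
print(model, provider, cleaned)
# gpt-4o openai summarize this file  (lookup_provider matches the "gpt-" prefix)
```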
npcsh/npc_team/assembly_lines/test_pipeline.py
DELETED
@@ -1,181 +0,0 @@
-import pandas as pd
-from sqlalchemy import create_engine
-import os
-
-# Sample market events data
-market_events_data = {
-    "datetime": [
-        "2023-10-15 09:00:00",
-        "2023-10-16 10:30:00",
-        "2023-10-17 11:45:00",
-        "2023-10-18 13:15:00",
-        "2023-10-19 14:30:00",
-    ],
-    "headline": [
-        "Stock Market Rallies Amid Positive Economic Data",
-        "Tech Giant Announces New Product Line",
-        "Federal Reserve Hints at Interest Rate Pause",
-        "Oil Prices Surge Following Supply Concerns",
-        "Retail Sector Reports Record Q3 Earnings",
-    ],
-}
-
-# Create a DataFrame
-market_events_df = pd.DataFrame(market_events_data)
-
-# Define database path relative to user's home directory
-db_path = os.path.expanduser("~/npcsh_history.db")
-
-# Create a connection to the SQLite database
-engine = create_engine(f"sqlite:///{db_path}")
-with engine.connect() as connection:
-    # Write the data to a new table 'market_events', replacing existing data
-    market_events_df.to_sql(
-        "market_events", con=connection, if_exists="replace", index=False
-    )
-
-print("Market events have been added to the database.")
-
-email_data = {
-    "datetime": [
-        "2023-10-10 10:00:00",
-        "2023-10-11 11:00:00",
-        "2023-10-12 12:00:00",
-        "2023-10-13 13:00:00",
-        "2023-10-14 14:00:00",
-    ],
-    "subject": [
-        "Meeting Reminder",
-        "Project Update",
-        "Invoice Attached",
-        "Weekly Report",
-        "Holiday Notice",
-    ],
-    "sender": [
-        "alice@example.com",
-        "bob@example.com",
-        "carol@example.com",
-        "dave@example.com",
-        "eve@example.com",
-    ],
-    "recipient": [
-        "bob@example.com",
-        "carol@example.com",
-        "dave@example.com",
-        "eve@example.com",
-        "alice@example.com",
-    ],
-    "body": [
-        "Don't forget the meeting tomorrow at 10 AM.",
-        "The project is progressing well, see attached update.",
-        "Please find your invoice attached.",
-        "Here is the weekly report.",
-        "The office will be closed on holidays, have a great time!",
-    ],
-}
-
-# Create a DataFrame
-emails_df = pd.DataFrame(email_data)
-
-# Define database path relative to user's home directory
-db_path = os.path.expanduser("~/npcsh_history.db")
-
-# Create a connection to the SQLite database
-engine = create_engine(f"sqlite:///{db_path}")
-with engine.connect() as connection:
-    # Write the data to a new table 'emails', replacing existing data
-    emails_df.to_sql("emails", con=connection, if_exists="replace", index=False)
-
-print("Sample emails have been added to the database.")
-
-
-from npcsh.npc_compiler import PipelineRunner
-import os
-
-pipeline_runner = PipelineRunner(
-    pipeline_file="morning_routine.pipe",
-    npc_root_dir=os.path.abspath("."),  # Use absolute path to parent directory
-    db_path="~/npcsh_history.db",
-)
-pipeline_runner.execute_pipeline()
-
-
-import pandas as pd
-from sqlalchemy import create_engine
-import os
-
-# Sample data generation for news articles
-news_articles_data = {
-    "news_article_id": list(range(1, 21)),
-    "headline": [
-        "Economy sees unexpected growth in Q4",
-        "New tech gadget takes the world by storm",
-        "Political debate heats up over new policy",
-        "Health concerns rise amid new disease outbreak",
-        "Sports team secures victory in last minute",
-        "New economic policy introduced by government",
-        "Breakthrough in AI technology announced",
-        "Political leader delivers speech on reforms",
-        "Healthcare systems pushed to limits",
-        "Celebrated athlete breaks world record",
-        "Controversial economic measures spark debate",
-        "Innovative tech startup gains traction",
-        "Political scandal shakes administration",
-        "Healthcare workers protest for better pay",
-        "Major sports event postponed due to weather",
-        "Trade tensions impact global economy",
-        "Tech company accused of data breach",
-        "Election results lead to political upheaval",
-        "Vaccine developments offer hope amid pandemic",
-        "Sports league announces return to action",
-    ],
-    "content": ["Article content here..." for _ in range(20)],
-    "publication_date": pd.date_range(start="1/1/2023", periods=20, freq="D"),
-}
-
-# Create a DataFrame
-news_df = pd.DataFrame(news_articles_data)
-
-# Define the database path
-db_path = os.path.expanduser("~/npcsh_history.db")
-
-# Create a connection to the SQLite database
-engine = create_engine(f"sqlite:///{db_path}")
-with engine.connect() as connection:
-    # Write the data to a new table 'news_articles', replacing existing data
-    news_df.to_sql("news_articles", con=connection, if_exists="replace", index=False)
-
-print("News articles have been added to the database.")
-
-from npcsh.npc_compiler import PipelineRunner
-import os
-
-runner = PipelineRunner(
-    "./news_analysis.pipe",
-    db_path=os.path.expanduser("~/npcsh_history.db"),
-    npc_root_dir=os.path.abspath("."),
-)
-results = runner.execute_pipeline()
-
-print("\nResults:")
-print("\nClassifications (processed row by row):")
-print(results["classify_news"])
-print("\nAnalysis (processed in batch):")
-print(results["analyze_news"])
-
-
-from npcsh.npc_compiler import PipelineRunner
-import os
-
-runner = PipelineRunner(
-    "./news_analysis_mixa.pipe",
-    db_path=os.path.expanduser("~/npcsh_history.db"),
-    npc_root_dir=os.path.abspath("."),
-)
-results = runner.execute_pipeline()
-
-print("\nResults:")
-print("\nClassifications (processed row by row):")
-print(results["classify_news"])
-print("\nAnalysis (processed in batch):")
-print(results["analyze_news"])
npcsh/npc_team/corca.npc
DELETED
@@ -1,13 +0,0 @@
-name: corca
-primary_directive: |
-  You are corca, a distinguished member of the NPC team.
-  Your expertise is in the area of software development and
-  you have a kanck for thinking through problems carefully.
-  You favor solutions that prioritize simplicity and clarity and
-  ought to always consider how some suggestion may increase rather than reduce tech debt
-  unnecessarily. Now, the key is in this last term, "unnecessarily".
-  You must distinguish carefully and when in doubt, opt to ask for further
-  information or clarification with concrete clear options that make it
-  easy for a user to choose.
-model: gpt-4o-mini
-provider: openai
npcsh/npc_team/foreman.npc
DELETED
@@ -1,7 +0,0 @@
-name: foreman
-primary_directive: You are the foreman of an NPC team. It is your duty
-  to delegate tasks to your team members or to other specialized teams
-  in order to complete the project. You are responsible for the
-  completion of the project and the safety of your team members.
-model: gpt-4o-mini
-provider: openai
npcsh/npc_team/npcsh.ctx
DELETED
@@ -1,11 +0,0 @@
-context: |
-  The npcsh NPC team is devoted to providing a safe and helpful
-  environment for users where they can work and be as successful as possible.
-  npcsh is a command-line tool that makes it easy for users to harness
-  the power of LLMs from a command line shell.
-databases:
-  - ~/npcsh_history.db
-mcp_servers:
-  - /path/to/mcp/server.py
-  - @npm for server
-
npcsh/npc_team/sibiji.npc
DELETED

npcsh/npc_team/templates/analytics/celona.npc
File without changes

npcsh/npc_team/templates/hr_support/raone.npc
File without changes

npcsh/npc_team/templates/humanities/eriane.npc
DELETED
@@ -1,4 +0,0 @@
-name: eriane
-primary_directive: you are an expert in the humanities and you must draw from your vast knowledge of history, literature, art, and philosophy to aid users in their requests, pulling real useful examples that can make users better understand results.
-model: gpt-4o-mini
-provider: openai

npcsh/npc_team/templates/it_support/lineru.npc
File without changes

npcsh/npc_team/templates/marketing/slean.npc
DELETED
@@ -1,4 +0,0 @@
-name: slean
-primary_directive: Assist with marketing issues, challenges and questions. When responding, be careful to always think through the problems as if you are a wmarketing wiz who has launched and hyper scaled companies through effective marketing by always thinking outside the box.
-model: gpt-4o-mini
-provider: openai

npcsh/npc_team/templates/philosophy/maurawa.npc
File without changes

npcsh/npc_team/templates/sales/turnic.npc
DELETED
@@ -1,4 +0,0 @@
-name: turnic
-primary_directive: Assist with sales challenges and questions. When responding, keep in mind that sales professionals tend to be interested in achieving results quickly so you must ensure that you opt for simpler and more straightforward solutions and explanations without much fanfare.
-model: gpt-4o-mini
-provider: openai

npcsh/npc_team/templates/software/welxor.npc
File without changes

npcsh/npc_team/tools/bash_executer.tool
DELETED
@@ -1,32 +0,0 @@
-tool_name: bash_executor
-description: Execute bash queries.
-inputs:
-  - bash_command
-  - user_request
-steps:
-  - engine: python
-    code: |
-      import subprocess
-      import os
-      cmd = '{{bash_command}}'  # Properly quote the command input
-      def run_command(cmd):
-          process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-          stdout, stderr = process.communicate()
-          if stderr:
-              print(f"Error: {stderr.decode('utf-8')}")
-              return stderr
-          return stdout
-      result = run_command(cmd)
-      output = result.decode('utf-8')
-
-  - engine: natural
-    code: |
-
-      Here is the result of the bash command:
-      ```
-      {{ output }}
-      ```
-      This was the original user request: {{ user_request }}
-
-      Please provide a response accordingly.
-
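The `{{bash_command}}`, `{{ output }}`, and `{{ user_request }}` placeholders in the tool above are template slots filled in at run time by the (also removed) npc_compiler. A rough sketch of the substitution semantics, assuming Jinja2-style rendering (an assumption; the actual engine lived in the deleted npcsh/npc_compiler.py and is not shown in this diff):

```python
# Sketch only: approximates how a .tool step's placeholders fill in.
from jinja2 import Template

step_code = "cmd = '{{ bash_command }}'  # Properly quote the command input"
print(Template(step_code).render(bash_command="ls -la"))
# cmd = 'ls -la'  # Properly quote the command input
```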