npcsh 1.1.7__tar.gz → 1.1.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79) hide show
  1. {npcsh-1.1.7 → npcsh-1.1.8}/PKG-INFO +52 -24
  2. {npcsh-1.1.7 → npcsh-1.1.8}/README.md +51 -23
  3. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/corca.py +97 -38
  4. npcsh-1.1.8/npcsh/npc_team/jinxs/modes/corca.jinx +28 -0
  5. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/modes/plonk.jinx +0 -4
  6. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/ots.jinx +2 -2
  7. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/roll.jinx +6 -4
  8. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/search.jinx +1 -1
  9. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npcsh.py +18 -17
  10. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh.egg-info/PKG-INFO +52 -24
  11. {npcsh-1.1.7 → npcsh-1.1.8}/setup.py +1 -1
  12. npcsh-1.1.7/npcsh/npc_team/jinxs/modes/corca.jinx +0 -28
  13. {npcsh-1.1.7 → npcsh-1.1.8}/LICENSE +0 -0
  14. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/__init__.py +0 -0
  15. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/_state.py +0 -0
  16. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/alicanto.py +0 -0
  17. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/build.py +0 -0
  18. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/guac.py +0 -0
  19. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/mcp_helpers.py +0 -0
  20. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/mcp_server.py +0 -0
  21. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc.py +0 -0
  22. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/alicanto.npc +0 -0
  23. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/alicanto.png +0 -0
  24. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/corca.npc +0 -0
  25. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/corca.png +0 -0
  26. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/corca_example.png +0 -0
  27. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/foreman.npc +0 -0
  28. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/frederic.npc +0 -0
  29. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/frederic4.png +0 -0
  30. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/guac.png +0 -0
  31. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/code/python.jinx +0 -0
  32. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/code/sh.jinx +0 -0
  33. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/code/sql.jinx +0 -0
  34. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/modes/alicanto.jinx +0 -0
  35. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/modes/guac.jinx +0 -0
  36. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/modes/pti.jinx +0 -0
  37. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/modes/spool.jinx +0 -0
  38. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/modes/wander.jinx +0 -0
  39. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/modes/yap.jinx +0 -0
  40. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/breathe.jinx +0 -0
  41. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/core/build.jinx +0 -0
  42. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/core/compile.jinx +0 -0
  43. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/core/help.jinx +0 -0
  44. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/core/init.jinx +0 -0
  45. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/core/jinxs.jinx +0 -0
  46. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/core/set.jinx +0 -0
  47. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/edit_file.jinx +0 -0
  48. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/flush.jinx +0 -0
  49. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/npc-studio.jinx +0 -0
  50. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/plan.jinx +0 -0
  51. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/sample.jinx +0 -0
  52. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/serve.jinx +0 -0
  53. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/sleep.jinx +0 -0
  54. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/trigger.jinx +0 -0
  55. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/jinxs/utils/vixynt.jinx +0 -0
  56. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/kadiefa.npc +0 -0
  57. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/kadiefa.png +0 -0
  58. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/npcsh.ctx +0 -0
  59. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/npcsh_sibiji.png +0 -0
  60. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/plonk.npc +0 -0
  61. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/plonk.png +0 -0
  62. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/plonkjr.npc +0 -0
  63. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/plonkjr.png +0 -0
  64. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/sibiji.npc +0 -0
  65. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/sibiji.png +0 -0
  66. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/spool.png +0 -0
  67. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/npc_team/yap.png +0 -0
  68. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/plonk.py +0 -0
  69. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/pti.py +0 -0
  70. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/routes.py +0 -0
  71. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/spool.py +0 -0
  72. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/wander.py +0 -0
  73. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh/yap.py +0 -0
  74. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh.egg-info/SOURCES.txt +0 -0
  75. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh.egg-info/dependency_links.txt +0 -0
  76. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh.egg-info/entry_points.txt +0 -0
  77. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh.egg-info/requires.txt +0 -0
  78. {npcsh-1.1.7 → npcsh-1.1.8}/npcsh.egg-info/top_level.txt +0 -0
  79. {npcsh-1.1.7 → npcsh-1.1.8}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: npcsh
3
- Version: 1.1.7
3
+ Version: 1.1.8
4
4
  Summary: npcsh is a command-line toolkit for using AI agents in novel ways.
5
5
  Home-page: https://github.com/NPC-Worldwide/npcsh
6
6
  Author: Christopher Agostino
@@ -143,17 +143,49 @@ and you will enter the NPC shell. Additionally, the pip installation includes th
143
143
  ```
144
144
 
145
145
 
146
- - **Search the Web**
146
+ - **Search**
147
+ - search the web
147
148
  ```bash
148
- /search "cal golden bears football schedule" -sp perplexity
149
+ /search "cerulean city" perplexity
150
+
149
151
  ```
150
152
  <p align="center">
151
- <img src="https://raw.githubusercontent.com/npc-worldwide/npcsh/main/test_data/search_example.png" alt="example of search results", width=600>
152
- </p>
153
+ <img src="https://raw.githubusercontent.com/npc-worldwide/npcsh/main/test_data/search.gif" alt="example of search results", width=600>
154
+ </p>
155
+
156
+ - search approved memories
157
+ ```bash
158
+ /search query="how to deploy python apps" memory=true
159
+ ```
160
+
161
+ - search the knowledge graph
162
+
163
+ ```bash
164
+ /search query="user preferences for database" kg=true
165
+ ```
166
+
167
+ - execute a RAG search across files
168
+
169
+ ```bash
170
+ /search --rag -f ~/docs/api.md,~/docs/config.yaml "authentication setup"
171
+ ```
172
+
173
+ - brainblast search (searches many keyword combinations)
174
+
175
+ ```bash
176
+ /search query="git commands" brainblast=true
177
+ ```
178
+
179
+ - web search with specific provider
180
+
181
+ ```bash
182
+ /search query="family vacations" sprovider="perplexity"
183
+ ```
153
184
 
154
185
  - **Computer Use**
186
+
155
187
  ```bash
156
- /plonk 'find out the latest news on cnn'
188
+ /plonk 'find out the latest news on cnn' gemma3:12b ollama
157
189
  ```
158
190
 
159
191
  - **Generate Image**
@@ -165,12 +197,12 @@ and you will enter the NPC shell. Additionally, the pip installation includes th
165
197
  </p>
166
198
  - **Generate Video**
167
199
  ```bash
168
- /roll 'generate a video of a hat riding a dog'
200
+ /roll 'generate a video of a hat riding a dog' veo-3.1-fast-generate-preview gemini
169
201
  ```
170
- <!--
202
+
171
203
  <p align="center">
172
- <img src="https://raw.githubusercontent.com/npc-worldwide/npcsh/main/test_data/hat_video.mp4" alt="video of a hat riding a dog", width=250>
173
- </p> -->
204
+ <img src="https://raw.githubusercontent.com/NPC-Worldwide/npcsh/main/test_data/hatridingdog.gif" alt="video of a hat riding a dog", width=250>
205
+ </p>
174
206
 
175
207
  - **Serve an NPC Team**
176
208
  ```bash
@@ -212,16 +244,13 @@ This architecture enables users to build complex AI workflows while maintaining
212
244
 
213
245
  Importantly, users can switch easily between the NPCs they are chatting with by typing `/n npc_name` within the NPC shell. Likewise, they can create Jinxs and then use them from within the NPC shell by invoking the jinx name and the arguments required for the Jinx; `/<jinx_name> arg1 arg2`
214
246
 
215
- # Macros
216
- - activated by invoking `/<command> ...` in `npcsh`, macros can be called in bash or through the `npc` CLI. In our examples, we provide both `npcsh` calls as well as bash calls with the `npc` cli where relevant. For converting any `/<command>` in `npcsh` to a bash version, replace the `/` with `npc ` and the macro command will be invoked as a positional argument. Some, like breathe, flush,
217
-
247
+ # Jinx as macros
248
+ - activated by invoking `/<jinx_name> ...` in `npcsh`, jinxs can be called in bash or through the `npc` CLI. In our examples, we provide both `npcsh` calls as well as bash calls with the `npc` cli where relevant. For converting any `/<command>` in `npcsh` to a bash version, replace the `/` with `npc ` and the macro command will be invoked as a positional argument.
218
249
  - `/alicanto` - Conduct deep research with multiple perspectives, identifying gold insights and cliff warnings. Usage: `/alicanto 'query to be researched' --num-npcs <int> --depth <int>`
219
250
  - `/build` - Builds the current npc team to an executable format . Usage: `/build <output[flask,docker,cli,static]> --options`
220
- - `/brainblast` - Execute an advanced chunked search on command history. Usage: `/brainblast 'query' --top_k 10`
221
251
  - `/breathe` - Condense context on a regular cadence. Usage: `/breathe -p <provider: NPCSH_CHAT_PROVIDER> -m <model: NPCSH_CHAT_MODEL>`
222
252
  - `/compile` - Compile NPC profiles. Usage: `/compile <path_to_npc> `
223
253
  - `/corca` - Enter the Corca MCP-powered agentic shell. Usage: `/corca [--mcp-server-path path]`
224
- - `/flush` - Flush the last N messages. Usage: `/flush N=10`
225
254
  - `/guac` - Enter guac mode. Usage: `/guac`
226
255
  - `/help` - Show help for commands, NPCs, or Jinxs. Usage: `/help`
227
256
  - `/init` - Initialize NPC project. Usage: `/init`
@@ -229,13 +258,11 @@ Importantly, users can switch easily between the NPCs they are chatting with by
229
258
  - `/<jinx_name>` - Run a jinx with specified command line arguments. `/<jinx_name> jinx_arg1 jinx_arg2`
230
259
  - `/npc-studio` - Start npc studio. Pulls NPC Studio github to `~/.npcsh/npc-studio` and launches it in development mode after installing necessary NPM dependencies.Usage: `/npc-studio`
231
260
  - `/ots` - Take screenshot and analyze with vision model. Usage: `/ots filename=<output_file_name_for_screenshot>` then select an area, and you will be prompted for your request.
232
- - `/plan` - Execute a plan command. Usage: `/plan 'idea for a cron job to be set up to accomplish'`
233
261
  - `/plonk` - Use vision model to interact with GUI. Usage: `/plonk '<task description>' `
234
262
  - `/pti` - Use pardon-the-interruption mode to interact with reasoning model LLM. Usage: `/pti`
235
- - `/rag` - Execute a RAG command using ChromaDB embeddings with optional file input (-f/--file). Usage: `/rag '<query_to_rag>' --emodel <NPCSH_EMBEDDING_MODEL> --eprovider <NPCSH_EMBEDDING_PROVIDER>`
236
263
  - `/roll` - generate a video with video generation model. Usage: `/roll '<description_for_a_movie>' --vgmodel <NPCSH_VIDEO_GEN_MODEL> --vgprovider <NPCSH_VIDEO_GEN_PROVIDER>`
237
264
  - `/sample` - Send a context-free prompt to an LLM, letting you get fresh answers without needing to start a separate conversation/shell. Usage: `/sample -m <NPCSH_CHAT_MODEL> 'question to sample --temp <float> --top_k int`
238
- - `/search` - Execute a web search command. Usage: `/search 'search query' --sprovider <provider>` where provider is currently limited to DuckDuckGo and Perplexity. Wikipedia integration ongoing.
265
+ - `/search` - Execute a search command on the web, in your memories, in the knowledge graph, or in documents with rag. Usage: `/search 'search query' --sprovider <provider>` where provider is currently limited to DuckDuckGo, Perplexity, and Exa with more coming soon through litellm. Wikipedia integration ongoing. See above for more search specific examples.
239
266
  - `/serve` - Serve an NPC Team server.
240
267
  - `/set` - Set configuration values.
241
268
  - Usage:
@@ -249,7 +276,6 @@ Importantly, users can switch easily between the NPCs they are chatting with by
249
276
  - Usage:
250
277
  - Gen Image: `/vixynt -igp <NPCSH_IMAGE_GEN_PROVIDER> --igmodel <NPCSH_IMAGE_GEN_MODEL> --output_file <path_to_file> width=<int:1024> height =<int:1024> 'description of image`
251
278
  - Edit Image: `/vixynt 'edit this....' --attachments '/path/to/image.png,/path/to/image.jpeg'`
252
-
253
279
  - `/wander` - A method for LLMs to think on a problem by switching between states of high temperature and low temperature. Usage: `/wander 'query to wander about' --provider "ollama" --model "deepseek-r1:32b" environment="a vast dark ocean" interruption-likelihood=.1`
254
280
  - `/yap` - Enter voice chat (yap) mode. Usage: `/yap -n <npc_to_chat_with>`
255
281
 
@@ -271,24 +297,26 @@ Importantly, users can switch easily between the NPCs they are chatting with by
271
297
  '
272
298
 
273
299
  ## Read the Docs
274
- To see more about how to use the macros and modes in the NPC Shell, read the docs at [npc-shell.readthedocs.io](https://npc-shell.readthedocs.io/en/latest/)
300
+ To see more about how to use the jinxs and modes in the NPC Shell, read the docs at [npc-shell.readthedocs.io](https://npc-shell.readthedocs.io/en/latest/)
275
301
 
276
302
 
277
303
  ## Inference Capabilities
278
304
  - `npcsh` works with local and enterprise LLM providers through its LiteLLM integration, allowing users to run inference from Ollama, LMStudio, vLLM, MLX, OpenAI, Anthropic, Gemini, and Deepseek, making it a versatile tool for both simple commands and sophisticated AI-driven tasks.
279
305
 
280
306
  ## NPC Studio
281
- There is a graphical user interface that makes use of the NPC Toolkit through the NPC Studio. See the source code for NPC Studio [here](https://github.com/npc-worldwide/npc-studio). Download the executables at [our website](https://enpisi.com/npc-studio). For the most up to date version, you can use NPC Studio by invoking it in npcsh
307
+ There is a graphical user interface that makes use of the NPC Toolkit through the NPC Studio. See the source code for NPC Studio [here](https://github.com/npc-worldwide/npc-studio). Download the executables at [our website](https://enpisi.com/downloads). For the most up to date development version, you can use NPC Studio by invoking it in npcsh
308
+
282
309
  ```
283
310
  /npc-studio
284
311
  ```
285
- which will download and set up and serve the NPC Studio application within your `~/.npcsh` folder. It requires `npm` and `node` to work.
312
+ which will download, set up, and serve the NPC Studio application within your `~/.npcsh` folder. It requires `npm` and `node` to work, and of course npcpy!
286
313
 
287
- ## Mailing List
314
+ ## Mailing List and Community
288
315
  Interested to stay in the loop and to hear the latest and greatest about `npcpy`, `npcsh`, and NPC Studio? Be sure to sign up for the [newsletter](https://forms.gle/n1NzQmwjsV4xv1B2A)!
289
316
 
317
+ [Join the discord to discuss ideas for npc tools](https://discord.gg/VvYVT5YC)
290
318
  ## Support
291
- If you appreciate the work here, [consider supporting NPC Worldwide with a monthly donation](https://buymeacoffee.com/npcworldwide), [buying NPC-WW themed merch](https://enpisi.com/shop), or hiring us to help you explore how to use the NPC Toolkit and AI tools to help your business or research team, please reach out to info@npcworldwi.de .
319
+ If you appreciate the work here, [consider supporting NPC Worldwide with a monthly donation](https://buymeacoffee.com/npcworldwide), [buying NPC-WW themed merch](https://enpisi.com/shop), [using and subscribing to Lavanzaro](https://lavanzaro.com), or hiring us to help you explore how to use the NPC Toolkit and AI tools to help your business or research team, please reach out to info@npcworldwi.de.
292
320
 
293
321
 
294
322
  ## Installation
@@ -43,17 +43,49 @@ and you will enter the NPC shell. Additionally, the pip installation includes th
43
43
  ```
44
44
 
45
45
 
46
- - **Search the Web**
46
+ - **Search**
47
+ - search the web
47
48
  ```bash
48
- /search "cal golden bears football schedule" -sp perplexity
49
+ /search "cerulean city" perplexity
50
+
49
51
  ```
50
52
  <p align="center">
51
- <img src="https://raw.githubusercontent.com/npc-worldwide/npcsh/main/test_data/search_example.png" alt="example of search results", width=600>
52
- </p>
53
+ <img src="https://raw.githubusercontent.com/npc-worldwide/npcsh/main/test_data/search.gif" alt="example of search results", width=600>
54
+ </p>
55
+
56
+ - search approved memories
57
+ ```bash
58
+ /search query="how to deploy python apps" memory=true
59
+ ```
60
+
61
+ - search the knowledge graph
62
+
63
+ ```bash
64
+ /search query="user preferences for database" kg=true
65
+ ```
66
+
67
+ - execute a RAG search across files
68
+
69
+ ```bash
70
+ /search --rag -f ~/docs/api.md,~/docs/config.yaml "authentication setup"
71
+ ```
72
+
73
+ - brainblast search (searches many keyword combinations)
74
+
75
+ ```bash
76
+ /search query="git commands" brainblast=true
77
+ ```
78
+
79
+ - web search with specific provider
80
+
81
+ ```bash
82
+ /search query="family vacations" sprovider="perplexity"
83
+ ```
53
84
 
54
85
  - **Computer Use**
86
+
55
87
  ```bash
56
- /plonk 'find out the latest news on cnn'
88
+ /plonk 'find out the latest news on cnn' gemma3:12b ollama
57
89
  ```
58
90
 
59
91
  - **Generate Image**
@@ -65,12 +97,12 @@ and you will enter the NPC shell. Additionally, the pip installation includes th
65
97
  </p>
66
98
  - **Generate Video**
67
99
  ```bash
68
- /roll 'generate a video of a hat riding a dog'
100
+ /roll 'generate a video of a hat riding a dog' veo-3.1-fast-generate-preview gemini
69
101
  ```
70
- <!--
102
+
71
103
  <p align="center">
72
- <img src="https://raw.githubusercontent.com/npc-worldwide/npcsh/main/test_data/hat_video.mp4" alt="video of a hat riding a dog", width=250>
73
- </p> -->
104
+ <img src="https://raw.githubusercontent.com/NPC-Worldwide/npcsh/main/test_data/hatridingdog.gif" alt="video of a hat riding a dog", width=250>
105
+ </p>
74
106
 
75
107
  - **Serve an NPC Team**
76
108
  ```bash
@@ -112,16 +144,13 @@ This architecture enables users to build complex AI workflows while maintaining
112
144
 
113
145
  Importantly, users can switch easily between the NPCs they are chatting with by typing `/n npc_name` within the NPC shell. Likewise, they can create Jinxs and then use them from within the NPC shell by invoking the jinx name and the arguments required for the Jinx; `/<jinx_name> arg1 arg2`
114
146
 
115
- # Macros
116
- - activated by invoking `/<command> ...` in `npcsh`, macros can be called in bash or through the `npc` CLI. In our examples, we provide both `npcsh` calls as well as bash calls with the `npc` cli where relevant. For converting any `/<command>` in `npcsh` to a bash version, replace the `/` with `npc ` and the macro command will be invoked as a positional argument. Some, like breathe, flush,
117
-
147
+ # Jinx as macros
148
+ - activated by invoking `/<jinx_name> ...` in `npcsh`, jinxs can be called in bash or through the `npc` CLI. In our examples, we provide both `npcsh` calls as well as bash calls with the `npc` cli where relevant. For converting any `/<command>` in `npcsh` to a bash version, replace the `/` with `npc ` and the macro command will be invoked as a positional argument.
118
149
  - `/alicanto` - Conduct deep research with multiple perspectives, identifying gold insights and cliff warnings. Usage: `/alicanto 'query to be researched' --num-npcs <int> --depth <int>`
119
150
  - `/build` - Builds the current npc team to an executable format . Usage: `/build <output[flask,docker,cli,static]> --options`
120
- - `/brainblast` - Execute an advanced chunked search on command history. Usage: `/brainblast 'query' --top_k 10`
121
151
  - `/breathe` - Condense context on a regular cadence. Usage: `/breathe -p <provider: NPCSH_CHAT_PROVIDER> -m <model: NPCSH_CHAT_MODEL>`
122
152
  - `/compile` - Compile NPC profiles. Usage: `/compile <path_to_npc> `
123
153
  - `/corca` - Enter the Corca MCP-powered agentic shell. Usage: `/corca [--mcp-server-path path]`
124
- - `/flush` - Flush the last N messages. Usage: `/flush N=10`
125
154
  - `/guac` - Enter guac mode. Usage: `/guac`
126
155
  - `/help` - Show help for commands, NPCs, or Jinxs. Usage: `/help`
127
156
  - `/init` - Initialize NPC project. Usage: `/init`
@@ -129,13 +158,11 @@ Importantly, users can switch easily between the NPCs they are chatting with by
129
158
  - `/<jinx_name>` - Run a jinx with specified command line arguments. `/<jinx_name> jinx_arg1 jinx_arg2`
130
159
  - `/npc-studio` - Start npc studio. Pulls NPC Studio github to `~/.npcsh/npc-studio` and launches it in development mode after installing necessary NPM dependencies.Usage: `/npc-studio`
131
160
  - `/ots` - Take screenshot and analyze with vision model. Usage: `/ots filename=<output_file_name_for_screenshot>` then select an area, and you will be prompted for your request.
132
- - `/plan` - Execute a plan command. Usage: `/plan 'idea for a cron job to be set up to accomplish'`
133
161
  - `/plonk` - Use vision model to interact with GUI. Usage: `/plonk '<task description>' `
134
162
  - `/pti` - Use pardon-the-interruption mode to interact with reasoning model LLM. Usage: `/pti`
135
- - `/rag` - Execute a RAG command using ChromaDB embeddings with optional file input (-f/--file). Usage: `/rag '<query_to_rag>' --emodel <NPCSH_EMBEDDING_MODEL> --eprovider <NPCSH_EMBEDDING_PROVIDER>`
136
163
  - `/roll` - generate a video with video generation model. Usage: `/roll '<description_for_a_movie>' --vgmodel <NPCSH_VIDEO_GEN_MODEL> --vgprovider <NPCSH_VIDEO_GEN_PROVIDER>`
137
164
  - `/sample` - Send a context-free prompt to an LLM, letting you get fresh answers without needing to start a separate conversation/shell. Usage: `/sample -m <NPCSH_CHAT_MODEL> 'question to sample --temp <float> --top_k int`
138
- - `/search` - Execute a web search command. Usage: `/search 'search query' --sprovider <provider>` where provider is currently limited to DuckDuckGo and Perplexity. Wikipedia integration ongoing.
165
+ - `/search` - Execute a search command on the web, in your memories, in the knowledge graph, or in documents with rag. Usage: `/search 'search query' --sprovider <provider>` where provider is currently limited to DuckDuckGo, Perplexity, and Exa with more coming soon through litellm. Wikipedia integration ongoing. See above for more search specific examples.
139
166
  - `/serve` - Serve an NPC Team server.
140
167
  - `/set` - Set configuration values.
141
168
  - Usage:
@@ -149,7 +176,6 @@ Importantly, users can switch easily between the NPCs they are chatting with by
149
176
  - Usage:
150
177
  - Gen Image: `/vixynt -igp <NPCSH_IMAGE_GEN_PROVIDER> --igmodel <NPCSH_IMAGE_GEN_MODEL> --output_file <path_to_file> width=<int:1024> height =<int:1024> 'description of image`
151
178
  - Edit Image: `/vixynt 'edit this....' --attachments '/path/to/image.png,/path/to/image.jpeg'`
152
-
153
179
  - `/wander` - A method for LLMs to think on a problem by switching between states of high temperature and low temperature. Usage: `/wander 'query to wander about' --provider "ollama" --model "deepseek-r1:32b" environment="a vast dark ocean" interruption-likelihood=.1`
154
180
  - `/yap` - Enter voice chat (yap) mode. Usage: `/yap -n <npc_to_chat_with>`
155
181
 
@@ -171,24 +197,26 @@ Importantly, users can switch easily between the NPCs they are chatting with by
171
197
  '
172
198
 
173
199
  ## Read the Docs
174
- To see more about how to use the macros and modes in the NPC Shell, read the docs at [npc-shell.readthedocs.io](https://npc-shell.readthedocs.io/en/latest/)
200
+ To see more about how to use the jinxs and modes in the NPC Shell, read the docs at [npc-shell.readthedocs.io](https://npc-shell.readthedocs.io/en/latest/)
175
201
 
176
202
 
177
203
  ## Inference Capabilities
178
204
  - `npcsh` works with local and enterprise LLM providers through its LiteLLM integration, allowing users to run inference from Ollama, LMStudio, vLLM, MLX, OpenAI, Anthropic, Gemini, and Deepseek, making it a versatile tool for both simple commands and sophisticated AI-driven tasks.
179
205
 
180
206
  ## NPC Studio
181
- There is a graphical user interface that makes use of the NPC Toolkit through the NPC Studio. See the source code for NPC Studio [here](https://github.com/npc-worldwide/npc-studio). Download the executables at [our website](https://enpisi.com/npc-studio). For the most up to date version, you can use NPC Studio by invoking it in npcsh
207
+ There is a graphical user interface that makes use of the NPC Toolkit through the NPC Studio. See the source code for NPC Studio [here](https://github.com/npc-worldwide/npc-studio). Download the executables at [our website](https://enpisi.com/downloads). For the most up to date development version, you can use NPC Studio by invoking it in npcsh
208
+
182
209
  ```
183
210
  /npc-studio
184
211
  ```
185
- which will download and set up and serve the NPC Studio application within your `~/.npcsh` folder. It requires `npm` and `node` to work.
212
+ which will download, set up, and serve the NPC Studio application within your `~/.npcsh` folder. It requires `npm` and `node` to work, and of course npcpy!
186
213
 
187
- ## Mailing List
214
+ ## Mailing List and Community
188
215
  Interested to stay in the loop and to hear the latest and greatest about `npcpy`, `npcsh`, and NPC Studio? Be sure to sign up for the [newsletter](https://forms.gle/n1NzQmwjsV4xv1B2A)!
189
216
 
217
+ [Join the discord to discuss ideas for npc tools](https://discord.gg/VvYVT5YC)
190
218
  ## Support
191
- If you appreciate the work here, [consider supporting NPC Worldwide with a monthly donation](https://buymeacoffee.com/npcworldwide), [buying NPC-WW themed merch](https://enpisi.com/shop), or hiring us to help you explore how to use the NPC Toolkit and AI tools to help your business or research team, please reach out to info@npcworldwi.de .
219
+ If you appreciate the work here, [consider supporting NPC Worldwide with a monthly donation](https://buymeacoffee.com/npcworldwide), [buying NPC-WW themed merch](https://enpisi.com/shop), [using and subscribing to Lavanzaro](https://lavanzaro.com), or hiring us to help you explore how to use the NPC Toolkit and AI tools to help your business or research team, please reach out to info@npcworldwi.de.
192
220
 
193
221
 
194
222
  ## Installation
@@ -1235,68 +1235,98 @@ def create_corca_state_and_mcp_client(conversation_id,
1235
1235
 
1236
1236
  return state
1237
1237
 
1238
-
1239
- def enter_corca_mode(command: str, **kwargs):
1240
- state: ShellState = kwargs.get('shell_state')
1241
- command_history: CommandHistory = kwargs.get('command_history')
1242
-
1243
- if not state or not command_history:
1244
- return {"output": "Error: Corca mode requires shell state and history.", "messages": kwargs.get('messages', [])}
1245
-
1246
- all_command_parts = shlex.split(command)
1247
- parser = argparse.ArgumentParser(prog="/corca", description="Enter Corca MCP-powered mode.")
1248
- parser.add_argument("--mcp-server-path", type=str, help="Path to an MCP server script.")
1249
- parser.add_argument("-g", "--global", dest="force_global", action="store_true", help="Force use of global MCP server.")
1238
+ def corca_session(
1239
+ command_history: CommandHistory,
1240
+ state: Optional[ShellState] = None,
1241
+ mcp_server_path: Optional[str] = None,
1242
+ force_global: bool = False,
1243
+ initial_command: Optional[str] = None
1244
+ ) -> Dict[str, Any]:
1245
+ """
1246
+ Clean programmatic entry to Corca mode.
1250
1247
 
1251
- try:
1252
- known_args, remaining_args = parser.parse_known_args(all_command_parts[1:])
1253
- except SystemExit:
1254
- return {"output": "Invalid arguments for /corca. See /help corca.", "messages": state.messages}
1248
+ Args:
1249
+ command_history: CommandHistory instance
1250
+ state: Optional existing ShellState, will create if None
1251
+ mcp_server_path: Optional explicit path to MCP server
1252
+ force_global: Force use of global MCP server
1253
+ initial_command: Optional command to execute before entering loop
1254
+
1255
+ Returns:
1256
+ Dict with 'output' and 'messages' keys
1257
+ """
1258
+ # Setup state if not provided
1259
+ if state is None:
1260
+ _, team, default_npc = setup_shell()
1261
+
1262
+ # Load corca.npc if available
1263
+ project_corca_path = os.path.join('./npc_team/', "corca.npc")
1264
+ global_corca_path = os.path.expanduser('~/.npcsh/npc_team/corca.npc')
1265
+
1266
+ if os.path.exists(project_corca_path):
1267
+ default_npc = NPC(file=project_corca_path, db_conn=command_history.engine)
1268
+ elif os.path.exists(global_corca_path):
1269
+ default_npc = NPC(file=global_corca_path, db_conn=command_history.engine)
1270
+
1271
+ # Set defaults
1272
+ if default_npc.model is None:
1273
+ default_npc.model = team.model or NPCSH_CHAT_MODEL
1274
+ if default_npc.provider is None:
1275
+ default_npc.provider = team.provider or NPCSH_CHAT_PROVIDER
1276
+
1277
+ from npcsh._state import initial_state
1278
+ state = initial_state
1279
+ state.team = team
1280
+ state.npc = default_npc
1281
+ state.command_history = command_history
1255
1282
 
1256
1283
  print_corca_welcome_message()
1257
1284
 
1285
+ # Resolve MCP server path
1258
1286
  auto_copy_bypass = os.getenv("NPCSH_CORCA_AUTO_COPY_MCP_SERVER", "false").lower() == "true"
1259
-
1287
+
1260
1288
  resolved_server_path = _resolve_and_copy_mcp_server_path(
1261
- explicit_path=known_args.mcp_server_path,
1289
+ explicit_path=mcp_server_path,
1262
1290
  current_path=state.current_path,
1263
1291
  team_ctx_mcp_servers=state.team.team_ctx.get('mcp_servers', []) if state.team and hasattr(state.team, 'team_ctx') else None,
1264
1292
  interactive=True,
1265
1293
  auto_copy_bypass=auto_copy_bypass,
1266
- force_global=known_args.force_global
1294
+ force_global=force_global
1267
1295
  )
1268
1296
 
1269
- mcp_client = None
1297
+ # Connect to MCP server
1270
1298
  if resolved_server_path:
1271
1299
  try:
1272
1300
  mcp_client = MCPClientNPC()
1273
1301
  if mcp_client.connect_sync(resolved_server_path):
1274
1302
  state.mcp_client = mcp_client
1275
1303
  else:
1276
- cprint(f"Failed to connect to MCP server at {resolved_server_path}. Corca mode will have limited agent functionality.", "yellow")
1304
+ cprint(f"Failed to connect to MCP server. Limited functionality.", "yellow")
1277
1305
  state.mcp_client = None
1278
1306
  except Exception as e:
1279
- cprint(f"Error connecting to MCP server: {e}. Corca mode will have limited agent functionality.", "red")
1307
+ cprint(f"Error connecting to MCP server: {e}", "red")
1280
1308
  traceback.print_exc()
1281
1309
  state.mcp_client = None
1282
1310
  else:
1283
- cprint("No MCP server path provided or found. Corca mode will have limited agent functionality.", "yellow")
1311
+ cprint("No MCP server path found. Limited functionality.", "yellow")
1284
1312
  state.mcp_client = None
1285
1313
 
1314
+ # Execute initial command if provided
1315
+ if initial_command:
1316
+ try:
1317
+ state, output = execute_command_corca(initial_command, state, command_history)
1318
+ if not (isinstance(output, dict) and output.get('interrupted')):
1319
+ process_corca_result(initial_command, state, output, command_history)
1320
+ except Exception as e:
1321
+ print(colored(f'Error executing initial command: {e}', "red"))
1322
+
1323
+ # Main loop
1286
1324
  while True:
1287
1325
  try:
1288
- prompt_npc_name = "npc"
1289
- if state.npc:
1290
- prompt_npc_name = state.npc.name
1291
-
1326
+ prompt_npc_name = state.npc.name if state.npc else "npc"
1292
1327
  prompt_str = f"{colored(os.path.basename(state.current_path), 'blue')}:{prompt_npc_name}🦌> "
1293
1328
  prompt = readline_safe_prompt(prompt_str)
1294
-
1295
- if remaining_args:
1296
- user_input = " ".join(remaining_args)
1297
- remaining_args = []
1298
- else:
1299
- user_input = get_multiline_input(prompt).strip()
1329
+ user_input = get_multiline_input(prompt).strip()
1300
1330
 
1301
1331
  if user_input.lower() in ["exit", "quit", "done"]:
1302
1332
  break
@@ -1311,11 +1341,7 @@ def enter_corca_mode(command: str, **kwargs):
1311
1341
  print(colored("\n⚠️ Command interrupted. MCP session maintained.", "yellow"))
1312
1342
  continue
1313
1343
 
1314
- process_corca_result(user_input,
1315
- state,
1316
- output,
1317
- command_history,
1318
- )
1344
+ process_corca_result(user_input, state, output, command_history)
1319
1345
  except KeyboardInterrupt:
1320
1346
  print(colored("\n⚠️ Interrupted. Type 'exit' to quit Corca mode.", "yellow"))
1321
1347
  continue
@@ -1330,12 +1356,45 @@ def enter_corca_mode(command: str, **kwargs):
1330
1356
  print("\nExiting Corca Mode.")
1331
1357
  break
1332
1358
 
1359
+ # Cleanup
1333
1360
  if state.mcp_client:
1334
1361
  state.mcp_client.disconnect_sync()
1335
1362
  state.mcp_client = None
1336
1363
 
1337
1364
  render_markdown("\n# Exiting Corca Mode")
1338
1365
  return {"output": "", "messages": state.messages}
1366
+ def enter_corca_mode(command: str, **kwargs):
1367
+ """Legacy wrapper for command-line entry"""
1368
+ state: ShellState = kwargs.get('shell_state')
1369
+ command_history: CommandHistory = kwargs.get('command_history')
1370
+
1371
+ if not state or not command_history:
1372
+ return {"output": "Error: Corca mode requires shell state and history.", "messages": kwargs.get('messages', [])}
1373
+
1374
+ # Parse command arguments
1375
+ all_command_parts = shlex.split(command)
1376
+ parser = argparse.ArgumentParser(prog="/corca", description="Enter Corca MCP-powered mode.")
1377
+ parser.add_argument("--mcp-server-path", type=str, help="Path to an MCP server script.")
1378
+ parser.add_argument("-g", "--global", dest="force_global", action="store_true", help="Force use of global MCP server.")
1379
+
1380
+ try:
1381
+ known_args, remaining_args = parser.parse_known_args(all_command_parts[1:])
1382
+ except SystemExit:
1383
+ return {"output": "Invalid arguments for /corca. See /help corca.", "messages": state.messages}
1384
+
1385
+ # Get initial command from remaining args
1386
+ initial_command = " ".join(remaining_args) if remaining_args else None
1387
+
1388
+ # Call the clean entry point
1389
+ return corca_session(
1390
+ command_history=command_history,
1391
+ state=state,
1392
+ mcp_server_path=known_args.mcp_server_path,
1393
+ force_global=known_args.force_global,
1394
+ initial_command=initial_command
1395
+ )
1396
+
1397
+
1339
1398
  def main():
1340
1399
  parser = argparse.ArgumentParser(description="Corca - An MCP-powered npcsh shell.")
1341
1400
  parser.add_argument("--mcp-server-path", type=str, help="Path to an MCP server script to connect to.")
@@ -0,0 +1,28 @@
1
+ jinx_name: "corca"
2
+ description: "Enter the Corca MCP-powered agentic shell"
3
+ inputs:
4
+ - mcp_server_path: '~/.npcsh/npc_team/mcp_server.py'
5
+ - force_global: false
6
+ - initial_command: null
7
+ steps:
8
+ - name: "enter_corca"
9
+ engine: "python"
10
+ code: |
11
+ from npcsh._state import setup_shell
12
+ from npcsh.corca import corca_session
13
+
14
+ mcp_server_path = context.get('mcp_server_path')
15
+ force_global = context.get('force_global', False)
16
+ initial_command = context.get('initial_command')
17
+
18
+ command_history, _, _ = setup_shell()
19
+
20
+ result = corca_session(
21
+ command_history=command_history,
22
+ mcp_server_path=mcp_server_path,
23
+ force_global=force_global,
24
+ initial_command=initial_command
25
+ )
26
+
27
+ context['output'] = result.get('output', 'Exited Corca mode.')
28
+ context['messages'] = result.get('messages', [])
@@ -30,10 +30,6 @@ steps:
30
30
  if not vision_provider and current_npc and current_npc.provider:
31
31
  vision_provider = current_npc.provider
32
32
 
33
- # Final fallbacks (these would ideally come from npcsh._state config)
34
- if not vision_model: vision_model = "gemini-1.5-pro-vision" # Example default
35
- if not vision_provider: vision_provider = "gemini" # Example default
36
-
37
33
  try:
38
34
  summary_data = execute_plonk_command(
39
35
  request=task_description,
@@ -38,10 +38,10 @@ steps:
38
38
  print(f"📸 Screenshot captured: {screenshot_info.get('filename', os.path.basename(screenshot_info['file_path']))}")
39
39
 
40
40
  if not vision_model:
41
- vision_model = getattr(current_npc, 'model', 'gpt-4o-mini')
41
+ vision_model = getattr(current_npc, 'model', 'gemma3:4b')
42
42
 
43
43
  if not vision_provider:
44
- vision_provider = getattr(current_npc, 'provider', 'openai')
44
+ vision_provider = getattr(current_npc, 'provider', 'ollama')
45
45
 
46
46
  response_data = get_llm_response(
47
47
  prompt=user_prompt,
@@ -2,12 +2,12 @@ jinx_name: "roll"
2
2
  description: "Generate a video from a text prompt."
3
3
  inputs:
4
4
  - prompt: "" # Required text prompt for video generation.
5
+ - vgmodel: "" # Video generation model to use. Defaults to NPCSH_VIDEO_GEN_MODEL or NPC's model.
6
+ - vgprovider: "" # Video generation provider to use. Defaults to NPCSH_VIDEO_GEN_PROVIDER or NPC's provider.
5
7
  - num_frames: 125 # Number of frames for the video.
6
8
  - width: 256 # Width of the video.
7
9
  - height: 256 # Height of the video.
8
10
  - output_path: "output.mp4" # Output file path for the video.
9
- - vgmodel: "" # Video generation model to use. Defaults to NPCSH_VIDEO_GEN_MODEL or NPC's model.
10
- - vgprovider: "" # Video generation provider to use. Defaults to NPCSH_VIDEO_GEN_PROVIDER or NPC's provider.
11
11
  steps:
12
12
  - name: "generate_video"
13
13
  engine: "python"
@@ -38,8 +38,10 @@ steps:
38
38
  video_gen_provider = current_npc.provider
39
39
 
40
40
  # Final fallbacks (these would ideally come from npcsh._state config)
41
- if not video_gen_model: video_gen_model = "stable-video-diffusion" # Example default
42
- if not video_gen_provider: video_gen_provider = "diffusers" # Example default
41
+ if not video_gen_model:
42
+ video_gen_model = "stable-video-diffusion" # Example default
43
+ if not video_gen_provider:
44
+ video_gen_provider = "diffusers" # Example default
43
45
 
44
46
  try:
45
47
  result = gen_video(
@@ -9,6 +9,7 @@ description: >
9
9
  /search --brainblast <query> (Advanced history search)
10
10
  inputs:
11
11
  - query: ""
12
+ - sprovider: ""
12
13
  - memory: false
13
14
  - kg: false
14
15
  - rag: false
@@ -16,7 +17,6 @@ inputs:
16
17
  - file_paths: ""
17
18
  - history_db_path: "~/npcsh_history.db"
18
19
  - vector_db_path: "~/npcsh_chroma.db"
19
- - sprovider: ""
20
20
  - emodel: ""
21
21
  - eprovider: ""
22
22
  steps:
@@ -140,23 +140,24 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState, router)
140
140
 
141
141
  while True:
142
142
  try:
143
- if len(state.messages) > 20:
144
- planning_state = {
145
- "goal": "ongoing npcsh session",
146
- "facts": [f"Working in {state.current_path}", f"Current mode: {state.current_mode}"],
147
- "successes": [],
148
- "mistakes": [],
149
- "todos": [],
150
- "constraints": ["Follow user requests", "Use appropriate mode for tasks"]
151
- }
152
- compressed_state = state.npc.compress_planning_state(planning_state)
153
- state.messages = [{"role": "system", "content": f"Session context: {compressed_state}"}]
154
-
155
- try:
156
- completer = make_completer(state, router)
157
- readline.set_completer(completer)
158
- except:
159
- pass
143
+ if state.messages is not None:
144
+ if len(state.messages) > 20:
145
+ planning_state = {
146
+ "goal": "ongoing npcsh session",
147
+ "facts": [f"Working in {state.current_path}", f"Current mode: {state.current_mode}"],
148
+ "successes": [],
149
+ "mistakes": [],
150
+ "todos": [],
151
+ "constraints": ["Follow user requests", "Use appropriate mode for tasks"]
152
+ }
153
+ compressed_state = state.npc.compress_planning_state(planning_state)
154
+ state.messages = [{"role": "system", "content": f"Session context: {compressed_state}"}]
155
+
156
+ try:
157
+ completer = make_completer(state, router)
158
+ readline.set_completer(completer)
159
+ except:
160
+ pass
160
161
 
161
162
  display_model = state.chat_model
162
163
  if isinstance(state.npc, NPC) and state.npc.model:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: npcsh
3
- Version: 1.1.7
3
+ Version: 1.1.8
4
4
  Summary: npcsh is a command-line toolkit for using AI agents in novel ways.
5
5
  Home-page: https://github.com/NPC-Worldwide/npcsh
6
6
  Author: Christopher Agostino
@@ -143,17 +143,49 @@ and you will enter the NPC shell. Additionally, the pip installation includes th
143
143
  ```
144
144
 
145
145
 
146
- - **Search the Web**
146
+ - **Search**
147
+ - search the web
147
148
  ```bash
148
- /search "cal golden bears football schedule" -sp perplexity
149
+ /search "cerulean city" perplexity
150
+
149
151
  ```
150
152
  <p align="center">
151
- <img src="https://raw.githubusercontent.com/npc-worldwide/npcsh/main/test_data/search_example.png" alt="example of search results", width=600>
152
- </p>
153
+ <img src="https://raw.githubusercontent.com/npc-worldwide/npcsh/main/test_data/search.gif" alt="example of search results" width="600">
154
+ </p>
155
+
156
+ - search approved memories
157
+ ```bash
158
+ /search query="how to deploy python apps" memory=true
159
+ ```
160
+
161
+ - search the knowledge graph
162
+
163
+ ```bash
164
+ /search query="user preferences for database" kg=true
165
+ ```
166
+
167
+ - execute a RAG search across files
168
+
169
+ ```bash
170
+ /search --rag -f ~/docs/api.md,~/docs/config.yaml "authentication setup"
171
+ ```
172
+
173
+ - brainblast search (searches many keyword combinations)
174
+
175
+ ```bash
176
+ /search query="git commands" brainblast=true
177
+ ```
178
+
179
+ - web search with specific provider
180
+
181
+ ```bash
182
+ /search query="family vacations" sprovider="perplexity"
183
+ ```
153
184
 
154
185
  - **Computer Use**
186
+
155
187
  ```bash
156
- /plonk 'find out the latest news on cnn'
188
+ /plonk 'find out the latest news on cnn' gemma3:12b ollama
157
189
  ```
158
190
 
159
191
  - **Generate Image**
@@ -165,12 +197,12 @@ and you will enter the NPC shell. Additionally, the pip installation includes th
165
197
  </p>
166
198
  - **Generate Video**
167
199
  ```bash
168
- /roll 'generate a video of a hat riding a dog'
200
+ /roll 'generate a video of a hat riding a dog' veo-3.1-fast-generate-preview gemini
169
201
  ```
170
- <!--
202
+
171
203
  <p align="center">
172
- <img src="https://raw.githubusercontent.com/npc-worldwide/npcsh/main/test_data/hat_video.mp4" alt="video of a hat riding a dog", width=250>
173
- </p> -->
204
+ <img src="https://raw.githubusercontent.com/NPC-Worldwide/npcsh/main/test_data/hatridingdog.gif" alt="video of a hat riding a dog" width="250">
205
+ </p>
174
206
 
175
207
  - **Serve an NPC Team**
176
208
  ```bash
@@ -212,16 +244,13 @@ This architecture enables users to build complex AI workflows while maintaining
212
244
 
213
245
  Importantly, users can switch easily between the NPCs they are chatting with by typing `/n npc_name` within the NPC shell. Likewise, they can create Jinxs and then use them from within the NPC shell by invoking the jinx name and the arguments required for the Jinx; `/<jinx_name> arg1 arg2`
214
246
 
215
- # Macros
216
- - activated by invoking `/<command> ...` in `npcsh`, macros can be called in bash or through the `npc` CLI. In our examples, we provide both `npcsh` calls as well as bash calls with the `npc` cli where relevant. For converting any `/<command>` in `npcsh` to a bash version, replace the `/` with `npc ` and the macro command will be invoked as a positional argument. Some, like breathe, flush,
217
-
247
+ # Jinxs as macros
248
+ - activated by invoking `/<jinx_name> ...` in `npcsh`, jinxs can be called in bash or through the `npc` CLI. In our examples, we provide both `npcsh` calls as well as bash calls with the `npc` cli where relevant. For converting any `/<command>` in `npcsh` to a bash version, replace the `/` with `npc ` and the macro command will be invoked as a positional argument.
218
249
  - `/alicanto` - Conduct deep research with multiple perspectives, identifying gold insights and cliff warnings. Usage: `/alicanto 'query to be researched' --num-npcs <int> --depth <int>`
219
250
  - `/build` - Builds the current npc team to an executable format . Usage: `/build <output[flask,docker,cli,static]> --options`
220
- - `/brainblast` - Execute an advanced chunked search on command history. Usage: `/brainblast 'query' --top_k 10`
221
251
  - `/breathe` - Condense context on a regular cadence. Usage: `/breathe -p <provider: NPCSH_CHAT_PROVIDER> -m <model: NPCSH_CHAT_MODEL>`
222
252
  - `/compile` - Compile NPC profiles. Usage: `/compile <path_to_npc> `
223
253
  - `/corca` - Enter the Corca MCP-powered agentic shell. Usage: `/corca [--mcp-server-path path]`
224
- - `/flush` - Flush the last N messages. Usage: `/flush N=10`
225
254
  - `/guac` - Enter guac mode. Usage: `/guac`
226
255
  - `/help` - Show help for commands, NPCs, or Jinxs. Usage: `/help`
227
256
  - `/init` - Initialize NPC project. Usage: `/init`
@@ -229,13 +258,11 @@ Importantly, users can switch easily between the NPCs they are chatting with by
229
258
  - `/<jinx_name>` - Run a jinx with specified command line arguments. `/<jinx_name> jinx_arg1 jinx_arg2`
230
259
  - `/npc-studio` - Start npc studio. Pulls NPC Studio github to `~/.npcsh/npc-studio` and launches it in development mode after installing necessary NPM dependencies.Usage: `/npc-studio`
231
260
  - `/ots` - Take screenshot and analyze with vision model. Usage: `/ots filename=<output_file_name_for_screenshot>` then select an area, and you will be prompted for your request.
232
- - `/plan` - Execute a plan command. Usage: `/plan 'idea for a cron job to be set up to accomplish'`
233
261
  - `/plonk` - Use vision model to interact with GUI. Usage: `/plonk '<task description>' `
234
262
  - `/pti` - Use pardon-the-interruption mode to interact with reasoning model LLM. Usage: `/pti`
235
- - `/rag` - Execute a RAG command using ChromaDB embeddings with optional file input (-f/--file). Usage: `/rag '<query_to_rag>' --emodel <NPCSH_EMBEDDING_MODEL> --eprovider <NPCSH_EMBEDDING_PROVIDER>`
236
263
  - `/roll` - generate a video with video generation model. Usage: `/roll '<description_for_a_movie>' --vgmodel <NPCSH_VIDEO_GEN_MODEL> --vgprovider <NPCSH_VIDEO_GEN_PROVIDER>`
237
264
  - `/sample` - Send a context-free prompt to an LLM, letting you get fresh answers without needing to start a separate conversation/shell. Usage: `/sample -m <NPCSH_CHAT_MODEL> 'question to sample --temp <float> --top_k int`
238
- - `/search` - Execute a web search command. Usage: `/search 'search query' --sprovider <provider>` where provider is currently limited to DuckDuckGo and Perplexity. Wikipedia integration ongoing.
265
+ - `/search` - Execute a search command on the web, in your memories, in the knowledge graph, or in documents with rag. Usage: `/search 'search query' --sprovider <provider>` where provider is currently limited to DuckDuckGo, Perplexity, and Exa with more coming soon through litellm. Wikipedia integration ongoing. See above for more search specific examples.
239
266
  - `/serve` - Serve an NPC Team server.
240
267
  - `/set` - Set configuration values.
241
268
  - Usage:
@@ -249,7 +276,6 @@ Importantly, users can switch easily between the NPCs they are chatting with by
249
276
  - Usage:
250
277
  - Gen Image: `/vixynt -igp <NPCSH_IMAGE_GEN_PROVIDER> --igmodel <NPCSH_IMAGE_GEN_MODEL> --output_file <path_to_file> width=<int:1024> height =<int:1024> 'description of image`
251
278
  - Edit Image: `/vixynt 'edit this....' --attachments '/path/to/image.png,/path/to/image.jpeg'`
252
-
253
279
  - `/wander` - A method for LLMs to think on a problem by switching between states of high temperature and low temperature. Usage: `/wander 'query to wander about' --provider "ollama" --model "deepseek-r1:32b" environment="a vast dark ocean" interruption-likelihood=.1`
254
280
  - `/yap` - Enter voice chat (yap) mode. Usage: `/yap -n <npc_to_chat_with>`
255
281
 
@@ -271,24 +297,26 @@ Importantly, users can switch easily between the NPCs they are chatting with by
271
297
  '
272
298
 
273
299
  ## Read the Docs
274
- To see more about how to use the macros and modes in the NPC Shell, read the docs at [npc-shell.readthedocs.io](https://npc-shell.readthedocs.io/en/latest/)
300
+ To see more about how to use the jinxs and modes in the NPC Shell, read the docs at [npc-shell.readthedocs.io](https://npc-shell.readthedocs.io/en/latest/)
275
301
 
276
302
 
277
303
  ## Inference Capabilities
278
304
  - `npcsh` works with local and enterprise LLM providers through its LiteLLM integration, allowing users to run inference from Ollama, LMStudio, vLLM, MLX, OpenAI, Anthropic, Gemini, and Deepseek, making it a versatile tool for both simple commands and sophisticated AI-driven tasks.
279
305
 
280
306
  ## NPC Studio
281
- There is a graphical user interface that makes use of the NPC Toolkit through the NPC Studio. See the source code for NPC Studio [here](https://github.com/npc-worldwide/npc-studio). Download the executables at [our website](https://enpisi.com/npc-studio). For the most up to date version, you can use NPC Studio by invoking it in npcsh
307
+ There is a graphical user interface that makes use of the NPC Toolkit through the NPC Studio. See the source code for NPC Studio [here](https://github.com/npc-worldwide/npc-studio). Download the executables at [our website](https://enpisi.com/downloads). For the most up to date development version, you can use NPC Studio by invoking it in npcsh
308
+
282
309
  ```
283
310
  /npc-studio
284
311
  ```
285
- which will download and set up and serve the NPC Studio application within your `~/.npcsh` folder. It requires `npm` and `node` to work.
312
+ which will download and set up and serve the NPC Studio application within your `~/.npcsh` folder. It requires `npm` and `node` to work, and of course npcpy!
286
313
 
287
- ## Mailing List
314
+ ## Mailing List and Community
288
315
  Interested to stay in the loop and to hear the latest and greatest about `npcpy`, `npcsh`, and NPC Studio? Be sure to sign up for the [newsletter](https://forms.gle/n1NzQmwjsV4xv1B2A)!
289
316
 
317
+ [Join the discord to discuss ideas for npc tools](https://discord.gg/VvYVT5YC)
290
318
  ## Support
291
- If you appreciate the work here, [consider supporting NPC Worldwide with a monthly donation](https://buymeacoffee.com/npcworldwide), [buying NPC-WW themed merch](https://enpisi.com/shop), or hiring us to help you explore how to use the NPC Toolkit and AI tools to help your business or research team, please reach out to info@npcworldwi.de .
319
+ If you appreciate the work here, [consider supporting NPC Worldwide with a monthly donation](https://buymeacoffee.com/npcworldwide), [buying NPC-WW themed merch](https://enpisi.com/shop), [using and subscribing to Lavanzaro](https://lavanzaro.com), or hiring us to help you explore how to use the NPC Toolkit and AI tools to help your business or research team — please reach out to info@npcworldwi.de.
292
320
 
293
321
 
294
322
  ## Installation
@@ -78,7 +78,7 @@ extra_files = package_files("npcsh/npc_team/")
78
78
 
79
79
  setup(
80
80
  name="npcsh",
81
- version="1.1.7",
81
+ version="1.1.8",
82
82
  packages=find_packages(exclude=["tests*"]),
83
83
  install_requires=base_requirements, # Only install base requirements by default
84
84
  extras_require={
@@ -1,28 +0,0 @@
1
- jinx_name: "corca"
2
- description: "Enter the Corca MCP-powered agentic shell. Usage: /corca [--mcp-server-path path]"
3
- inputs:
4
- - command: "/corca" # The full command string, e.g., "/corca --mcp-server-path /tmp/mcp"
5
- steps:
6
- - name: "enter_corca"
7
- engine: "python"
8
- code: |
9
- # Assume npcsh._state and enter_corca_mode are accessible in the environment
10
-
11
- from npcsh._state import initial_state, setup_shell
12
- from npcsh.corca import enter_corca_mode
13
-
14
-
15
- full_command_str = context.get('command')
16
- output_messages = context.get('messages', [])
17
-
18
- command_history, team, default_npc = setup_shell()
19
-
20
- result = enter_corca_mode(
21
- command=full_command_str,
22
- command_history=command_history,
23
- shell_state=initial_state,
24
- **context # Pass all context as kwargs to enter_corca_mode as it expects
25
- )
26
-
27
- context['output'] = result.get('output', 'Entered Corca mode.')
28
- context['messages'] = result.get('messages', output_messages)
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes