npcsh 1.1.16-py3-none-any.whl → 1.1.18-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
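The same comparison can be reproduced locally by downloading both wheels (for example with pip download npcsh==1.1.16 --no-deps and pip download npcsh==1.1.18 --no-deps) and diffing their contents. The sketch below uses only the Python standard library; the wheel filenames and the text-file extension filter are assumptions, not part of this diff.

# Minimal sketch: diff two locally downloaded npcsh wheels (filenames are assumptions).
import difflib
import zipfile

OLD = "npcsh-1.1.16-py3-none-any.whl"
NEW = "npcsh-1.1.18-py3-none-any.whl"

with zipfile.ZipFile(OLD) as old, zipfile.ZipFile(NEW) as new:
    old_names, new_names = set(old.namelist()), set(new.namelist())
    print("added:  ", len(new_names - old_names), "files")
    print("removed:", len(old_names - new_names), "files")

    # Unified diff for text files present in both wheels
    for name in sorted(old_names & new_names):
        if not name.endswith((".py", ".jinx", ".txt", ".npc", ".ctx")):
            continue  # skip binary assets such as .png images
        a = old.read(name).decode("utf-8", "replace").splitlines(keepends=True)
        b = new.read(name).decode("utf-8", "replace").splitlines(keepends=True)
        diff = difflib.unified_diff(a, b, fromfile=f"1.1.16/{name}", tofile=f"1.1.18/{name}")
        print("".join(diff), end="")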
Files changed (217)
  1. npcsh/_state.py +138 -100
  2. npcsh/alicanto.py +2 -2
  3. npcsh/benchmark/__init__.py +28 -0
  4. npcsh/benchmark/npcsh_agent.py +296 -0
  5. npcsh/benchmark/runner.py +611 -0
  6. npcsh/benchmark/templates/install-npcsh.sh.j2 +35 -0
  7. npcsh/build.py +2 -4
  8. npcsh/completion.py +2 -6
  9. npcsh/config.py +1 -3
  10. npcsh/conversation_viewer.py +389 -0
  11. npcsh/corca.py +0 -1
  12. npcsh/execution.py +0 -1
  13. npcsh/guac.py +0 -1
  14. npcsh/mcp_helpers.py +2 -3
  15. npcsh/mcp_server.py +5 -10
  16. npcsh/npc.py +10 -11
  17. npcsh/npc_team/jinxs/bin/benchmark.jinx +146 -0
  18. npcsh/npc_team/jinxs/bin/nql.jinx +7 -7
  19. npcsh/npc_team/jinxs/bin/roll.jinx +20 -23
  20. npcsh/npc_team/jinxs/bin/sample.jinx +6 -7
  21. npcsh/npc_team/jinxs/bin/sync.jinx +6 -6
  22. npcsh/npc_team/jinxs/bin/vixynt.jinx +8 -8
  23. npcsh/npc_team/jinxs/incognide/add_tab.jinx +11 -0
  24. npcsh/npc_team/jinxs/incognide/close_pane.jinx +9 -0
  25. npcsh/npc_team/jinxs/incognide/close_tab.jinx +10 -0
  26. npcsh/npc_team/jinxs/incognide/confirm.jinx +10 -0
  27. npcsh/npc_team/jinxs/incognide/focus_pane.jinx +9 -0
  28. npcsh/npc_team/jinxs/{npc_studio/npc-studio.jinx → incognide/incognide.jinx} +2 -2
  29. npcsh/npc_team/jinxs/incognide/list_panes.jinx +8 -0
  30. npcsh/npc_team/jinxs/incognide/navigate.jinx +10 -0
  31. npcsh/npc_team/jinxs/incognide/notify.jinx +10 -0
  32. npcsh/npc_team/jinxs/incognide/open_pane.jinx +13 -0
  33. npcsh/npc_team/jinxs/incognide/read_pane.jinx +9 -0
  34. npcsh/npc_team/jinxs/incognide/run_terminal.jinx +10 -0
  35. npcsh/npc_team/jinxs/incognide/send_message.jinx +10 -0
  36. npcsh/npc_team/jinxs/incognide/split_pane.jinx +12 -0
  37. npcsh/npc_team/jinxs/incognide/switch_npc.jinx +10 -0
  38. npcsh/npc_team/jinxs/incognide/switch_tab.jinx +10 -0
  39. npcsh/npc_team/jinxs/incognide/write_file.jinx +11 -0
  40. npcsh/npc_team/jinxs/incognide/zen_mode.jinx +9 -0
  41. npcsh/npc_team/jinxs/lib/browser/browser_action.jinx +4 -4
  42. npcsh/npc_team/jinxs/lib/browser/browser_screenshot.jinx +1 -1
  43. npcsh/npc_team/jinxs/lib/browser/open_browser.jinx +2 -2
  44. npcsh/npc_team/jinxs/lib/computer_use/click.jinx +2 -2
  45. npcsh/npc_team/jinxs/lib/computer_use/key_press.jinx +1 -1
  46. npcsh/npc_team/jinxs/lib/computer_use/launch_app.jinx +1 -1
  47. npcsh/npc_team/jinxs/lib/computer_use/screenshot.jinx +1 -1
  48. npcsh/npc_team/jinxs/lib/computer_use/trigger.jinx +2 -2
  49. npcsh/npc_team/jinxs/lib/computer_use/type_text.jinx +1 -1
  50. npcsh/npc_team/jinxs/lib/computer_use/wait.jinx +1 -1
  51. npcsh/npc_team/jinxs/lib/core/chat.jinx +4 -4
  52. npcsh/npc_team/jinxs/lib/core/cmd.jinx +4 -4
  53. npcsh/npc_team/jinxs/lib/core/compress.jinx +8 -8
  54. npcsh/npc_team/jinxs/lib/core/edit_file.jinx +3 -0
  55. npcsh/npc_team/jinxs/lib/core/ots.jinx +7 -7
  56. npcsh/npc_team/jinxs/lib/core/search/db_search.jinx +348 -0
  57. npcsh/npc_team/jinxs/lib/core/search/file_search.jinx +339 -0
  58. npcsh/npc_team/jinxs/lib/core/search/kg_search.jinx +418 -0
  59. npcsh/npc_team/jinxs/lib/core/search/mem_review.jinx +73 -0
  60. npcsh/npc_team/jinxs/lib/core/search/mem_search.jinx +388 -0
  61. npcsh/npc_team/jinxs/lib/core/search/web_search.jinx +283 -0
  62. npcsh/npc_team/jinxs/lib/core/search.jinx +52 -129
  63. npcsh/npc_team/jinxs/lib/core/sh.jinx +1 -1
  64. npcsh/npc_team/jinxs/lib/core/sleep.jinx +29 -18
  65. npcsh/npc_team/jinxs/lib/core/sql.jinx +15 -11
  66. npcsh/npc_team/jinxs/lib/orchestration/convene.jinx +7 -7
  67. npcsh/npc_team/jinxs/lib/orchestration/delegate.jinx +8 -9
  68. npcsh/npc_team/jinxs/lib/research/paper_search.jinx +389 -78
  69. npcsh/npc_team/jinxs/lib/research/semantic_scholar.jinx +373 -56
  70. npcsh/npc_team/jinxs/lib/utils/build.jinx +5 -5
  71. npcsh/npc_team/jinxs/lib/utils/compile.jinx +2 -2
  72. npcsh/npc_team/jinxs/lib/utils/help.jinx +1 -1
  73. npcsh/npc_team/jinxs/lib/utils/init.jinx +5 -5
  74. npcsh/npc_team/jinxs/lib/utils/jinxs.jinx +300 -145
  75. npcsh/npc_team/jinxs/lib/utils/serve.jinx +2 -2
  76. npcsh/npc_team/jinxs/lib/utils/set.jinx +2 -2
  77. npcsh/npc_team/jinxs/lib/utils/switch.jinx +3 -3
  78. npcsh/npc_team/jinxs/lib/utils/switches.jinx +1 -1
  79. npcsh/npc_team/jinxs/lib/utils/teamviz.jinx +2 -2
  80. npcsh/npc_team/jinxs/modes/alicanto.jinx +356 -0
  81. npcsh/npc_team/jinxs/modes/arxiv.jinx +720 -0
  82. npcsh/npc_team/jinxs/modes/corca.jinx +430 -0
  83. npcsh/npc_team/jinxs/modes/guac.jinx +544 -0
  84. npcsh/npc_team/jinxs/modes/plonk.jinx +379 -0
  85. npcsh/npc_team/jinxs/modes/pti.jinx +357 -0
  86. npcsh/npc_team/jinxs/modes/reattach.jinx +291 -0
  87. npcsh/npc_team/jinxs/modes/spool.jinx +350 -0
  88. npcsh/npc_team/jinxs/modes/wander.jinx +455 -0
  89. {npcsh-1.1.16.data/data/npcsh/npc_team → npcsh/npc_team/jinxs/modes}/yap.jinx +8 -2
  90. npcsh/npc_team/sibiji.npc +1 -1
  91. npcsh/npcsh.py +87 -46
  92. npcsh/plonk.py +0 -1
  93. npcsh/pti.py +0 -1
  94. npcsh/routes.py +1 -3
  95. npcsh/spool.py +0 -1
  96. npcsh/ui.py +0 -1
  97. npcsh/wander.py +0 -1
  98. npcsh/yap.py +0 -1
  99. npcsh-1.1.18.data/data/npcsh/npc_team/add_tab.jinx +11 -0
  100. npcsh-1.1.18.data/data/npcsh/npc_team/alicanto.jinx +356 -0
  101. npcsh-1.1.18.data/data/npcsh/npc_team/arxiv.jinx +720 -0
  102. npcsh-1.1.18.data/data/npcsh/npc_team/benchmark.jinx +146 -0
  103. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/browser_action.jinx +4 -4
  104. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/browser_screenshot.jinx +1 -1
  105. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/build.jinx +5 -5
  106. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/chat.jinx +4 -4
  107. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/click.jinx +2 -2
  108. npcsh-1.1.18.data/data/npcsh/npc_team/close_pane.jinx +9 -0
  109. npcsh-1.1.18.data/data/npcsh/npc_team/close_tab.jinx +10 -0
  110. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/cmd.jinx +4 -4
  111. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/compile.jinx +2 -2
  112. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/compress.jinx +8 -8
  113. npcsh-1.1.18.data/data/npcsh/npc_team/confirm.jinx +10 -0
  114. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/convene.jinx +7 -7
  115. npcsh-1.1.18.data/data/npcsh/npc_team/corca.jinx +430 -0
  116. npcsh-1.1.18.data/data/npcsh/npc_team/db_search.jinx +348 -0
  117. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/delegate.jinx +8 -9
  118. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/edit_file.jinx +3 -0
  119. npcsh-1.1.18.data/data/npcsh/npc_team/file_search.jinx +339 -0
  120. npcsh-1.1.18.data/data/npcsh/npc_team/focus_pane.jinx +9 -0
  121. npcsh-1.1.18.data/data/npcsh/npc_team/guac.jinx +544 -0
  122. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/help.jinx +1 -1
  123. npcsh-1.1.16.data/data/npcsh/npc_team/npc-studio.jinx → npcsh-1.1.18.data/data/npcsh/npc_team/incognide.jinx +2 -2
  124. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/init.jinx +5 -5
  125. npcsh-1.1.18.data/data/npcsh/npc_team/jinxs.jinx +331 -0
  126. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/key_press.jinx +1 -1
  127. npcsh-1.1.18.data/data/npcsh/npc_team/kg_search.jinx +418 -0
  128. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/launch_app.jinx +1 -1
  129. npcsh-1.1.18.data/data/npcsh/npc_team/list_panes.jinx +8 -0
  130. npcsh-1.1.18.data/data/npcsh/npc_team/mem_review.jinx +73 -0
  131. npcsh-1.1.18.data/data/npcsh/npc_team/mem_search.jinx +388 -0
  132. npcsh-1.1.18.data/data/npcsh/npc_team/navigate.jinx +10 -0
  133. npcsh-1.1.18.data/data/npcsh/npc_team/notify.jinx +10 -0
  134. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/nql.jinx +7 -7
  135. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/open_browser.jinx +2 -2
  136. npcsh-1.1.18.data/data/npcsh/npc_team/open_pane.jinx +13 -0
  137. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/ots.jinx +7 -7
  138. npcsh-1.1.18.data/data/npcsh/npc_team/paper_search.jinx +412 -0
  139. npcsh-1.1.18.data/data/npcsh/npc_team/plonk.jinx +379 -0
  140. npcsh-1.1.18.data/data/npcsh/npc_team/pti.jinx +357 -0
  141. npcsh-1.1.18.data/data/npcsh/npc_team/read_pane.jinx +9 -0
  142. npcsh-1.1.18.data/data/npcsh/npc_team/reattach.jinx +291 -0
  143. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/roll.jinx +20 -23
  144. npcsh-1.1.18.data/data/npcsh/npc_team/run_terminal.jinx +10 -0
  145. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sample.jinx +6 -7
  146. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/screenshot.jinx +1 -1
  147. npcsh-1.1.18.data/data/npcsh/npc_team/search.jinx +54 -0
  148. npcsh-1.1.18.data/data/npcsh/npc_team/semantic_scholar.jinx +386 -0
  149. npcsh-1.1.18.data/data/npcsh/npc_team/send_message.jinx +10 -0
  150. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/serve.jinx +2 -2
  151. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/set.jinx +2 -2
  152. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sh.jinx +1 -1
  153. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sibiji.npc +1 -1
  154. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sleep.jinx +29 -18
  155. npcsh-1.1.18.data/data/npcsh/npc_team/split_pane.jinx +12 -0
  156. npcsh-1.1.18.data/data/npcsh/npc_team/spool.jinx +350 -0
  157. npcsh-1.1.18.data/data/npcsh/npc_team/sql.jinx +20 -0
  158. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/switch.jinx +3 -3
  159. npcsh-1.1.18.data/data/npcsh/npc_team/switch_npc.jinx +10 -0
  160. npcsh-1.1.18.data/data/npcsh/npc_team/switch_tab.jinx +10 -0
  161. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/switches.jinx +1 -1
  162. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sync.jinx +6 -6
  163. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/teamviz.jinx +2 -2
  164. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/trigger.jinx +2 -2
  165. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/type_text.jinx +1 -1
  166. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/vixynt.jinx +8 -8
  167. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/wait.jinx +1 -1
  168. npcsh-1.1.18.data/data/npcsh/npc_team/wander.jinx +455 -0
  169. npcsh-1.1.18.data/data/npcsh/npc_team/web_search.jinx +283 -0
  170. npcsh-1.1.18.data/data/npcsh/npc_team/write_file.jinx +11 -0
  171. {npcsh/npc_team/jinxs/bin → npcsh-1.1.18.data/data/npcsh/npc_team}/yap.jinx +8 -2
  172. npcsh-1.1.18.data/data/npcsh/npc_team/zen_mode.jinx +9 -0
  173. {npcsh-1.1.16.dist-info → npcsh-1.1.18.dist-info}/METADATA +99 -7
  174. npcsh-1.1.18.dist-info/RECORD +235 -0
  175. {npcsh-1.1.16.dist-info → npcsh-1.1.18.dist-info}/WHEEL +1 -1
  176. {npcsh-1.1.16.dist-info → npcsh-1.1.18.dist-info}/entry_points.txt +2 -3
  177. npcsh/npc_team/jinxs/bin/spool.jinx +0 -161
  178. npcsh/npc_team/jinxs/bin/wander.jinx +0 -152
  179. npcsh/npc_team/jinxs/lib/research/arxiv.jinx +0 -76
  180. npcsh-1.1.16.data/data/npcsh/npc_team/arxiv.jinx +0 -76
  181. npcsh-1.1.16.data/data/npcsh/npc_team/jinxs.jinx +0 -176
  182. npcsh-1.1.16.data/data/npcsh/npc_team/paper_search.jinx +0 -101
  183. npcsh-1.1.16.data/data/npcsh/npc_team/search.jinx +0 -131
  184. npcsh-1.1.16.data/data/npcsh/npc_team/semantic_scholar.jinx +0 -69
  185. npcsh-1.1.16.data/data/npcsh/npc_team/spool.jinx +0 -161
  186. npcsh-1.1.16.data/data/npcsh/npc_team/sql.jinx +0 -16
  187. npcsh-1.1.16.data/data/npcsh/npc_team/wander.jinx +0 -152
  188. npcsh-1.1.16.dist-info/RECORD +0 -170
  189. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/alicanto.npc +0 -0
  190. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/alicanto.png +0 -0
  191. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/close_browser.jinx +0 -0
  192. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/corca.npc +0 -0
  193. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/corca.png +0 -0
  194. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/corca_example.png +0 -0
  195. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/frederic.npc +0 -0
  196. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/frederic4.png +0 -0
  197. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/guac.npc +0 -0
  198. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/guac.png +0 -0
  199. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
  200. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/kadiefa.png +0 -0
  201. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/load_file.jinx +0 -0
  202. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
  203. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
  204. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/paste.jinx +0 -0
  205. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/plonk.npc +0 -0
  206. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/plonk.png +0 -0
  207. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/plonkjr.npc +0 -0
  208. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/plonkjr.png +0 -0
  209. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/python.jinx +0 -0
  210. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/shh.jinx +0 -0
  211. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sibiji.png +0 -0
  212. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/spool.png +0 -0
  213. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/usage.jinx +0 -0
  214. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/verbose.jinx +0 -0
  215. {npcsh-1.1.16.data → npcsh-1.1.18.data}/data/npcsh/npc_team/yap.png +0 -0
  216. {npcsh-1.1.16.dist-info → npcsh-1.1.18.dist-info}/licenses/LICENSE +0 -0
  217. {npcsh-1.1.16.dist-info → npcsh-1.1.18.dist-info}/top_level.txt +0 -0
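The new npcsh-1.1.18.dist-info/RECORD listed above carries 235 entries. A minimal sketch for inspecting that manifest directly from the wheel (the wheel filename is an assumption; the RECORD path matches the dist-info directory in the listing, and its path/hash/size CSV layout comes from the wheel specification):

# Minimal sketch: read the RECORD manifest from the new wheel.
import csv
import io
import zipfile

with zipfile.ZipFile("npcsh-1.1.18-py3-none-any.whl") as whl:
    text = whl.read("npcsh-1.1.18.dist-info/RECORD").decode("utf-8")
    rows = [row for row in csv.reader(io.StringIO(text)) if row]
    print(len(rows), "recorded files")
    for path, digest, size in rows[:5]:
        print(path, size or "-")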
@@ -1,76 +0,0 @@
-jinx_name: arxiv
-description: Search arXiv for preprints and papers
-inputs:
-  - query: ""
-  - limit: 10
-steps:
-  - name: search_arxiv
-    engine: python
-    code: |
-      import urllib.request
-      import urllib.parse
-      import xml.etree.ElementTree as ET
-
-      query = context.get('query', '')
-      limit = int(context.get('limit', 10))
-
-      if not query:
-          context['output'] = "Usage: /arxiv <query> [--limit N]"
-          exit()
-
-      base_url = "http://export.arxiv.org/api/query"
-      params = {
-          "search_query": f"all:{query}",
-          "start": 0,
-          "max_results": limit,
-          "sortBy": "relevance",
-          "sortOrder": "descending"
-      }
-
-      url = f"{base_url}?{urllib.parse.urlencode(params)}"
-
-      try:
-          with urllib.request.urlopen(url, timeout=30) as response:
-              data = response.read().decode('utf-8')
-
-          root = ET.fromstring(data)
-          ns = {'atom': 'http://www.w3.org/2005/Atom'}
-
-          entries = root.findall('atom:entry', ns)
-
-          if not entries:
-              context['output'] = f"No papers found for: {query}"
-              exit()
-
-          results = []
-          papers = []
-          for i, entry in enumerate(entries, 1):
-              title = entry.find('atom:title', ns).text.strip().replace('\n', ' ')
-              summary = entry.find('atom:summary', ns).text.strip()[:300] + '...'
-              published = entry.find('atom:published', ns).text[:10]
-              authors = [a.find('atom:name', ns).text for a in entry.findall('atom:author', ns)]
-              author_str = ', '.join(authors[:3])
-              if len(authors) > 3:
-                  author_str += ' et al.'
-              link = entry.find('atom:id', ns).text
-
-              results.append(f"{i}. {title}")
-              results.append(f"   Authors: {author_str}")
-              results.append(f"   Published: {published}")
-              results.append(f"   Abstract: {summary}")
-              results.append(f"   URL: {link}")
-              results.append("")
-
-              papers.append({
-                  'title': title,
-                  'authors': authors,
-                  'abstract': entry.find('atom:summary', ns).text.strip(),
-                  'published': published,
-                  'url': link
-              })
-
-          context['output'] = f"Found {len(entries)} papers on arXiv:\n\n" + "\n".join(results)
-          context['papers'] = papers
-
-      except Exception as e:
-          context['output'] = f"arXiv search error: {e}"
@@ -1,76 +0,0 @@
-jinx_name: arxiv
-description: Search arXiv for preprints and papers
-inputs:
-  - query: ""
-  - limit: 10
-steps:
-  - name: search_arxiv
-    engine: python
-    code: |
-      import urllib.request
-      import urllib.parse
-      import xml.etree.ElementTree as ET
-
-      query = context.get('query', '')
-      limit = int(context.get('limit', 10))
-
-      if not query:
-          context['output'] = "Usage: /arxiv <query> [--limit N]"
-          exit()
-
-      base_url = "http://export.arxiv.org/api/query"
-      params = {
-          "search_query": f"all:{query}",
-          "start": 0,
-          "max_results": limit,
-          "sortBy": "relevance",
-          "sortOrder": "descending"
-      }
-
-      url = f"{base_url}?{urllib.parse.urlencode(params)}"
-
-      try:
-          with urllib.request.urlopen(url, timeout=30) as response:
-              data = response.read().decode('utf-8')
-
-          root = ET.fromstring(data)
-          ns = {'atom': 'http://www.w3.org/2005/Atom'}
-
-          entries = root.findall('atom:entry', ns)
-
-          if not entries:
-              context['output'] = f"No papers found for: {query}"
-              exit()
-
-          results = []
-          papers = []
-          for i, entry in enumerate(entries, 1):
-              title = entry.find('atom:title', ns).text.strip().replace('\n', ' ')
-              summary = entry.find('atom:summary', ns).text.strip()[:300] + '...'
-              published = entry.find('atom:published', ns).text[:10]
-              authors = [a.find('atom:name', ns).text for a in entry.findall('atom:author', ns)]
-              author_str = ', '.join(authors[:3])
-              if len(authors) > 3:
-                  author_str += ' et al.'
-              link = entry.find('atom:id', ns).text
-
-              results.append(f"{i}. {title}")
-              results.append(f"   Authors: {author_str}")
-              results.append(f"   Published: {published}")
-              results.append(f"   Abstract: {summary}")
-              results.append(f"   URL: {link}")
-              results.append("")
-
-              papers.append({
-                  'title': title,
-                  'authors': authors,
-                  'abstract': entry.find('atom:summary', ns).text.strip(),
-                  'published': published,
-                  'url': link
-              })
-
-          context['output'] = f"Found {len(entries)} papers on arXiv:\n\n" + "\n".join(results)
-          context['papers'] = papers
-
-      except Exception as e:
-          context['output'] = f"arXiv search error: {e}"
@@ -1,176 +0,0 @@
-jinx_name: jinxs
-description: "Show available jinxs organized by folder. Use /jinxs <path> for details on a specific folder."
-inputs:
-  - path: "" # Optional path to show details for (e.g., "lib/core", "bin")
-steps:
-  - name: list_jinxs
-    engine: python
-    code: |
-      import os
-      from pathlib import Path
-      import yaml
-
-      filter_path = context.get('path', '').strip()
-
-      # Find jinxs directory from team or fallback
-      jinxs_dir = None
-      if hasattr(npc, 'team') and npc.team:
-          if hasattr(npc.team, 'jinxs_dir') and npc.team.jinxs_dir:
-              jinxs_dir = Path(npc.team.jinxs_dir)
-          elif hasattr(npc.team, 'team_path') and npc.team.team_path:
-              candidate = Path(npc.team.team_path) / "jinxs"
-              if candidate.exists():
-                  jinxs_dir = candidate
-
-      if not jinxs_dir:
-          # Fallback to global jinxs
-          global_jinxs = Path.home() / ".npcsh" / "npc_team" / "jinxs"
-          if global_jinxs.exists():
-              jinxs_dir = global_jinxs
-
-      if not jinxs_dir or not jinxs_dir.exists():
-          output = "Error: Could not find jinxs directory"
-          exit()
-
-      def get_jinx_info(jinx_path):
-          """Extract name and description from a jinx file."""
-          try:
-              with open(jinx_path, 'r') as f:
-                  content = f.read()
-              # Parse just the header (before steps:)
-              header = content.split('steps:')[0] if 'steps:' in content else content
-              data = yaml.safe_load(header)
-              name = data.get('jinx_name', jinx_path.stem)
-              desc = data.get('description', 'No description')
-              return name, desc
-          except:
-              return jinx_path.stem, 'No description'
-
-      def get_folder_structure(base_path):
-          """Get jinxs organized by folder."""
-          structure = {}
-          for root, dirs, files in os.walk(base_path):
-              # Skip hidden directories
-              dirs[:] = [d for d in dirs if not d.startswith('.')]
-
-              jinx_files = [f for f in files if f.endswith('.jinx')]
-              if jinx_files:
-                  rel_path = Path(root).relative_to(base_path)
-                  rel_str = str(rel_path) if str(rel_path) != '.' else 'root'
-                  structure[rel_str] = []
-                  for jf in sorted(jinx_files):
-                      jinx_path = Path(root) / jf
-                      name, desc = get_jinx_info(jinx_path)
-                      structure[rel_str].append((name, desc, jf))
-          return structure
-
-      output_lines = []
-
-      if filter_path:
-          # Show details for a specific path
-          target_path = jinxs_dir / filter_path
-          if not target_path.exists():
-              # Try to find a matching folder
-              matches = []
-              for root, dirs, files in os.walk(jinxs_dir):
-                  rel = Path(root).relative_to(jinxs_dir)
-                  if filter_path in str(rel) or filter_path in Path(root).name:
-                      matches.append(rel)
-
-              if matches:
-                  output_lines.append(f"No exact match for '{filter_path}'. Did you mean:\n")
-                  for m in matches[:5]:
-                      output_lines.append(f"  /jinxs {m}\n")
-                  output = "".join(output_lines)
-                  exit()
-              else:
-                  output = f"No jinxs found at path: {filter_path}"
-                  exit()
-
-          # Get jinxs in this path
-          structure = get_folder_structure(target_path)
-          if not structure:
-              # Check if it's a single folder with jinxs
-              jinx_files = list(target_path.glob("*.jinx"))
-              if jinx_files:
-                  output_lines.append(f"Jinxs in {filter_path}:\n\n")
-                  for jf in sorted(jinx_files):
-                      name, desc = get_jinx_info(jf)
-                      output_lines.append(f"  /{name}\n")
-                      output_lines.append(f"    {desc}\n\n")
-              else:
-                  output = f"No jinxs found at path: {filter_path}"
-                  exit()
-          else:
-              output_lines.append(f"Jinxs in {filter_path}:\n\n")
-              for folder, jinxs in sorted(structure.items()):
-                  if folder != 'root':
-                      output_lines.append(f"  {folder}/\n")
-                  for name, desc, filename in jinxs:
-                      prefix = "    " if folder != 'root' else "  "
-                      output_lines.append(f"{prefix}/{name} - {desc}\n")
-                  output_lines.append("\n")
-
-      else:
-          # Show overview organized by folder
-          structure = get_folder_structure(jinxs_dir)
-
-          output_lines.append("Available Jinxs\n")
-          output_lines.append("=" * 40 + "\n\n")
-
-          # Group by top-level folder
-          top_level = {}
-          for folder, jinxs in structure.items():
-              if folder == 'root':
-                  top = 'root'
-              else:
-                  top = folder.split('/')[0] if '/' in folder else folder
-
-              if top not in top_level:
-                  top_level[top] = {'subfolders': {}, 'jinxs': []}
-
-              if folder == top or folder == 'root':
-                  top_level[top]['jinxs'].extend(jinxs)
-              else:
-                  subfolder = '/'.join(folder.split('/')[1:])
-                  if subfolder not in top_level[top]['subfolders']:
-                      top_level[top]['subfolders'][subfolder] = []
-                  top_level[top]['subfolders'][subfolder].extend(jinxs)
-
-          # Display
-          folder_order = ['bin', 'lib', 'npc_studio', 'root']
-          sorted_folders = sorted(top_level.keys(), key=lambda x: (folder_order.index(x) if x in folder_order else 99, x))
-
-          for top in sorted_folders:
-              data = top_level[top]
-
-              if top == 'root':
-                  if data['jinxs']:
-                      output_lines.append("Root Jinxs:\n")
-                      for name, desc, _ in data['jinxs']:
-                          output_lines.append(f"  /{name} - {desc}\n")
-                      output_lines.append("\n")
-              else:
-                  total = len(data['jinxs'])
-                  for sf_jinxs in data['subfolders'].values():
-                      total += len(sf_jinxs)
-
-                  output_lines.append(f"{top}/ ({total} jinxs)\n")
-
-                  # Show direct jinxs
-                  if data['jinxs']:
-                      for name, desc, _ in data['jinxs'][:3]:
-                          output_lines.append(f"  /{name} - {desc}\n")
-                      if len(data['jinxs']) > 3:
-                          output_lines.append(f"  ... and {len(data['jinxs']) - 3} more\n")
-
-                  # Show subfolders summary
-                  if data['subfolders']:
-                      for subfolder, jinxs in sorted(data['subfolders'].items()):
-                          output_lines.append(f"  {subfolder}/ ({len(jinxs)} jinxs)\n")
-
-                  output_lines.append(f"  → /jinxs {top} for details\n\n")
-
-          output_lines.append("Use /jinxs <path> for details (e.g., /jinxs lib/core)\n")
-
-      output = "".join(output_lines)
@@ -1,101 +0,0 @@
-jinx_name: paper_search
-description: Search for academic papers across multiple sources (Semantic Scholar, arXiv, local datasets)
-inputs:
-  - query: ""
-  - limit: 10
-  - source: "all"
-steps:
-  - name: search_papers
-    engine: python
-    code: |
-      import os
-      import time
-      import requests
-      import urllib.request
-      import urllib.parse
-      import xml.etree.ElementTree as ET
-
-      query = context.get('query', '')
-      limit = int(context.get('limit', 10))
-      source = context.get('source', 'all').lower()
-
-      if not query:
-          context['output'] = """Usage: /paper_search <query> [--limit N] [--source SOURCE]
-
-          Sources:
-            all - Search all available sources (default)
-            s2 - Semantic Scholar only (requires S2_API_KEY)
-            arxiv - arXiv only
-          """
-          exit()
-
-      all_results = []
-
-      # Semantic Scholar
-      if source in ['all', 's2']:
-          api_key = os.environ.get('S2_API_KEY')
-          if api_key:
-              try:
-                  url = "https://api.semanticscholar.org/graph/v1/paper/search"
-                  headers = {"x-api-key": api_key}
-                  params = {"query": query, "limit": limit, "fields": "title,abstract,authors,year,citationCount,url"}
-                  response = requests.get(url, headers=headers, params=params, timeout=30)
-                  response.raise_for_status()
-                  for paper in response.json().get('data', []):
-                      all_results.append({
-                          'source': 'Semantic Scholar',
-                          'title': paper.get('title', ''),
-                          'year': paper.get('year'),
-                          'citations': paper.get('citationCount', 0),
-                          'authors': [a.get('name', '') for a in paper.get('authors', [])],
-                          'abstract': paper.get('abstract', '')[:300] if paper.get('abstract') else '',
-                          'url': paper.get('url', '')
-                      })
-              except Exception as e:
-                  print(f"S2 error: {e}")
-
-      # arXiv
-      if source in ['all', 'arxiv']:
-          try:
-              base_url = "http://export.arxiv.org/api/query"
-              params = {"search_query": f"all:{query}", "max_results": limit}
-              url = f"{base_url}?{urllib.parse.urlencode(params)}"
-              with urllib.request.urlopen(url, timeout=30) as response:
-                  data = response.read().decode('utf-8')
-              root = ET.fromstring(data)
-              ns = {'atom': 'http://www.w3.org/2005/Atom'}
-              for entry in root.findall('atom:entry', ns):
-                  all_results.append({
-                      'source': 'arXiv',
-                      'title': entry.find('atom:title', ns).text.strip().replace('\n', ' '),
-                      'year': entry.find('atom:published', ns).text[:4],
-                      'citations': None,
-                      'authors': [a.find('atom:name', ns).text for a in entry.findall('atom:author', ns)],
-                      'abstract': entry.find('atom:summary', ns).text.strip()[:300],
-                      'url': entry.find('atom:id', ns).text
-                  })
-          except Exception as e:
-              print(f"arXiv error: {e}")
-
-      if not all_results:
-          context['output'] = f"No papers found for: {query}"
-          exit()
-
-      # Format output
-      results = []
-      for i, paper in enumerate(all_results[:limit], 1):
-          authors = ', '.join(paper['authors'][:3])
-          if len(paper['authors']) > 3:
-              authors += ' et al.'
-          year = paper.get('year', '?')
-          citations = f", {paper['citations']} citations" if paper.get('citations') else ""
-
-          results.append(f"{i}. [{paper['source']}] {paper['title']} ({year}{citations})")
-          results.append(f"   Authors: {authors}")
-          if paper['abstract']:
-              results.append(f"   Abstract: {paper['abstract']}...")
-          results.append(f"   URL: {paper['url']}")
-          results.append("")
-
-      context['output'] = f"Found {len(all_results)} papers:\n\n" + "\n".join(results)
-      context['papers'] = all_results
@@ -1,131 +0,0 @@
-jinx_name: "search"
-description: >
-  Executes a search across various sources.
-  Usage:
-  /search <query> (Default: Web Search)
-  /search --memory <query> (Search approved memories)
-  /search --kg <query> (Search the knowledge graph)
-  /search --rag [-f <paths>] <query> (Execute a RAG search)
-  /search --brainblast <query> (Advanced history search)
-inputs:
-  - query: ""
-  - sprovider: ""
-  - memory: false
-  - kg: false
-  - rag: false
-  - brainblast: false
-  - file_paths: ""
-  - history_db_path: "~/npcsh_history.db"
-  - vector_db_path: "~/npcsh_chroma.db"
-  - emodel: ""
-  - eprovider: ""
-steps:
-  - name: "execute_unified_search"
-    engine: "python"
-    code: |
-      import os
-      import traceback
-      from npcpy.data.web import search_web
-
-      # Access query from context
-      query = context.get('query')
-      if not query or not query.strip():
-          context['output'] = "Usage: /search [--memory|--kg|--rag|--brainblast] <query>"
-      else:
-          # state is available as a GLOBAL variable (from extra_globals)
-          # Access it directly, not from context
-          try:
-              current_state = state  # This should work now
-          except NameError:
-              context['output'] = "Error: Shell state not available in jinx context"
-              raise
-
-          current_npc = current_state.npc
-          current_team = current_state.team
-
-          npc_name = getattr(current_npc, 'name', '__none__') if current_npc else '__none__'
-          team_name = getattr(current_team, 'name', '__none__') if current_team else '__none__'
-          current_path = os.getcwd()
-          db_path = os.path.expanduser(context.get("history_db_path") or "~/.npcsh/npcsh_history.db")
-
-          try:
-              cmd_history = CommandHistory(db_path)
-
-              if context.get('memory'):
-                  memories = get_relevant_memories(
-                      command_history=cmd_history,
-                      npc_name=npc_name,
-                      team_name=team_name,
-                      path=current_path,
-                      query=query,
-                      max_memories=10,
-                      state=current_state  # Pass the state object
-                  )
-                  print(memories)
-
-                  if not memories:
-                      output = f"No memories found for query: '{query}'"
-                  else:
-                      output = f"Found {len(memories)} memories:\n\n" + "\n".join(
-                          f"{i}. [{mem.get('timestamp', 'unknown')}] {mem.get('final_memory') or mem.get('initial_memory')}"
-                          for i, mem in enumerate(memories, 1)
-                      )
-
-              elif context.get('kg'):
-                  facts = search_kg_facts(
-                      cmd_history,
-                      npc_name,
-                      team_name,
-                      current_path,
-                      query
-                  )
-                  print(facts)
-
-                  if not facts:
-                      output = f"No KG facts found for query: '{query}'"
-                  else:
-                      output = f"Found {len(facts)} KG facts:\n\n" + "\n".join(
-                          f"{i}. {fact.get('statement')}" for i, fact in enumerate(facts, 1)
-                      )
-
-              elif context.get('rag'):
-                  file_paths_str = context.get('file_paths', '')
-                  file_paths = [os.path.abspath(os.path.expanduser(p.strip())) for p in file_paths_str.split(',') if p.strip()]
-                  emodel = context.get('emodel') or current_state.embedding_model
-                  eprovider = context.get('eprovider') or current_state.embedding_provider
-
-                  file_contents = []
-                  for path in file_paths:
-                      chunks = load_file_contents(path)
-                      basename = os.path.basename(path)
-                      file_contents.extend([f"{basename}: {chunk}" for chunk in chunks])
-
-                  result = execute_rag_command(
-                      command=query,
-                      vector_db_path=os.path.expanduser(context.get('vector_db_path') or "~/.npcsh/npcsh_chroma.db"),
-                      embedding_model=emodel,
-                      embedding_provider=eprovider,
-                      file_contents=file_contents or None
-                  )
-                  print(result)
-                  output = result.get('response', 'No response from RAG.')
-
-              elif context.get('brainblast'):
-                  result = execute_brainblast_command(
-                      command=query,
-                      command_history=cmd_history,
-                      **context
-                  )
-                  print(result)
-                  output = result.get('output', 'Brainblast search executed.')
-
-              else:
-                  # Default to web search
-                  provider = context.get('sprovider') or current_state.search_provider
-                  results = search_web(query, provider=provider)
-                  output = "\n".join([f"- {res}" for res in results]) if results else "No web results found."
-
-          except Exception as e:
-              output = f"An error occurred in the search jinx: {e}\n{traceback.format_exc()}"
-
-          context['output'] = output
@@ -1,69 +0,0 @@
-jinx_name: semantic_scholar
-description: Search Semantic Scholar for academic papers. Requires S2_API_KEY env var.
-inputs:
-  - query: ""
-  - limit: 10
-steps:
-  - name: search_s2
-    engine: python
-    code: |
-      import os
-      import time
-      import requests
-
-      query = context.get('query', '')
-      limit = int(context.get('limit', 10))
-
-      if not query:
-          context['output'] = "Usage: /semantic_scholar <query> [--limit N]"
-          exit()
-
-      api_key = os.environ.get('S2_API_KEY')
-      if not api_key:
-          context['output'] = "Error: S2_API_KEY environment variable not set. Get one at https://www.semanticscholar.org/product/api"
-          exit()
-
-      url = "https://api.semanticscholar.org/graph/v1/paper/search"
-      headers = {"x-api-key": api_key}
-      params = {
-          "query": query,
-          "limit": limit,
-          "fields": "title,abstract,authors,year,citationCount,url,tldr"
-      }
-
-      try:
-          response = requests.get(url, headers=headers, params=params, timeout=30)
-          response.raise_for_status()
-          data = response.json().get('data', [])
-
-          if not data:
-              context['output'] = f"No papers found for: {query}"
-              exit()
-
-          results = []
-          for i, paper in enumerate(data, 1):
-              title = paper.get('title', 'No title')
-              year = paper.get('year', '?')
-              citations = paper.get('citationCount', 0)
-              authors = ', '.join([a.get('name', '') for a in paper.get('authors', [])[:3]])
-              if len(paper.get('authors', [])) > 3:
-                  authors += ' et al.'
-              abstract = paper.get('abstract', '')[:200] + '...' if paper.get('abstract') else 'No abstract'
-              tldr = paper.get('tldr', {}).get('text', '') if paper.get('tldr') else ''
-              url = paper.get('url', '')
-
-              results.append(f"{i}. {title} ({year})")
-              results.append(f"   Authors: {authors}")
-              results.append(f"   Citations: {citations}")
-              if tldr:
-                  results.append(f"   TL;DR: {tldr}")
-              else:
-                  results.append(f"   Abstract: {abstract}")
-              results.append(f"   URL: {url}")
-              results.append("")
-
-          context['output'] = f"Found {len(data)} papers:\n\n" + "\n".join(results)
-          context['papers'] = data
-
-      except requests.exceptions.RequestException as e:
-          context['output'] = f"Semantic Scholar API error: {e}"