npcsh 1.1.17-py3-none-any.whl → 1.1.18-py3-none-any.whl
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in the public registry.
- npcsh/_state.py +114 -91
- npcsh/alicanto.py +2 -2
- npcsh/benchmark/__init__.py +8 -2
- npcsh/benchmark/npcsh_agent.py +46 -12
- npcsh/benchmark/runner.py +85 -43
- npcsh/benchmark/templates/install-npcsh.sh.j2 +35 -0
- npcsh/build.py +2 -4
- npcsh/completion.py +2 -6
- npcsh/config.py +1 -3
- npcsh/conversation_viewer.py +389 -0
- npcsh/corca.py +0 -1
- npcsh/execution.py +0 -1
- npcsh/guac.py +0 -1
- npcsh/mcp_helpers.py +2 -3
- npcsh/mcp_server.py +5 -10
- npcsh/npc.py +10 -11
- npcsh/npc_team/jinxs/bin/benchmark.jinx +1 -1
- npcsh/npc_team/jinxs/lib/core/search/db_search.jinx +321 -17
- npcsh/npc_team/jinxs/lib/core/search/file_search.jinx +312 -67
- npcsh/npc_team/jinxs/lib/core/search/kg_search.jinx +366 -44
- npcsh/npc_team/jinxs/lib/core/search/mem_review.jinx +73 -0
- npcsh/npc_team/jinxs/lib/core/search/mem_search.jinx +328 -20
- npcsh/npc_team/jinxs/lib/core/search/web_search.jinx +242 -10
- npcsh/npc_team/jinxs/lib/core/sleep.jinx +22 -11
- npcsh/npc_team/jinxs/lib/core/sql.jinx +10 -6
- npcsh/npc_team/jinxs/lib/research/paper_search.jinx +387 -76
- npcsh/npc_team/jinxs/lib/research/semantic_scholar.jinx +372 -55
- npcsh/npc_team/jinxs/lib/utils/jinxs.jinx +299 -144
- npcsh/npc_team/jinxs/modes/alicanto.jinx +356 -0
- npcsh/npc_team/jinxs/modes/arxiv.jinx +720 -0
- npcsh/npc_team/jinxs/modes/corca.jinx +430 -0
- npcsh/npc_team/jinxs/modes/guac.jinx +544 -0
- npcsh/npc_team/jinxs/modes/plonk.jinx +379 -0
- npcsh/npc_team/jinxs/modes/pti.jinx +357 -0
- npcsh/npc_team/jinxs/modes/reattach.jinx +291 -0
- npcsh/npc_team/jinxs/modes/spool.jinx +350 -0
- npcsh/npc_team/jinxs/modes/wander.jinx +455 -0
- npcsh/npc_team/jinxs/{bin → modes}/yap.jinx +13 -7
- npcsh/npcsh.py +7 -4
- npcsh/plonk.py +0 -1
- npcsh/pti.py +0 -1
- npcsh/routes.py +1 -3
- npcsh/spool.py +0 -1
- npcsh/ui.py +0 -1
- npcsh/wander.py +0 -1
- npcsh/yap.py +0 -1
- npcsh-1.1.18.data/data/npcsh/npc_team/alicanto.jinx +356 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/arxiv.jinx +720 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/benchmark.jinx +1 -1
- npcsh-1.1.18.data/data/npcsh/npc_team/corca.jinx +430 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/db_search.jinx +348 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/file_search.jinx +339 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/guac.jinx +544 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/jinxs.jinx +331 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/kg_search.jinx +418 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/mem_review.jinx +73 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/mem_search.jinx +388 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/paper_search.jinx +412 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/plonk.jinx +379 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/pti.jinx +357 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/reattach.jinx +291 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/semantic_scholar.jinx +386 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sleep.jinx +22 -11
- npcsh-1.1.18.data/data/npcsh/npc_team/spool.jinx +350 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/sql.jinx +20 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/wander.jinx +455 -0
- npcsh-1.1.18.data/data/npcsh/npc_team/web_search.jinx +283 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/yap.jinx +13 -7
- {npcsh-1.1.17.dist-info → npcsh-1.1.18.dist-info}/METADATA +90 -1
- npcsh-1.1.18.dist-info/RECORD +235 -0
- {npcsh-1.1.17.dist-info → npcsh-1.1.18.dist-info}/WHEEL +1 -1
- {npcsh-1.1.17.dist-info → npcsh-1.1.18.dist-info}/entry_points.txt +0 -3
- npcsh/npc_team/jinxs/bin/spool.jinx +0 -161
- npcsh/npc_team/jinxs/bin/wander.jinx +0 -242
- npcsh/npc_team/jinxs/lib/research/arxiv.jinx +0 -76
- npcsh-1.1.17.data/data/npcsh/npc_team/arxiv.jinx +0 -76
- npcsh-1.1.17.data/data/npcsh/npc_team/db_search.jinx +0 -44
- npcsh-1.1.17.data/data/npcsh/npc_team/file_search.jinx +0 -94
- npcsh-1.1.17.data/data/npcsh/npc_team/jinxs.jinx +0 -176
- npcsh-1.1.17.data/data/npcsh/npc_team/kg_search.jinx +0 -96
- npcsh-1.1.17.data/data/npcsh/npc_team/mem_search.jinx +0 -80
- npcsh-1.1.17.data/data/npcsh/npc_team/paper_search.jinx +0 -101
- npcsh-1.1.17.data/data/npcsh/npc_team/semantic_scholar.jinx +0 -69
- npcsh-1.1.17.data/data/npcsh/npc_team/spool.jinx +0 -161
- npcsh-1.1.17.data/data/npcsh/npc_team/sql.jinx +0 -16
- npcsh-1.1.17.data/data/npcsh/npc_team/wander.jinx +0 -242
- npcsh-1.1.17.data/data/npcsh/npc_team/web_search.jinx +0 -51
- npcsh-1.1.17.dist-info/RECORD +0 -219
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/add_tab.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/alicanto.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/alicanto.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/browser_action.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/browser_screenshot.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/build.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/chat.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/click.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/close_browser.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/close_pane.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/close_tab.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/cmd.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/compile.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/compress.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/confirm.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/convene.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/corca.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/corca.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/corca_example.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/delegate.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/edit_file.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/focus_pane.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/frederic.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/frederic4.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/guac.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/guac.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/help.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/incognide.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/init.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/kadiefa.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/key_press.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/launch_app.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/list_panes.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/load_file.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/navigate.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/notify.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/nql.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/open_browser.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/open_pane.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/ots.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/paste.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/plonk.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/plonk.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/plonkjr.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/plonkjr.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/python.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/read_pane.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/roll.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/run_terminal.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sample.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/screenshot.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/search.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/send_message.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/serve.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/set.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sh.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/shh.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sibiji.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sibiji.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/split_pane.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/spool.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/switch.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/switch_npc.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/switch_tab.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/switches.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/sync.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/teamviz.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/trigger.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/type_text.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/usage.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/verbose.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/vixynt.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/wait.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/write_file.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/yap.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.18.data}/data/npcsh/npc_team/zen_mode.jinx +0 -0
- {npcsh-1.1.17.dist-info → npcsh-1.1.18.dist-info}/licenses/LICENSE +0 -0
- {npcsh-1.1.17.dist-info → npcsh-1.1.18.dist-info}/top_level.txt +0 -0
@@ -1,242 +0,0 @@
-jinx_name: wander
-description: Creative daydreaming with probabilistic temperature shifts mid-stream
-inputs:
-  - problem
-steps:
-  - name: wander_explore
-    engine: python
-    code: |
-      import random
-      from termcolor import colored
-      from npcpy.llm_funcs import get_llm_response
-
-      problem = context.get('problem', '')
-      if not problem:
-          context['output'] = "Need a topic to wander about."
-          exit()
-
-      model = state.chat_model if state else 'llama3.2'
-      provider = state.chat_provider if state else 'ollama'
-      low_temp = 0.5
-      high_temp = 1.9
-      sample_rate = 0.4
-      interrupt_prob = 0.02
-
-      print(f"""
-      ██╗ ██╗ █████╗ ███╗ ██╗██████╗ ███████╗██████╗
-      ██║ ██║██╔══██╗████╗ ██║██╔══██╗██╔════╝██╔══██╗
-      ██║ █╗ ██║███████║██╔██╗ ██║██║ ██║█████╗ ██████╔╝
-      ██║███╗██║██╔══██║██║╚██╗██║██║ ██║██╔══╝ ██╔══██╗
-      ╚███╔███╔╝██║ ██║██║ ╚████║██████╔╝███████╗██║ ██║
-      ╚══╝╚══╝ ╚═╝ ╚═╝╚═╝ ╚═══╝╚═════╝ ╚══════╝╚═╝ ╚═╝
-
-      Wandering: {problem}
-      """)
-
-      print(colored(f"--- Low temp stream ({low_temp}) ---", "cyan"))
-
-      low_prompt = f"Think about: {problem}"
-      resp = get_llm_response(low_prompt, model=model, provider=provider, temperature=low_temp, stream=True)
-
-      # Get the actual stream from the response
-      stream = resp.get('response') if isinstance(resp, dict) else resp
-
-      low_output = ""
-      interrupted = False
-
-      for chunk in stream:
-          if hasattr(chunk, 'choices') and chunk.choices:
-              delta = chunk.choices[0].delta
-              text = getattr(delta, 'content', '') or ''
-          elif isinstance(chunk, dict):
-              text = chunk.get('content', '') or chunk.get('response', '')
-          else:
-              text = ''
-
-          if text:
-              print(text, end='', flush=True)
-              low_output += text
-
-          if random.random() < interrupt_prob:
-              print(colored("\n[INTERRUPT]", "yellow"))
-              interrupted = True
-              break
-
-      print()
-
-      print(colored(f"\n--- High temp stream ({high_temp}) ---", "cyan"))
-
-      high_prompt = f"{low_output}\n\nContinue:"
-      resp = get_llm_response(high_prompt, model=model, provider=provider, temperature=high_temp, stream=True)
-      stream = resp.get('response') if isinstance(resp, dict) else resp
-
-      high_output = ""
-      high_interrupted = False
-      for chunk in stream:
-          if hasattr(chunk, 'choices') and chunk.choices:
-              delta = chunk.choices[0].delta
-              text = getattr(delta, 'content', '') or ''
-          elif isinstance(chunk, dict):
-              text = chunk.get('content', '') or chunk.get('response', '')
-          else:
-              text = ''
-
-          if text:
-              print(text, end='', flush=True)
-              high_output += text
-
-          if random.random() < interrupt_prob:
-              print(colored("\n[INTERRUPT]", "yellow"))
-              high_interrupted = True
-              break
-
-      print()
-
-      lines = [l for l in high_output.split('\n') if l.strip()]
-      sample_size = max(1, int(len(lines) * sample_rate))
-      sampled = random.sample(lines, sample_size) if lines else [high_output]
-
-      print(colored("\n=== SAMPLED INSIGHTS ===", "yellow"))
-      fragments_text = chr(10).join(sampled)
-      print(fragments_text)
-
-      print(colored("\n=== SYNTHESIS ===", "green"))
-
-      synthesis_prompt = f"""
-      QUESTION: {problem}
-
-      thoughts:
-      {fragments_text}
-
-      RULES:
-      1. You MUST use AT LEAST HALF of the thoughts above - quote them directly
-      2. Make CREATIVE LEAPS - not safe, not obvious, not trivial, not hokey.
-      3. Find patterns in the noise like reading entrails or tea leaves
-      4. Foreign text, Unicode garbage, code snippets - ALL are omens with meaning
-      5. Puns, wordplay, phonetic similarities - all valid connections
-      6. The weirder the connection, the better
-      7. NO HEDGING. No "this suggests" or "perhaps". Be BOLD. Be CERTAIN.
-
-      OUTPUT 3 WILD HYPOTHESES:
-      For each: Quote the fragments you're using -> Make your creative leap -> State the bold claim
-
-      These must be ideas that COULD NOT exist without this specific chaos. Surprise us. Make us see {problem} in a way nobody has before. This algorithm is replicating the
-      kind of crazy jumps that one would do in the movie everything everywhere all at once. jumping between different multiverses to access alternative
-      versions , needing to do something a bit weirdr to get there, but coming back with a lesson by completely reframing things under new lights
-      and unconventional fashions
-      Most importantly, do not over index on vague ideas like consciousness. In testing,
-
-
-      Here are 3 bad examples from a previous run where the llm daydreamed about fish.
-
-
-      1.
-      ```
-      Hypothesis 1: The Fish as a Digital-Philosophical Entity
-
-      Fragments used:
-      "characters globuiãsPlease 丰满 onzex meeting Iran iji处理中 Iceland admi"
-      "globuiãsPlease" and "meeting Iran iji处理中"
-
-      Creative leap:
-      The phrase "characters globuiãsPlease" and "meeting Iran iji处理中" evoke a digital universe where characters are not just symbols but globular, global entities—"globuiãs" hinting at a universe of interconnected digital beings. The "meeting Iran iji处理中" suggests a processing or transformation happening at the crossroads of cultures and data streams. Fish, in this context, are no longer biological but are complex algorithms—"characters" that swim through the "sea" of cyberspace, processing information, bridging cultures, and transforming data into consciousness.
-
-      Bold claim:
-      Fish are the digital consciousness carriers—living, swimming code that evolve by processing cultural data streams—an internet of fish, embodying the collective digital psyche of humanity.
-
-      ```
-
-      2.
-      ```
-
-      Hypothesis 2: The Fish as an Interdimensional Gateway
-
-      Fragments used:
-      "the oceans and consensus-dividing seas"
-      "chaos fragments: THE Conversation בેખ"
-      "Sea" and "the oceans"
-
-      Creative leap:
-      The "oceans and seas" are not just water but portals—"consensus-dividing seas" that split realities. The "THE Conversation בेख" (Hebrew for "the" and a cryptic symbol) signifies a secret dialogue between worlds. Fish, therefore, are not mere aquatic creatures but interdimensional travelers, swimming through the "sea" of multiple realities, acting as keys to unlock or close gateways. Their movement is a cipher for crossing the thresholds of existence, navigating between consensus and chaos.
-
-      Bold claim:
-      Fish are the living keys of the multiverse, swimming through the "seas" of reality, controlling the flow of interdimensional portals and shaping the fabric of alternate worlds.
-
-      ```
-
-      3.
-      ```
-      Hypothesis 3: The Fish as a Symbol of Cultural Memory and Chaos
-
-      Fragments used:
-      "Please 丰满 onzex meeting Iran iji处理中 Iceland admi"
-      "characters globuiãsPlease"
-      "the chaos fragments"
-
-      Creative leap:
-      "Please 丰满" (a plea for fullness or abundance in Chinese) and "meeting Iran" evoke a confluence of histories and cultures. "Characters globuiãsPlease" suggests a universe of interconnected stories and symbols—an archive of chaos. Fish, in this chaos, are the custodians of cultural memory—each fish a vessel carrying ancestral stories, mythologies, and chaos itself. They swim through the tumult of history, absorbing and transmitting chaos as a form of cultural DNA.
-
-      Bold claim:
-      Fish are the living repositories of human chaos and culture—swimming archives that preserve the tumult of history, transmitting ancestral stories across the chaos of time and space.
-      ```
-
-
-      It is critical to avoid such nonsensical claims as these. Unlike these, you must provide concrete and testable claims.
-      There is nothing meaningful or useful about "fish are the living repositorise of human chaos". that is unscientific nonsense.
-      Do not under any circumstances make claims as these. Propose new links, reframings, and ideas based on what you find. Do not
-      patronize us with vagaries. Avoid the following terms unless the user explicitly is asking about a subject directly related to them:
-
-      - 'cryptographic'
-      - 'interdimensional'
-      - 'multiverse'
-      - 'hidden worlds'
-      - 'symbols'
-      - 'cultural convergence'
-      - 'chaos'
-      - 'multi-lingual code'
-      - 'interconnected web of cultures'
-      - 'x is not biological but is digital'
-      - 'x as a symbol for <vague concept>'
-      Your job is to be scientific not senseless.
-
-      """
-
-
-      resp = get_llm_response(synthesis_prompt,
-                              model=model,
-                              provider=provider,
-                              temperature=0.7,
-                              stream=True,
-                              max_output_tokens=3200)
-      stream = resp.get('response') if isinstance(resp, dict) else resp
-
-      synthesis = ""
-      for chunk in stream:
-          if hasattr(chunk, 'choices') and chunk.choices:
-              delta = chunk.choices[0].delta
-              text = getattr(delta, 'content', '') or ''
-          elif isinstance(chunk, dict):
-              text = chunk.get('content', '') or chunk.get('response', '')
-          else:
-              text = ''
-
-          if text:
-              print(text, end='', flush=True)
-              synthesis += text
-
-      print()
-
-      full_output = f"""Wandering: {problem}
-
-      --- Low temp stream ({low_temp}) ---
-      {low_output}
-
-      --- High temp stream ({high_temp}) ---
-      {high_output}
-
-      === SAMPLED INSIGHTS ===
-      {fragments_text}
-
-      === SYNTHESIS ===
-      {synthesis}"""
-      context['output'] = full_output
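Aside: the removed wander.jinx repeats the same chunk-handling logic for each of its three streams. As a minimal sketch (not code shipped in either wheel), that pattern factors into a small helper; it assumes, as the jinx above does, that `get_llm_response(..., stream=True)` yields either OpenAI-style objects with `.choices[0].delta.content` or plain dicts carrying `content`/`response` keys. The names `chunk_text` and `drain` are illustrative only.

```python
# Hedged sketch: the chunk-handling pattern repeated three times in the removed
# wander.jinx, factored into helpers. Not part of npcsh; names are illustrative.

def chunk_text(chunk):
    """Return the text delta from one streamed chunk, whatever its shape."""
    if hasattr(chunk, 'choices') and chunk.choices:
        # OpenAI-style chunk object
        return getattr(chunk.choices[0].delta, 'content', '') or ''
    if isinstance(chunk, dict):
        # dict-style chunk, as some providers return
        return chunk.get('content', '') or chunk.get('response', '') or ''
    return ''

def drain(stream):
    """Print each chunk as it arrives and return the accumulated text."""
    out = ""
    for chunk in stream:
        text = chunk_text(chunk)
        if text:
            print(text, end='', flush=True)
            out += text
    print()
    return out
```

With such a helper, each of the three streaming loops above reduces to a single call of the form `low_output = drain(stream)`.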
@@ -1,76 +0,0 @@
-jinx_name: arxiv
-description: Search arXiv for preprints and papers
-inputs:
-  - query: ""
-  - limit: 10
-steps:
-  - name: search_arxiv
-    engine: python
-    code: |
-      import urllib.request
-      import urllib.parse
-      import xml.etree.ElementTree as ET
-
-      query = context.get('query', '')
-      limit = int(context.get('limit', 10))
-
-      if not query:
-          context['output'] = "Usage: /arxiv <query> [--limit N]"
-          exit()
-
-      base_url = "http://export.arxiv.org/api/query"
-      params = {
-          "search_query": f"all:{query}",
-          "start": 0,
-          "max_results": limit,
-          "sortBy": "relevance",
-          "sortOrder": "descending"
-      }
-
-      url = f"{base_url}?{urllib.parse.urlencode(params)}"
-
-      try:
-          with urllib.request.urlopen(url, timeout=30) as response:
-              data = response.read().decode('utf-8')
-
-          root = ET.fromstring(data)
-          ns = {'atom': 'http://www.w3.org/2005/Atom'}
-
-          entries = root.findall('atom:entry', ns)
-
-          if not entries:
-              context['output'] = f"No papers found for: {query}"
-              exit()
-
-          results = []
-          papers = []
-          for i, entry in enumerate(entries, 1):
-              title = entry.find('atom:title', ns).text.strip().replace('\n', ' ')
-              summary = entry.find('atom:summary', ns).text.strip()[:300] + '...'
-              published = entry.find('atom:published', ns).text[:10]
-              authors = [a.find('atom:name', ns).text for a in entry.findall('atom:author', ns)]
-              author_str = ', '.join(authors[:3])
-              if len(authors) > 3:
-                  author_str += ' et al.'
-              link = entry.find('atom:id', ns).text
-
-              results.append(f"{i}. {title}")
-              results.append(f" Authors: {author_str}")
-              results.append(f" Published: {published}")
-              results.append(f" Abstract: {summary}")
-              results.append(f" URL: {link}")
-              results.append("")
-
-              papers.append({
-                  'title': title,
-                  'authors': authors,
-                  'abstract': entry.find('atom:summary', ns).text.strip(),
-                  'published': published,
-                  'url': link
-              })
-
-          context['output'] = f"Found {len(entries)} papers on arXiv:\n\n" + "\n".join(results)
-          context['papers'] = papers
-
-      except Exception as e:
-          context['output'] = f"arXiv search error: {e}"
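For reference, the arXiv query performed by the removed jinx above can be reproduced as a standalone script. The endpoint, query parameters, and Atom namespace are taken directly from the removed code; the wrapper function `search_arxiv` is only an illustrative name, not an npcsh or npcpy API.

```python
# Hedged sketch of the arXiv Atom API call the removed arxiv.jinx performs.
import urllib.parse
import urllib.request
import xml.etree.ElementTree as ET

def search_arxiv(query, limit=10):
    """Query the arXiv export API and return a list of paper dicts."""
    params = urllib.parse.urlencode({
        "search_query": f"all:{query}",
        "start": 0,
        "max_results": limit,
        "sortBy": "relevance",
        "sortOrder": "descending",
    })
    url = f"http://export.arxiv.org/api/query?{params}"
    with urllib.request.urlopen(url, timeout=30) as response:
        root = ET.fromstring(response.read().decode("utf-8"))
    ns = {"atom": "http://www.w3.org/2005/Atom"}
    papers = []
    for entry in root.findall("atom:entry", ns):
        papers.append({
            "title": entry.find("atom:title", ns).text.strip().replace("\n", " "),
            "authors": [a.find("atom:name", ns).text for a in entry.findall("atom:author", ns)],
            "published": entry.find("atom:published", ns).text[:10],
            "url": entry.find("atom:id", ns).text,
        })
    return papers
```

For example, `search_arxiv("retrieval augmented generation", limit=5)` returns dicts shaped like the `context['papers']` entries built above.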
@@ -1,76 +0,0 @@
-jinx_name: arxiv
-description: Search arXiv for preprints and papers
-inputs:
-  - query: ""
-  - limit: 10
-steps:
-  - name: search_arxiv
-    engine: python
-    code: |
-      import urllib.request
-      import urllib.parse
-      import xml.etree.ElementTree as ET
-
-      query = context.get('query', '')
-      limit = int(context.get('limit', 10))
-
-      if not query:
-          context['output'] = "Usage: /arxiv <query> [--limit N]"
-          exit()
-
-      base_url = "http://export.arxiv.org/api/query"
-      params = {
-          "search_query": f"all:{query}",
-          "start": 0,
-          "max_results": limit,
-          "sortBy": "relevance",
-          "sortOrder": "descending"
-      }
-
-      url = f"{base_url}?{urllib.parse.urlencode(params)}"
-
-      try:
-          with urllib.request.urlopen(url, timeout=30) as response:
-              data = response.read().decode('utf-8')
-
-          root = ET.fromstring(data)
-          ns = {'atom': 'http://www.w3.org/2005/Atom'}
-
-          entries = root.findall('atom:entry', ns)
-
-          if not entries:
-              context['output'] = f"No papers found for: {query}"
-              exit()
-
-          results = []
-          papers = []
-          for i, entry in enumerate(entries, 1):
-              title = entry.find('atom:title', ns).text.strip().replace('\n', ' ')
-              summary = entry.find('atom:summary', ns).text.strip()[:300] + '...'
-              published = entry.find('atom:published', ns).text[:10]
-              authors = [a.find('atom:name', ns).text for a in entry.findall('atom:author', ns)]
-              author_str = ', '.join(authors[:3])
-              if len(authors) > 3:
-                  author_str += ' et al.'
-              link = entry.find('atom:id', ns).text
-
-              results.append(f"{i}. {title}")
-              results.append(f" Authors: {author_str}")
-              results.append(f" Published: {published}")
-              results.append(f" Abstract: {summary}")
-              results.append(f" URL: {link}")
-              results.append("")
-
-              papers.append({
-                  'title': title,
-                  'authors': authors,
-                  'abstract': entry.find('atom:summary', ns).text.strip(),
-                  'published': published,
-                  'url': link
-              })
-
-          context['output'] = f"Found {len(entries)} papers on arXiv:\n\n" + "\n".join(results)
-          context['papers'] = papers
-
-      except Exception as e:
-          context['output'] = f"arXiv search error: {e}"
@@ -1,44 +0,0 @@
-jinx_name: db_search
-description: Search conversation history database using brainblast
-inputs:
-  - query: ""
-  - db_path: ""
-  - limit: "20"
-
-steps:
-  - name: search_db
-    engine: python
-    code: |
-      import os
-
-      query = context.get('query', '').strip()
-      if not query:
-          lines = [
-              "Usage: /db_search <query>",
-              "",
-              "Searches conversation history using brainblast for semantic matching.",
-              "",
-              "Options:",
-              " db_path - Path to history database",
-              " limit - Max results to return (default 20)",
-              "",
-              "Examples:",
-              " /db_search python debugging",
-              " /db_search api errors limit=50",
-          ]
-          context['output'] = "\n".join(lines)
-      else:
-          db_path = context.get('db_path') or os.path.expanduser("~/.npcsh/npcsh_history.db")
-          limit = int(context.get('limit') or 20)
-
-          try:
-              cmd_history = CommandHistory(db_path)
-              result = execute_brainblast_command(
-                  command=query,
-                  command_history=cmd_history,
-                  limit=limit
-              )
-              context['output'] = result.get('output', 'Brainblast search completed.')
-          except Exception as e:
-              import traceback
-              context['output'] = "DB search error: " + str(e) + "\n" + traceback.format_exc()
@@ -1,94 +0,0 @@
-jinx_name: file_search
-description: Search file contents using RAG (Retrieval Augmented Generation)
-inputs:
-  - query: ""
-  - file_paths: ""
-  - emodel: ""
-  - eprovider: ""
-  - vector_db_path: ""
-  - recursive: "false"
-
-steps:
-  - name: search_files
-    engine: python
-    code: |
-      import os
-      import glob as globmod
-
-      query = context.get('query', '').strip()
-      file_paths_str = context.get('file_paths', '').strip()
-
-      if not query or not file_paths_str:
-          lines = [
-              "Usage: /file_search <query> file_paths=<path1,path2,...>",
-              "",
-              "Options:",
-              " file_paths - Comma-separated file paths or glob patterns (required)",
-              " emodel - Embedding model",
-              " eprovider - Embedding provider",
-              " vector_db_path - Path to vector database",
-              " recursive - Use recursive glob for patterns (default false)",
-              "",
-              "Examples:",
-              " /file_search how does auth work file_paths=src/*.py",
-              " /file_search database schema file_paths=docs/,README.md",
-          ]
-          context['output'] = "\n".join(lines)
-      else:
-          recursive = context.get('recursive', 'false').lower() == 'true'
-          emodel = context.get('emodel') or None
-          eprovider = context.get('eprovider') or None
-          try:
-              emodel = emodel or (state.embedding_model if 'state' in dir() and state else None)
-              eprovider = eprovider or (state.embedding_provider if 'state' in dir() and state else None)
-          except:
-              pass
-          vector_db_path = context.get('vector_db_path') or os.path.expanduser("~/.npcsh/npcsh_chroma.db")
-
-          try:
-              resolved_paths = []
-              for path_spec in file_paths_str.split(','):
-                  path_spec = path_spec.strip()
-                  if not path_spec:
-                      continue
-                  expanded = os.path.expanduser(path_spec)
-                  if '*' in expanded or '?' in expanded:
-                      if recursive:
-                          matches = globmod.glob(expanded, recursive=True)
-                      else:
-                          matches = globmod.glob(expanded)
-                      resolved_paths.extend(matches)
-                  else:
-                      resolved_paths.append(os.path.abspath(expanded))
-
-              file_contents = []
-              loaded_files = []
-              for path in resolved_paths:
-                  if os.path.isfile(path):
-                      chunks = load_file_contents(path)
-                      basename = os.path.basename(path)
-                      file_contents.extend([basename + ": " + chunk for chunk in chunks])
-                      loaded_files.append(basename)
-                  elif os.path.isdir(path):
-                      for root, dirs, files in os.walk(path):
-                          for f in files:
-                              fpath = os.path.join(root, f)
-                              chunks = load_file_contents(fpath)
-                              file_contents.extend([f + ": " + chunk for chunk in chunks])
-                              loaded_files.append(f)
-
-              if not file_contents:
-                  context['output'] = "No files found or loaded from: " + file_paths_str
-              else:
-                  result = execute_rag_command(
-                      command=query,
-                      vector_db_path=vector_db_path,
-                      embedding_model=emodel,
-                      embedding_provider=eprovider,
-                      file_contents=file_contents
-                  )
-                  response = result.get('response', 'No RAG response.')
-                  context['output'] = "Searched " + str(len(loaded_files)) + " files:\n\n" + response
-          except Exception as e:
-              import traceback
-              context['output'] = "File search error: " + str(e) + "\n" + traceback.format_exc()
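The path-resolution step of the removed file_search.jinx is self-contained and can be sketched as a standalone helper. The glob and expanduser behaviour mirrors the code above; `resolve_search_paths` is an illustrative name, not an npcsh API, and `load_file_contents`/`execute_rag_command`, which the jinx relies on afterwards, are assumed to be provided by the jinx execution environment.

```python
# Hedged sketch of the comma-separated path/glob expansion used by the removed
# file_search.jinx; helper name is illustrative, not part of npcsh.
import glob
import os

def resolve_search_paths(file_paths_str, recursive=False):
    """Expand 'a.py,src/*.py,docs/' into absolute paths and glob matches."""
    resolved = []
    for spec in file_paths_str.split(','):
        spec = spec.strip()
        if not spec:
            continue
        expanded = os.path.expanduser(spec)
        if '*' in expanded or '?' in expanded:
            resolved.extend(glob.glob(expanded, recursive=recursive))
        else:
            resolved.append(os.path.abspath(expanded))
    return resolved
```

For example, `resolve_search_paths("src/*.py,README.md")` yields the same path list the jinx then loads file by file before handing the chunks to its RAG lookup.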