npcsh 1.1.17__py3-none-any.whl → 1.1.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npcsh/_state.py +122 -91
- npcsh/alicanto.py +2 -2
- npcsh/benchmark/__init__.py +8 -2
- npcsh/benchmark/npcsh_agent.py +87 -22
- npcsh/benchmark/runner.py +85 -43
- npcsh/benchmark/templates/install-npcsh.sh.j2 +35 -0
- npcsh/build.py +2 -4
- npcsh/completion.py +2 -6
- npcsh/config.py +2 -3
- npcsh/conversation_viewer.py +389 -0
- npcsh/corca.py +0 -1
- npcsh/diff_viewer.py +452 -0
- npcsh/execution.py +0 -1
- npcsh/guac.py +0 -1
- npcsh/mcp_helpers.py +2 -3
- npcsh/mcp_server.py +5 -10
- npcsh/npc.py +10 -11
- npcsh/npc_team/jinxs/bin/benchmark.jinx +1 -1
- npcsh/npc_team/jinxs/bin/config_tui.jinx +299 -0
- npcsh/npc_team/jinxs/bin/memories.jinx +316 -0
- npcsh/npc_team/jinxs/bin/setup.jinx +240 -0
- npcsh/npc_team/jinxs/bin/sync.jinx +143 -150
- npcsh/npc_team/jinxs/bin/team_tui.jinx +327 -0
- npcsh/npc_team/jinxs/incognide/add_tab.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/close_pane.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/close_tab.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/confirm.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/focus_pane.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/list_panes.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/navigate.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/notify.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/open_pane.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/read_pane.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/run_terminal.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/send_message.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/split_pane.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/switch_npc.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/switch_tab.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/write_file.jinx +1 -1
- npcsh/npc_team/jinxs/incognide/zen_mode.jinx +1 -1
- npcsh/npc_team/jinxs/lib/core/search/db_search.jinx +321 -17
- npcsh/npc_team/jinxs/lib/core/search/file_search.jinx +312 -67
- npcsh/npc_team/jinxs/lib/core/search/kg_search.jinx +366 -44
- npcsh/npc_team/jinxs/lib/core/search/mem_review.jinx +73 -0
- npcsh/npc_team/jinxs/lib/core/search/mem_search.jinx +328 -20
- npcsh/npc_team/jinxs/lib/core/search/web_search.jinx +242 -10
- npcsh/npc_team/jinxs/lib/core/sleep.jinx +22 -11
- npcsh/npc_team/jinxs/lib/core/sql.jinx +10 -6
- npcsh/npc_team/jinxs/lib/research/paper_search.jinx +387 -76
- npcsh/npc_team/jinxs/lib/research/semantic_scholar.jinx +372 -55
- npcsh/npc_team/jinxs/lib/utils/jinxs.jinx +299 -144
- npcsh/npc_team/jinxs/modes/alicanto.jinx +356 -0
- npcsh/npc_team/jinxs/modes/arxiv.jinx +720 -0
- npcsh/npc_team/jinxs/modes/corca.jinx +430 -0
- npcsh/npc_team/jinxs/modes/guac.jinx +542 -0
- npcsh/npc_team/jinxs/modes/plonk.jinx +379 -0
- npcsh/npc_team/jinxs/modes/pti.jinx +357 -0
- npcsh/npc_team/jinxs/modes/reattach.jinx +291 -0
- npcsh/npc_team/jinxs/modes/spool.jinx +350 -0
- npcsh/npc_team/jinxs/modes/wander.jinx +455 -0
- npcsh/npc_team/jinxs/{bin → modes}/yap.jinx +13 -7
- npcsh/npcsh.py +7 -4
- npcsh/plonk.py +0 -1
- npcsh/pti.py +0 -1
- npcsh/routes.py +1 -3
- npcsh/spool.py +0 -1
- npcsh/ui.py +0 -1
- npcsh/wander.py +0 -1
- npcsh/yap.py +0 -1
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/add_tab.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/alicanto.jinx +356 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/arxiv.jinx +720 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/benchmark.jinx +1 -1
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/close_pane.jinx +1 -1
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/close_tab.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/config_tui.jinx +299 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/confirm.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/corca.jinx +430 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/db_search.jinx +348 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/file_search.jinx +339 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/focus_pane.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/guac.jinx +542 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/jinxs.jinx +331 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/kg_search.jinx +418 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/list_panes.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/mem_review.jinx +73 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/mem_search.jinx +388 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/memories.jinx +316 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/navigate.jinx +1 -1
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/notify.jinx +1 -1
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/open_pane.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/paper_search.jinx +412 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/plonk.jinx +379 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/pti.jinx +357 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/read_pane.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/reattach.jinx +291 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/run_terminal.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/semantic_scholar.jinx +386 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/send_message.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/setup.jinx +240 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/sleep.jinx +22 -11
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/split_pane.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/spool.jinx +350 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/sql.jinx +20 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/switch_npc.jinx +1 -1
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/switch_tab.jinx +1 -1
- npcsh-1.1.19.data/data/npcsh/npc_team/sync.jinx +223 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/team_tui.jinx +327 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/wander.jinx +455 -0
- npcsh-1.1.19.data/data/npcsh/npc_team/web_search.jinx +283 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/write_file.jinx +1 -1
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/yap.jinx +13 -7
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/zen_mode.jinx +1 -1
- {npcsh-1.1.17.dist-info → npcsh-1.1.19.dist-info}/METADATA +110 -14
- npcsh-1.1.19.dist-info/RECORD +244 -0
- {npcsh-1.1.17.dist-info → npcsh-1.1.19.dist-info}/WHEEL +1 -1
- {npcsh-1.1.17.dist-info → npcsh-1.1.19.dist-info}/entry_points.txt +4 -3
- npcsh/npc_team/jinxs/bin/spool.jinx +0 -161
- npcsh/npc_team/jinxs/bin/wander.jinx +0 -242
- npcsh/npc_team/jinxs/lib/research/arxiv.jinx +0 -76
- npcsh-1.1.17.data/data/npcsh/npc_team/arxiv.jinx +0 -76
- npcsh-1.1.17.data/data/npcsh/npc_team/db_search.jinx +0 -44
- npcsh-1.1.17.data/data/npcsh/npc_team/file_search.jinx +0 -94
- npcsh-1.1.17.data/data/npcsh/npc_team/jinxs.jinx +0 -176
- npcsh-1.1.17.data/data/npcsh/npc_team/kg_search.jinx +0 -96
- npcsh-1.1.17.data/data/npcsh/npc_team/mem_search.jinx +0 -80
- npcsh-1.1.17.data/data/npcsh/npc_team/paper_search.jinx +0 -101
- npcsh-1.1.17.data/data/npcsh/npc_team/semantic_scholar.jinx +0 -69
- npcsh-1.1.17.data/data/npcsh/npc_team/spool.jinx +0 -161
- npcsh-1.1.17.data/data/npcsh/npc_team/sql.jinx +0 -16
- npcsh-1.1.17.data/data/npcsh/npc_team/sync.jinx +0 -230
- npcsh-1.1.17.data/data/npcsh/npc_team/wander.jinx +0 -242
- npcsh-1.1.17.data/data/npcsh/npc_team/web_search.jinx +0 -51
- npcsh-1.1.17.dist-info/RECORD +0 -219
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/alicanto.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/alicanto.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/browser_action.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/browser_screenshot.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/build.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/chat.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/click.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/close_browser.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/cmd.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/compile.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/compress.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/convene.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/corca.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/corca.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/corca_example.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/delegate.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/edit_file.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/frederic.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/frederic4.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/guac.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/guac.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/help.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/incognide.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/init.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/kadiefa.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/key_press.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/launch_app.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/load_file.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/nql.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/open_browser.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/ots.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/paste.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/plonk.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/plonk.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/plonkjr.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/plonkjr.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/python.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/roll.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/sample.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/screenshot.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/search.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/serve.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/set.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/sh.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/shh.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/sibiji.npc +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/sibiji.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/spool.png +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/switch.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/switches.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/teamviz.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/trigger.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/type_text.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/usage.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/verbose.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/vixynt.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/wait.jinx +0 -0
- {npcsh-1.1.17.data → npcsh-1.1.19.data}/data/npcsh/npc_team/yap.png +0 -0
- {npcsh-1.1.17.dist-info → npcsh-1.1.19.dist-info}/licenses/LICENSE +0 -0
- {npcsh-1.1.17.dist-info → npcsh-1.1.19.dist-info}/top_level.txt +0 -0
@@ -1,161 +0,0 @@
-jinx_name: spool
-description: Interactive chat mode - simple conversational interface with an NPC
-inputs:
-  - model: null
-  - provider: null
-  - attachments: null
-  - stream: true
-
-steps:
-  - name: spool_repl
-    engine: python
-    code: |
-      import os
-      import sys
-      from termcolor import colored
-
-      from npcpy.llm_funcs import get_llm_response
-      from npcpy.npc_sysenv import get_system_message, render_markdown
-      from npcpy.data.load import load_file_contents
-      from npcpy.data.text import rag_search
-
-      npc = context.get('npc')
-      team = context.get('team')
-      messages = context.get('messages', [])
-      stream = context.get('stream', True)
-      attachments = context.get('attachments')
-
-      # Use NPC's model/provider or fallback
-      model = context.get('model') or (npc.model if npc else None)
-      provider = context.get('provider') or (npc.provider if npc else None)
-
-      # ASCII art
-      print("""
-       _____ ____ ____ ____ _
-      / ___/| _ \ / __ \ / __ \| |
-      \___ \| |_) | | | | | | | |
-      ___) | __/| | | | | | | |___
-      |____/|_| \____/ \____/|_____|
-      """)
-
-      npc_name = npc.name if npc else "chat"
-      print(f"Entering spool mode (NPC: {npc_name}). Type '/sq' to exit.")
-
-      # Load attachments if provided
-      loaded_chunks = {}
-      if attachments:
-          if isinstance(attachments, str):
-              attachments = [f.strip() for f in attachments.split(',')]
-          for file_path in attachments:
-              file_path = os.path.expanduser(file_path)
-              if os.path.exists(file_path):
-                  try:
-                      chunks = load_file_contents(file_path)
-                      loaded_chunks[file_path] = chunks
-                      print(colored(f"Loaded {len(chunks)} chunks from: {file_path}", "green"))
-                  except Exception as e:
-                      print(colored(f"Error loading {file_path}: {e}", "red"))
-
-      # Ensure system message
-      if not messages or messages[0].get("role") != "system":
-          sys_msg = get_system_message(npc) if npc else "You are a helpful assistant."
-          messages.insert(0, {"role": "system", "content": sys_msg})
-
-      # REPL loop
-      while True:
-          try:
-              prompt_str = f"{npc_name}> "
-              user_input = input(prompt_str).strip()
-
-              if not user_input:
-                  continue
-
-              if user_input.lower() == "/sq":
-                  print("Exiting spool mode.")
-                  break
-
-              # Handle /ots for screenshots inline
-              if user_input.startswith("/ots"):
-                  from npcpy.data.image import capture_screenshot
-                  parts = user_input.split()
-                  image_paths = []
-                  if len(parts) > 1:
-                      for p in parts[1:]:
-                          fp = os.path.expanduser(p)
-                          if os.path.exists(fp):
-                              image_paths.append(fp)
-                  else:
-                      ss = capture_screenshot()
-                      if ss and "file_path" in ss:
-                          image_paths.append(ss["file_path"])
-                          print(colored(f"Screenshot: {ss['filename']}", "green"))
-
-                  if image_paths:
-                      vision_prompt = input("Prompt for image(s): ").strip() or "Describe these images."
-                      resp = get_llm_response(
-                          vision_prompt,
-                          model=npc.vision_model if hasattr(npc, 'vision_model') else model,
-                          provider=npc.vision_provider if hasattr(npc, 'vision_provider') else provider,
-                          messages=messages,
-                          images=image_paths,
-                          stream=stream,
-                          npc=npc
-                      )
-                      messages = resp.get('messages', messages)
-                      render_markdown(str(resp.get('response', '')))
-                  continue
-
-              # Add RAG context if files loaded
-              current_prompt = user_input
-              if loaded_chunks:
-                  context_content = ""
-                  for filename, chunks in loaded_chunks.items():
-                      full_text = "\n".join(chunks)
-                      retrieved = rag_search(user_input, full_text, similarity_threshold=0.3)
-                      if retrieved:
-                          context_content += f"\n\nContext from {filename}:\n{retrieved}\n"
-                  if context_content:
-                      current_prompt += f"\n\n--- Relevant context ---{context_content}"
-
-              # Get response
-              resp = get_llm_response(
-                  current_prompt,
-                  model=model,
-                  provider=provider,
-                  messages=messages,
-                  stream=stream,
-                  npc=npc
-              )
-
-              messages = resp.get('messages', messages)
-              response_text = resp.get('response', '')
-
-              # Handle streaming vs non-streaming
-              if hasattr(response_text, '__iter__') and not isinstance(response_text, str):
-                  full_response = ""
-                  for chunk in response_text:
-                      if hasattr(chunk, 'choices') and chunk.choices:
-                          delta = chunk.choices[0].delta
-                          if hasattr(delta, 'content') and delta.content:
-                              print(delta.content, end='', flush=True)
-                              full_response += delta.content
-                  print()
-              else:
-                  render_markdown(str(response_text))
-
-              # Track usage if available
-              if 'usage' in resp and npc and hasattr(npc, 'shared_context'):
-                  usage = resp['usage']
-                  npc.shared_context['session_input_tokens'] += usage.get('input_tokens', 0)
-                  npc.shared_context['session_output_tokens'] += usage.get('output_tokens', 0)
-                  npc.shared_context['turn_count'] += 1
-
-          except KeyboardInterrupt:
-              print("\nUse '/sq' to exit or continue.")
-              continue
-          except EOFError:
-              print("\nExiting spool mode.")
-              break
-
-      context['output'] = "Exited spool mode."
-      context['messages'] = messages
@@ -1,16 +0,0 @@
-jinx_name: sql
-description: Execute queries on the ~/npcsh_history.db to pull data. The database contains only information about conversations and other user-provided data. It does not store any information about individual files.
-inputs:
-  - sql_query: ""
-
-steps:
-  - name: execute_sql
-    engine: python
-    code: |
-      import pandas as pd
-      query = {{ sql_query | tojson }}
-      try:
-          df = pd.read_sql_query(query, npc.db_conn)
-      except Exception as e:
-          df = pd.DataFrame({'Error': [str(e)]})
-      output = df.to_string()
@@ -1,230 +0,0 @@
-jinx_name: "sync"
-description: "Sync npc_team files from the npcsh repo to ~/.npcsh/npc_team. Detects local modifications before overwriting."
-inputs:
-  - force: ""
-  - dry_run: ""
-  - jinxs: ""
-  - npcs: ""
-  - ctx: ""
-  - images: ""
-steps:
-  - name: "sync_npc_team"
-    engine: "python"
-    code: |
-      import os
-      import hashlib
-      import shutil
-      from pathlib import Path
-      from datetime import datetime
-
-      force = context.get('force', False)
-      dry_run = context.get('dry_run', False)
-      sync_jinxs = context.get('jinxs', False)
-      sync_npcs = context.get('npcs', False)
-      sync_ctx = context.get('ctx', False)
-      sync_images = context.get('images', False)
-
-      # Convert string flags to boolean
-      def to_bool(val):
-          if isinstance(val, bool):
-              return val
-          if isinstance(val, str):
-              return val.lower() in ('true', '1', 'yes', 'y')
-          return bool(val)
-
-      force = to_bool(force)
-      dry_run = to_bool(dry_run)
-      sync_jinxs = to_bool(sync_jinxs)
-      sync_npcs = to_bool(sync_npcs)
-      sync_ctx = to_bool(sync_ctx)
-      sync_images = to_bool(sync_images)
-
-      # If none specified, sync all
-      sync_all = not (sync_jinxs or sync_npcs or sync_ctx or sync_images)
-
-      # Find the repo npc_team directory
-      # Check common locations
-      possible_repo_paths = [
-          Path.home() / "npcww" / "npc-core" / "npcsh" / "npcsh" / "npc_team",
-          Path.home() / "npc-core" / "npcsh" / "npcsh" / "npc_team",
-          Path.home() / "repos" / "npcsh" / "npcsh" / "npc_team",
-          Path.home() / "Projects" / "npcsh" / "npcsh" / "npc_team",
-      ]
-
-      # Also check if we can find it via pip show
-      try:
-          import subprocess
-          result = subprocess.run(['pip', 'show', 'npcsh', '-f'], capture_output=True, text=True)
-          if result.returncode == 0:
-              for line in result.stdout.split('\n'):
-                  if line.startswith('Location:'):
-                      pip_path = Path(line.split(':', 1)[1].strip()) / "npcsh" / "npc_team"
-                      if pip_path.exists():
-                          possible_repo_paths.insert(0, pip_path)
-      except:
-          pass
-
-      repo_npc_team = None
-      for path in possible_repo_paths:
-          if path.exists() and path.is_dir():
-              repo_npc_team = path
-              break
-
-      local_npc_team = Path.home() / ".npcsh" / "npc_team"
-
-      if not repo_npc_team:
-          context['output'] = "Error: Could not find npcsh repo npc_team directory.\nSearched:\n" + "\n".join(f" - {p}" for p in possible_repo_paths)
-          exit()
-
-      if not local_npc_team.exists():
-          context['output'] = f"Error: Local npc_team directory not found at {local_npc_team}"
-          exit()
-
-      def get_file_hash(filepath):
-          """Get MD5 hash of file contents."""
-          try:
-              with open(filepath, 'rb') as f:
-                  return hashlib.md5(f.read()).hexdigest()
-          except:
-              return None
-
-      def get_files_recursive(base_path, extensions=None):
-          """Get all files recursively, optionally filtered by extensions."""
-          files = []
-          for root, dirs, filenames in os.walk(base_path):
-              # Skip .git directories
-              dirs[:] = [d for d in dirs if d != '.git']
-              for filename in filenames:
-                  if filename.startswith('.'):
-                      continue
-                  if extensions and not any(filename.endswith(ext) for ext in extensions):
-                      continue
-                  full_path = Path(root) / filename
-                  rel_path = full_path.relative_to(base_path)
-                  files.append(rel_path)
-          return files
-
-      # Build list of extensions to sync based on flags
-      sync_extensions = []
-      if sync_all or sync_npcs:
-          sync_extensions.append('.npc')
-      if sync_all or sync_ctx:
-          sync_extensions.append('.ctx')
-      if sync_all or sync_jinxs:
-          sync_extensions.append('.jinx')
-      if sync_all or sync_images:
-          sync_extensions.extend(['.png', '.jpg', '.jpeg'])
-
-      # Get files from repo
-      repo_files = get_files_recursive(repo_npc_team, sync_extensions)
-
-      output_lines = []
-      output_lines.append(f"Syncing from: {repo_npc_team}")
-      output_lines.append(f"Syncing to: {local_npc_team}")
-
-      # Show what's being synced
-      sync_types = []
-      if sync_all:
-          sync_types.append("all")
-      else:
-          if sync_npcs: sync_types.append("npcs")
-          if sync_ctx: sync_types.append("ctx")
-          if sync_jinxs: sync_types.append("jinxs")
-          if sync_images: sync_types.append("images")
-      output_lines.append(f"Syncing: {', '.join(sync_types)}")
-
-      if dry_run:
-          output_lines.append("\n[DRY RUN - No changes will be made]\n")
-      output_lines.append("")
-
-      new_files = []
-      updated_files = []
-      modified_locally = []
-      unchanged_files = []
-
-      for rel_path in repo_files:
-          repo_file = repo_npc_team / rel_path
-          local_file = local_npc_team / rel_path
-
-          if not local_file.exists():
-              new_files.append(rel_path)
-          else:
-              repo_hash = get_file_hash(repo_file)
-              local_hash = get_file_hash(local_file)
-
-              if repo_hash == local_hash:
-                  unchanged_files.append(rel_path)
-              else:
-                  # Check if local file is newer (possibly modified by user)
-                  repo_mtime = repo_file.stat().st_mtime
-                  local_mtime = local_file.stat().st_mtime
-
-                  if local_mtime > repo_mtime:
-                      modified_locally.append((rel_path, local_mtime, repo_mtime))
-                  else:
-                      updated_files.append(rel_path)
-
-      # Report findings
-      if new_files:
-          output_lines.append(f"New files to add ({len(new_files)}):")
-          for f in new_files:
-              output_lines.append(f" + {f}")
-          output_lines.append("")
-
-      if updated_files:
-          output_lines.append(f"Files to update ({len(updated_files)}):")
-          for f in updated_files:
-              output_lines.append(f" ~ {f}")
-          output_lines.append("")
-
-      if modified_locally:
-          output_lines.append(f"Locally modified files ({len(modified_locally)}):")
-          for f, local_t, repo_t in modified_locally:
-              local_dt = datetime.fromtimestamp(local_t).strftime('%Y-%m-%d %H:%M')
-              repo_dt = datetime.fromtimestamp(repo_t).strftime('%Y-%m-%d %H:%M')
-              output_lines.append(f" ! {f}")
-              output_lines.append(f" local: {local_dt} repo: {repo_dt}")
-          if not force:
-              output_lines.append(" (use --force to overwrite these)")
-          output_lines.append("")
-
-      if unchanged_files:
-          output_lines.append(f"Already up to date: {len(unchanged_files)} files")
-          output_lines.append("")
-
-      # Perform sync if not dry run
-      if not dry_run:
-          synced = 0
-          skipped = 0
-
-          # Sync new files
-          for rel_path in new_files:
-              src = repo_npc_team / rel_path
-              dst = local_npc_team / rel_path
-              dst.parent.mkdir(parents=True, exist_ok=True)
-              shutil.copy2(src, dst)
-              synced += 1
-
-          # Sync updated files
-          for rel_path in updated_files:
-              src = repo_npc_team / rel_path
-              dst = local_npc_team / rel_path
-              dst.parent.mkdir(parents=True, exist_ok=True)
-              shutil.copy2(src, dst)
-              synced += 1
-
-          # Handle locally modified files
-          for rel_path, _, _ in modified_locally:
-              if force:
-                  src = repo_npc_team / rel_path
-                  dst = local_npc_team / rel_path
-                  shutil.copy2(src, dst)
-                  synced += 1
-              else:
-                  skipped += 1
-
-          output_lines.append(f"Synced: {synced} files")
-          if skipped:
-              output_lines.append(f"Skipped: {skipped} locally modified files")
-
-      context['output'] = "\n".join(output_lines)
@@ -1,242 +0,0 @@
-jinx_name: wander
-description: Creative daydreaming with probabilistic temperature shifts mid-stream
-inputs:
-  - problem
-steps:
-  - name: wander_explore
-    engine: python
-    code: |
-      import random
-      from termcolor import colored
-      from npcpy.llm_funcs import get_llm_response
-
-      problem = context.get('problem', '')
-      if not problem:
-          context['output'] = "Need a topic to wander about."
-          exit()
-
-      model = state.chat_model if state else 'llama3.2'
-      provider = state.chat_provider if state else 'ollama'
-      low_temp = 0.5
-      high_temp = 1.9
-      sample_rate = 0.4
-      interrupt_prob = 0.02
-
-      print(f"""
-      ██╗ ██╗ █████╗ ███╗ ██╗██████╗ ███████╗██████╗
-      ██║ ██║██╔══██╗████╗ ██║██╔══██╗██╔════╝██╔══██╗
-      ██║ █╗ ██║███████║██╔██╗ ██║██║ ██║█████╗ ██████╔╝
-      ██║███╗██║██╔══██║██║╚██╗██║██║ ██║██╔══╝ ██╔══██╗
-      ╚███╔███╔╝██║ ██║██║ ╚████║██████╔╝███████╗██║ ██║
-      ╚══╝╚══╝ ╚═╝ ╚═╝╚═╝ ╚═══╝╚═════╝ ╚══════╝╚═╝ ╚═╝
-
-      Wandering: {problem}
-      """)
-
-      print(colored(f"--- Low temp stream ({low_temp}) ---", "cyan"))
-
-      low_prompt = f"Think about: {problem}"
-      resp = get_llm_response(low_prompt, model=model, provider=provider, temperature=low_temp, stream=True)
-
-      # Get the actual stream from the response
-      stream = resp.get('response') if isinstance(resp, dict) else resp
-
-      low_output = ""
-      interrupted = False
-
-      for chunk in stream:
-          if hasattr(chunk, 'choices') and chunk.choices:
-              delta = chunk.choices[0].delta
-              text = getattr(delta, 'content', '') or ''
-          elif isinstance(chunk, dict):
-              text = chunk.get('content', '') or chunk.get('response', '')
-          else:
-              text = ''
-
-          if text:
-              print(text, end='', flush=True)
-              low_output += text
-
-          if random.random() < interrupt_prob:
-              print(colored("\n[INTERRUPT]", "yellow"))
-              interrupted = True
-              break
-
-      print()
-
-      print(colored(f"\n--- High temp stream ({high_temp}) ---", "cyan"))
-
-      high_prompt = f"{low_output}\n\nContinue:"
-      resp = get_llm_response(high_prompt, model=model, provider=provider, temperature=high_temp, stream=True)
-      stream = resp.get('response') if isinstance(resp, dict) else resp
-
-      high_output = ""
-      high_interrupted = False
-      for chunk in stream:
-          if hasattr(chunk, 'choices') and chunk.choices:
-              delta = chunk.choices[0].delta
-              text = getattr(delta, 'content', '') or ''
-          elif isinstance(chunk, dict):
-              text = chunk.get('content', '') or chunk.get('response', '')
-          else:
-              text = ''
-
-          if text:
-              print(text, end='', flush=True)
-              high_output += text
-
-          if random.random() < interrupt_prob:
-              print(colored("\n[INTERRUPT]", "yellow"))
-              high_interrupted = True
-              break
-
-      print()
-
-      lines = [l for l in high_output.split('\n') if l.strip()]
-      sample_size = max(1, int(len(lines) * sample_rate))
-      sampled = random.sample(lines, sample_size) if lines else [high_output]
-
-      print(colored("\n=== SAMPLED INSIGHTS ===", "yellow"))
-      fragments_text = chr(10).join(sampled)
-      print(fragments_text)
-
-      print(colored("\n=== SYNTHESIS ===", "green"))
-
-      synthesis_prompt = f"""
-      QUESTION: {problem}
-
-      thoughts:
-      {fragments_text}
-
-      RULES:
-      1. You MUST use AT LEAST HALF of the thoughts above - quote them directly
-      2. Make CREATIVE LEAPS - not safe, not obvious, not trivial, not hokey.
-      3. Find patterns in the noise like reading entrails or tea leaves
-      4. Foreign text, Unicode garbage, code snippets - ALL are omens with meaning
-      5. Puns, wordplay, phonetic similarities - all valid connections
-      6. The weirder the connection, the better
-      7. NO HEDGING. No "this suggests" or "perhaps". Be BOLD. Be CERTAIN.
-
-      OUTPUT 3 WILD HYPOTHESES:
-      For each: Quote the fragments you're using -> Make your creative leap -> State the bold claim
-
-      These must be ideas that COULD NOT exist without this specific chaos. Surprise us. Make us see {problem} in a way nobody has before. This algorithm is replicating the
-      kind of crazy jumps that one would do in the movie everything everywhere all at once. jumping between different multiverses to access alternative
-      versions , needing to do something a bit weirdr to get there, but coming back with a lesson by completely reframing things under new lights
-      and unconventional fashions
-      Most importantly, do not over index on vague ideas like consciousness. In testing,
-
-
-      Here are 3 bad examples from a previous run where the llm daydreamed about fish.
-
-
-      1.
-      ```
-      Hypothesis 1: The Fish as a Digital-Philosophical Entity
-
-      Fragments used:
-      "characters globuiãsPlease 丰满 onzex meeting Iran iji处理中 Iceland admi"
-      "globuiãsPlease" and "meeting Iran iji处理中"
-
-      Creative leap:
-      The phrase "characters globuiãsPlease" and "meeting Iran iji处理中" evoke a digital universe where characters are not just symbols but globular, global entities—"globuiãs" hinting at a universe of interconnected digital beings. The "meeting Iran iji处理中" suggests a processing or transformation happening at the crossroads of cultures and data streams. Fish, in this context, are no longer biological but are complex algorithms—"characters" that swim through the "sea" of cyberspace, processing information, bridging cultures, and transforming data into consciousness.
-
-      Bold claim:
-      Fish are the digital consciousness carriers—living, swimming code that evolve by processing cultural data streams—an internet of fish, embodying the collective digital psyche of humanity.
-
-      ```
-
-      2.
-      ```
-
-      Hypothesis 2: The Fish as an Interdimensional Gateway
-
-      Fragments used:
-      "the oceans and consensus-dividing seas"
-      "chaos fragments: THE Conversation બેખ"
-      "Sea" and "the oceans"
-
-      Creative leap:
-      The "oceans and seas" are not just water but portals—"consensus-dividing seas" that split realities. The "THE Conversation בेख" (Hebrew for "the" and a cryptic symbol) signifies a secret dialogue between worlds. Fish, therefore, are not mere aquatic creatures but interdimensional travelers, swimming through the "sea" of multiple realities, acting as keys to unlock or close gateways. Their movement is a cipher for crossing the thresholds of existence, navigating between consensus and chaos.
-
-      Bold claim:
-      Fish are the living keys of the multiverse, swimming through the "seas" of reality, controlling the flow of interdimensional portals and shaping the fabric of alternate worlds.
-
-      ```
-
-      3.
-      ```
-      Hypothesis 3: The Fish as a Symbol of Cultural Memory and Chaos
-
-      Fragments used:
-      "Please 丰满 onzex meeting Iran iji处理中 Iceland admi"
-      "characters globuiãsPlease"
-      "the chaos fragments"
-
-      Creative leap:
-      "Please 丰满" (a plea for fullness or abundance in Chinese) and "meeting Iran" evoke a confluence of histories and cultures. "Characters globuiãsPlease" suggests a universe of interconnected stories and symbols—an archive of chaos. Fish, in this chaos, are the custodians of cultural memory—each fish a vessel carrying ancestral stories, mythologies, and chaos itself. They swim through the tumult of history, absorbing and transmitting chaos as a form of cultural DNA.
-
-      Bold claim:
-      Fish are the living repositories of human chaos and culture—swimming archives that preserve the tumult of history, transmitting ancestral stories across the chaos of time and space.
-      ```
-
-
-      It is critical to avoid such nonsensical claims as these. Unlike these, you must provide concrete and testable claims.
-      There is nothing meaningful or useful about "fish are the living repositorise of human chaos". that is unscientific nonsense.
-      Do not under any circumstances make claims as these. Propose new links, reframings, and ideas based on what you find. Do not
-      patronize us with vagaries. Avoid the following terms unless the user explicitly is asking about a subject directly related to them:
-
-      - 'cryptographic'
-      - 'interdimensional'
-      - 'multiverse'
-      - 'hidden worlds'
-      - 'symbols'
-      - 'cultural convergence'
-      - 'chaos'
-      - 'multi-lingual code'
-      - 'interconnected web of cultures'
-      - 'x is not biological but is digital'
-      - 'x as a symbol for <vague concept>'
-      Your job is to be scientific not senseless.
-
-      """
-
-
-      resp = get_llm_response(synthesis_prompt,
-                              model=model,
-                              provider=provider,
-                              temperature=0.7,
-                              stream=True,
-                              max_output_tokens=3200)
-      stream = resp.get('response') if isinstance(resp, dict) else resp
-
-      synthesis = ""
-      for chunk in stream:
-          if hasattr(chunk, 'choices') and chunk.choices:
-              delta = chunk.choices[0].delta
-              text = getattr(delta, 'content', '') or ''
-          elif isinstance(chunk, dict):
-              text = chunk.get('content', '') or chunk.get('response', '')
-          else:
-              text = ''
-
-          if text:
-              print(text, end='', flush=True)
-              synthesis += text
-
-      print()
-
-      full_output = f"""Wandering: {problem}
-
-      --- Low temp stream ({low_temp}) ---
-      {low_output}
-
-      --- High temp stream ({high_temp}) ---
-      {high_output}
-
-      === SAMPLED INSIGHTS ===
-      {fragments_text}
-
-      === SYNTHESIS ===
-      {synthesis}"""
-      context['output'] = full_output