npcsh 1.1.4-py3-none-any.whl → 1.1.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npcsh/_state.py +470 -367
- npcsh/npc_team/corca_example.png +0 -0
- npcsh/npc_team/jinxs/{python_executor.jinx → code/python.jinx} +1 -1
- npcsh/npc_team/jinxs/{bash_executer.jinx → code/sh.jinx} +1 -2
- npcsh/npc_team/jinxs/code/sql.jinx +16 -0
- npcsh/npc_team/jinxs/modes/alicanto.jinx +88 -0
- npcsh/npc_team/jinxs/modes/corca.jinx +28 -0
- npcsh/npc_team/jinxs/modes/guac.jinx +46 -0
- npcsh/npc_team/jinxs/modes/plonk.jinx +57 -0
- npcsh/npc_team/jinxs/modes/pti.jinx +28 -0
- npcsh/npc_team/jinxs/modes/spool.jinx +40 -0
- npcsh/npc_team/jinxs/modes/wander.jinx +81 -0
- npcsh/npc_team/jinxs/modes/yap.jinx +25 -0
- npcsh/npc_team/jinxs/utils/breathe.jinx +20 -0
- npcsh/npc_team/jinxs/utils/core/build.jinx +65 -0
- npcsh/npc_team/jinxs/utils/core/compile.jinx +50 -0
- npcsh/npc_team/jinxs/utils/core/help.jinx +52 -0
- npcsh/npc_team/jinxs/utils/core/init.jinx +41 -0
- npcsh/npc_team/jinxs/utils/core/jinxs.jinx +32 -0
- npcsh/npc_team/jinxs/utils/core/set.jinx +40 -0
- npcsh/npc_team/jinxs/{edit_file.jinx → utils/edit_file.jinx} +1 -1
- npcsh/npc_team/jinxs/utils/flush.jinx +39 -0
- npcsh/npc_team/jinxs/utils/npc-studio.jinx +77 -0
- npcsh/npc_team/jinxs/utils/ots.jinx +61 -0
- npcsh/npc_team/jinxs/utils/plan.jinx +33 -0
- npcsh/npc_team/jinxs/utils/roll.jinx +66 -0
- npcsh/npc_team/jinxs/utils/sample.jinx +56 -0
- npcsh/npc_team/jinxs/utils/search.jinx +130 -0
- npcsh/npc_team/jinxs/utils/serve.jinx +29 -0
- npcsh/npc_team/jinxs/utils/sleep.jinx +116 -0
- npcsh/npc_team/jinxs/utils/trigger.jinx +36 -0
- npcsh/npc_team/jinxs/utils/vixynt.jinx +117 -0
- npcsh/npcsh.py +13 -11
- npcsh/routes.py +97 -1419
- npcsh-1.1.6.data/data/npcsh/npc_team/alicanto.jinx +88 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/breathe.jinx +20 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/build.jinx +65 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/compile.jinx +50 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/corca.jinx +28 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/corca_example.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/edit_file.jinx +1 -1
- npcsh-1.1.6.data/data/npcsh/npc_team/flush.jinx +39 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/guac.jinx +46 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/help.jinx +52 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/init.jinx +41 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/jinxs.jinx +32 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/npc-studio.jinx +77 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/ots.jinx +61 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/plan.jinx +33 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/plonk.jinx +57 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/pti.jinx +28 -0
- npcsh-1.1.4.data/data/npcsh/npc_team/python_executor.jinx → npcsh-1.1.6.data/data/npcsh/npc_team/python.jinx +1 -1
- npcsh-1.1.6.data/data/npcsh/npc_team/roll.jinx +66 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/sample.jinx +56 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/search.jinx +130 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/serve.jinx +29 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/set.jinx +40 -0
- npcsh-1.1.4.data/data/npcsh/npc_team/bash_executer.jinx → npcsh-1.1.6.data/data/npcsh/npc_team/sh.jinx +1 -2
- npcsh-1.1.6.data/data/npcsh/npc_team/sleep.jinx +116 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/spool.jinx +40 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/sql.jinx +16 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/trigger.jinx +36 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/vixynt.jinx +117 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/wander.jinx +81 -0
- npcsh-1.1.6.data/data/npcsh/npc_team/yap.jinx +25 -0
- {npcsh-1.1.4.dist-info → npcsh-1.1.6.dist-info}/METADATA +1 -10
- npcsh-1.1.6.dist-info/RECORD +124 -0
- npcsh/npc_team/jinxs/image_generation.jinx +0 -29
- npcsh/npc_team/jinxs/internet_search.jinx +0 -31
- npcsh/npc_team/jinxs/kg_search.jinx +0 -43
- npcsh/npc_team/jinxs/memory_search.jinx +0 -36
- npcsh/npc_team/jinxs/screen_cap.jinx +0 -25
- npcsh-1.1.4.data/data/npcsh/npc_team/image_generation.jinx +0 -29
- npcsh-1.1.4.data/data/npcsh/npc_team/internet_search.jinx +0 -31
- npcsh-1.1.4.data/data/npcsh/npc_team/kg_search.jinx +0 -43
- npcsh-1.1.4.data/data/npcsh/npc_team/memory_search.jinx +0 -36
- npcsh-1.1.4.data/data/npcsh/npc_team/screen_cap.jinx +0 -25
- npcsh-1.1.4.dist-info/RECORD +0 -78
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/alicanto.npc +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/alicanto.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/corca.npc +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/corca.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/foreman.npc +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/frederic.npc +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/frederic4.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/guac.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/kadiefa.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/plonk.npc +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/plonk.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/plonkjr.npc +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/plonkjr.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/sibiji.npc +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/sibiji.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/spool.png +0 -0
- {npcsh-1.1.4.data → npcsh-1.1.6.data}/data/npcsh/npc_team/yap.png +0 -0
- {npcsh-1.1.4.dist-info → npcsh-1.1.6.dist-info}/WHEEL +0 -0
- {npcsh-1.1.4.dist-info → npcsh-1.1.6.dist-info}/entry_points.txt +0 -0
- {npcsh-1.1.4.dist-info → npcsh-1.1.6.dist-info}/licenses/LICENSE +0 -0
- {npcsh-1.1.4.dist-info → npcsh-1.1.6.dist-info}/top_level.txt +0 -0
npcsh/npc_team/jinxs/utils/sleep.jinx
ADDED
@@ -0,0 +1,116 @@
+jinx_name: "sleep"
+description: "Evolve knowledge graph. Use --dream to also run creative synthesis."
+inputs:
+  - dream: False # Boolean flag to also run creative synthesis (dream process).
+  - ops: "" # Comma-separated list of operations to configure KG sleep process.
+  - model: "" # LLM model to use for KG evolution. Defaults to NPC's model.
+  - provider: "" # LLM provider to use for KG evolution. Defaults to NPC's provider.
+steps:
+  - name: "evolve_knowledge_graph"
+    engine: "python"
+    code: |
+      import os
+      import traceback
+      from npcpy.memory.command_history import CommandHistory, load_kg_from_db, save_kg_to_db
+      from npcpy.memory.knowledge_graph import kg_sleep_process, kg_dream_process
+      # Assuming render_markdown is available if needed for logging progress
+
+      is_dreaming = context.get('dream')
+      operations_str = context.get('ops')
+      llm_model = context.get('model')
+      llm_provider = context.get('provider')
+      output_messages = context.get('messages', [])
+      current_npc = context.get('npc')
+      current_team = context.get('team')
+
+      operations_config = None
+      if operations_str and isinstance(operations_str, str):
+          operations_config = [op.strip() for op in operations_str.split(',')]
+
+      # Fallback for model/provider if not explicitly set in Jinx inputs
+      if not llm_model and current_npc and current_npc.model:
+          llm_model = current_npc.model
+      if not llm_provider and current_npc and current_npc.provider:
+          llm_provider = current_npc.provider
+
+      # Final fallbacks (these would ideally come from npcsh._state config)
+      if not llm_model: llm_model = "gemini-1.5-pro" # Example default
+      if not llm_provider: llm_provider = "gemini" # Example default
+
+      team_name = current_team.name if current_team else "__none__"
+      npc_name = current_npc.name if isinstance(current_npc, type(None).__class__) else "__none__"
+      current_path = os.getcwd()
+      scope_str = f"Team: '{team_name}', NPC: '{npc_name}', Path: '{current_path}'"
+
+      # Assume render_markdown exists
+      # render_markdown(f"- Checking knowledge graph for scope: {scope_str}")
+
+      command_history = None
+      try:
+          db_path = os.getenv("NPCSH_DB_PATH", os.path.expanduser("~/npcsh_history.db"))
+          command_history = CommandHistory(db_path)
+          engine = command_history.engine
+      except Exception as e:
+          context['output'] = f"Error connecting to history database for KG access: {e}"
+          context['messages'] = output_messages
+          exit()
+
+      output_result = ""
+      try:
+          current_kg = load_kg_from_db(engine, team_name, npc_name, current_path)
+
+          if not current_kg or not current_kg.get('facts'):
+              output_msg = f"Knowledge graph for the current scope is empty. Nothing to process.\n"
+              output_msg += f" - Scope Checked: {scope_str}\n\n"
+              output_msg += "**Hint:** Have a conversation or run some commands first to build up knowledge in this specific context. The KG is unique to each combination of Team, NPC, and directory."
+              context['output'] = output_msg
+              context['messages'] = output_messages
+              exit()
+
+          original_facts = len(current_kg.get('facts', []))
+          original_concepts = len(current_kg.get('concepts', []))
+
+          process_type = "Sleep"
+          ops_display = f"with operations: {operations_config}" if operations_config else "with random operations"
+          # render_markdown(f"- Initiating sleep process {ops_display}")
+
+          evolved_kg, _ = kg_sleep_process(
+              existing_kg=current_kg,
+              model=llm_model,
+              provider=llm_provider,
+              npc=current_npc,
+              operations_config=operations_config
+          )
+
+          if is_dreaming:
+              process_type += " & Dream"
+              # render_markdown(f"- Initiating dream process on the evolved KG...")
+              evolved_kg, _ = kg_dream_process(
+                  existing_kg=evolved_kg,
+                  model=llm_model,
+                  provider=llm_provider,
+                  npc=current_npc
+              )
+
+          save_kg_to_db(engine, evolved_kg, team_name, npc_name, current_path) # Changed conn to engine
+
+          new_facts = len(evolved_kg.get('facts', []))
+          new_concepts = len(evolved_kg.get('concepts', []))
+
+          output_result = f"{process_type} process complete.\n"
+          output_result += f"- Facts: {original_facts} -> {new_facts} ({new_facts - original_facts:+})\n"
+          output_result += f"- Concepts: {original_concepts} -> {new_concepts} ({new_concepts - original_concepts:+})"
+
+          print('Evolved facts:', evolved_kg.get('facts'))
+          print('Evolved concepts:', evolved_kg.get('concepts'))
+
+          context['output'] = output_result
+          context['messages'] = output_messages
+
+      except Exception as e:
+          traceback.print_exc()
+          context['output'] = f"Error during KG evolution process: {e}"
+          context['messages'] = output_messages
+      finally:
+          if command_history: # Check if it was successfully initialized
+              command_history.close()
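For orientation, the step above reads all of its inputs from a context dict. A minimal sketch of that contract, with hypothetical values (the keys mirror the context.get(...) calls in the jinx code above; this is not an official API reference):

    # Hypothetical context handed to the "evolve_knowledge_graph" step;
    # keys come from the jinx code above, values are made up for illustration.
    context = {
        "dream": True,          # run kg_dream_process after kg_sleep_process
        "ops": "prune, link",   # comma-separated operations string
        "model": "",            # empty -> falls back to the NPC's model
        "provider": "",
        "messages": [],
        "npc": None,
        "team": None,
    }

    # The jinx turns the ops string into a list exactly like this:
    operations_config = (
        [op.strip() for op in context["ops"].split(",")]
        if context["ops"] and isinstance(context["ops"], str) else None
    )
    print(operations_config)  # ['prune', 'link']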
npcsh/npc_team/jinxs/utils/trigger.jinx
ADDED
@@ -0,0 +1,36 @@
+jinx_name: "trigger"
+description: "Execute a trigger command"
+inputs:
+  - trigger_description: "" # Required description of the trigger to execute.
+steps:
+  - name: "execute_trigger"
+    engine: "python"
+    code: |
+      import traceback
+      from npcpy.work.trigger import execute_trigger_command
+
+      trigger_description = context.get('trigger_description')
+      output_messages = context.get('messages', [])
+
+      if not trigger_description or not trigger_description.strip():
+          context['output'] = "Usage: /trigger <trigger_description>"
+          context['messages'] = output_messages
+          exit()
+
+      try:
+          # Pass all current context as kwargs to execute_trigger_command
+          result = execute_trigger_command(command=trigger_description, **context)
+
+          if isinstance(result, dict):
+              context['output'] = result.get('output', 'Trigger executed.')
+              context['messages'] = result.get('messages', output_messages)
+          else:
+              context['output'] = str(result)
+              context['messages'] = output_messages
+      except NameError:
+          context['output'] = "Trigger function (execute_trigger_command) not available."
+          context['messages'] = output_messages
+      except Exception as e:
+          traceback.print_exc()
+          context['output'] = f"Error executing trigger: {e}"
+          context['messages'] = output_messages
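The try block above accepts either a dict or a bare value back from execute_trigger_command. A small self-contained sketch of that normalization (the helper name is introduced here for illustration and is not part of npcpy):

    # Illustrative normalization mirroring trigger.jinx's handling of the
    # result returned by execute_trigger_command (dict vs. anything else).
    def normalize_trigger_result(result, fallback_messages):
        if isinstance(result, dict):
            return result.get('output', 'Trigger executed.'), result.get('messages', fallback_messages)
        return str(result), fallback_messages

    print(normalize_trigger_result({'output': 'done'}, []))  # ('done', [])
    print(normalize_trigger_result(42, []))                  # ('42', [])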
npcsh/npc_team/jinxs/utils/vixynt.jinx
ADDED
@@ -0,0 +1,117 @@
+jinx_name: "vixynt"
+description: "Generates images from text descriptions or edits existing ones."
+inputs:
+  - prompt
+  - model: ""
+  - provider: ""
+  - output_name: ""
+  - attachments: ""
+  - n_images: 1
+  - height: 1024
+  - width: 1024
+steps:
+  - name: "generate_or_edit_image"
+    engine: "python"
+    code: |
+      import os
+      import base64
+      from io import BytesIO
+      from datetime import datetime
+      from PIL import Image
+      from npcpy.llm_funcs import gen_image
+
+      # Extract inputs from context
+      image_prompt = context.get('prompt', '').strip()
+      output_name = context.get('output_name')
+      attachments_str = context.get('attachments')
+      n_images = int(context.get('n_images', 1))
+      height = int(context.get('height', 1024))
+      width = int(context.get('width', 1024))
+      model = context.get('model')
+      provider = context.get('provider')
+
+      input_images = []
+      if attachments_str and attachments_str.strip():
+          input_images = [p.strip() for p in attachments_str.split(',')]
+
+      # Use NPC's model/provider as fallback
+      if not model and npc and npc.model:
+          model = npc.model
+      if not provider and npc and npc.provider:
+          provider = npc.provider
+
+      # Final fallbacks
+      if not model:
+          model = "runwayml/stable-diffusion-v1-5"
+      if not provider:
+          provider = "diffusers"
+
+      output_messages = context.get('messages', [])
+
+      if not image_prompt:
+          context['output'] = "Error: No prompt provided for image generation."
+          context['messages'] = output_messages
+          exit()
+
+      try:
+          # Generate image(s)
+          result = gen_image(
+              prompt=image_prompt,
+              model=model,
+              provider=provider,
+              npc=npc,
+              height=height,
+              width=width,
+              n_images=n_images,
+              input_images=input_images if input_images else None
+          )
+
+          # Ensure we have a list of images
+          if not isinstance(result, list):
+              images_list = [result] if result is not None else []
+          else:
+              images_list = result
+
+          saved_files = []
+
+          for i, image in enumerate(images_list):
+              if image is None:
+                  continue
+
+              # Determine output filename
+              if output_name and output_name.strip():
+                  base_name, ext = os.path.splitext(os.path.expanduser(output_name))
+                  if not ext:
+                      ext = ".png"
+                  current_output_file = f"{base_name}_{i}{ext}" if len(images_list) > 1 else f"{base_name}{ext}"
+              else:
+                  os.makedirs(os.path.expanduser("~/.npcsh/images/"), exist_ok=True)
+                  current_output_file = (
+                      os.path.expanduser("~/.npcsh/images/")
+                      + f"image_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{i}.png"
+                  )
+
+              # Save image to file
+              image.save(current_output_file)
+              saved_files.append(current_output_file)
+
+          if saved_files:
+              if input_images:
+                  output = f"Image(s) edited and saved to: {', '.join(saved_files)}"
+              else:
+                  output = f"Image(s) generated and saved to: {', '.join(saved_files)}"
+
+              # DO NOT include base64 data - just reference the file paths
+              output += f"\n\nThe image files have been saved and are ready to view."
+          else:
+              output = "No images were generated."
+
+      except Exception as e:
+          import traceback
+          traceback.print_exc()
+          output = f"Error {'editing' if input_images else 'generating'} image: {str(e)}"
+
+      context['output'] = output
+      context['messages'] = output_messages
+      context['model'] = model
+      context['provider'] = provider
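The file-naming branch above can be exercised on its own. A standalone sketch of that logic (the output_path helper is a name introduced here for illustration): an explicit output_name gets an index suffix when several images are produced, otherwise files land in ~/.npcsh/images/ with a timestamped name.

    import os
    from datetime import datetime

    # Standalone rewrite of vixynt.jinx's output-file naming branch.
    def output_path(output_name, i, n_images):
        if output_name and output_name.strip():
            base, ext = os.path.splitext(os.path.expanduser(output_name))
            ext = ext or ".png"
            return f"{base}_{i}{ext}" if n_images > 1 else f"{base}{ext}"
        stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        return os.path.expanduser("~/.npcsh/images/") + f"image_{stamp}_{i}.png"

    print(output_path("cat.png", 0, 2))  # cat_0.png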
npcsh/npcsh.py
CHANGED
@@ -21,7 +21,6 @@ from npcpy.memory.knowledge_graph import (
     kg_evolve_incremental
 )

-from npcsh.routes import router
 try:
     import readline
 except:
@@ -46,10 +45,6 @@ from npcsh._state import (


 def print_welcome_message():
-    '''
-    function for printing npcsh graphic
-    '''
-
     print(
         """
 ___________________________________________
@@ -75,8 +70,9 @@ Begin by asking a question, issuing a bash command, or typing '/help' for more i
 )


-def run_repl(command_history: CommandHistory, initial_state: ShellState):
+def run_repl(command_history: CommandHistory, initial_state: ShellState, router):
     state = initial_state
+
     print_welcome_message()

     render_markdown(f'- Using {state.current_mode} mode. Use /agent, /cmd, or /chat to switch to other modes')
@@ -85,7 +81,7 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState):

     is_windows = platform.system().lower().startswith("win")
     try:
-        completer = make_completer(state)
+        completer = make_completer(state, router)
         readline.set_completer(completer)
     except:
         pass
@@ -157,7 +153,7 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState):
            state.messages = [{"role": "system", "content": f"Session context: {compressed_state}"}]

        try:
-            completer = make_completer(state)
+            completer = make_completer(state, router)
            readline.set_completer(completer)
        except:
            pass
@@ -229,6 +225,8 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState):


 def main() -> None:
+    from npcsh.routes import router
+
     parser = argparse.ArgumentParser(description="npcsh - An NPC-powered shell.")
     parser.add_argument(
         "-v", "--version", action="version", version=f"npcsh version {VERSION}"
@@ -239,13 +237,17 @@ def main() -> None:
     args = parser.parse_args()

     command_history, team, default_npc = setup_shell()
+
+    if team and hasattr(team, 'jinxs_dict'):
+        for jinx_name, jinx_obj in team.jinxs_dict.items():
+            router.register_jinx(jinx_obj)

     initial_state.npc = default_npc
     initial_state.team = team
     if args.command:
         state = initial_state
         state.current_path = os.getcwd()
-        final_state, output = execute_command(args.command, state)
+        final_state, output = execute_command(args.command, state, router=router, command_history=command_history)
         if final_state.stream_output:
             for chunk in output:
                 print(str(chunk), end='')
@@ -253,7 +255,7 @@ def main() -> None:
         elif output is not None:
             print(output)
     else:
-        run_repl(command_history, initial_state)
-
+        run_repl(command_history, initial_state, router)
+
 if __name__ == "__main__":
     main()
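The net effect of these npcsh.py changes is that the router is no longer imported at module level; main() imports it lazily, registers every jinx from the team's jinxs_dict on it, and passes it through run_repl, make_completer, and execute_command. A self-contained sketch of that register-then-inject pattern (the Router and Jinx classes below are stand-ins for illustration, not npcsh's real classes):

    # Stand-in classes illustrating the registration loop added in main().
    class Jinx:
        def __init__(self, jinx_name):
            self.jinx_name = jinx_name

    class Router:
        def __init__(self):
            self.jinxs = {}
        def register_jinx(self, jinx):
            # indexed by name so completion and command dispatch can look jinxs up
            self.jinxs[jinx.jinx_name] = jinx

    router = Router()
    team_jinxs_dict = {name: Jinx(name) for name in ("sleep", "trigger", "vixynt")}
    for jinx_name, jinx_obj in team_jinxs_dict.items():
        router.register_jinx(jinx_obj)

    print(sorted(router.jinxs))  # ['sleep', 'trigger', 'vixynt']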