npcsh 1.1.8__py3-none-any.whl → 1.1.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. npcsh/_state.py +52 -9
  2. npcsh/guac.py +6 -4
  3. npcsh/npc_team/jinxs/utils/compress.jinx +140 -0
  4. npcsh/npc_team/jinxs/utils/load_file.jinx +35 -0
  5. npcsh/npc_team/jinxs/utils/serve.jinx +0 -3
  6. npcsh/npc_team/jinxs/utils/trigger.jinx +39 -14
  7. npcsh/npcsh.py +2 -1
  8. npcsh/routes.py +0 -1
  9. npcsh-1.1.10.data/data/npcsh/npc_team/compress.jinx +140 -0
  10. npcsh-1.1.10.data/data/npcsh/npc_team/load_file.jinx +35 -0
  11. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/serve.jinx +0 -3
  12. npcsh-1.1.10.data/data/npcsh/npc_team/trigger.jinx +61 -0
  13. {npcsh-1.1.8.dist-info → npcsh-1.1.10.dist-info}/METADATA +1 -1
  14. npcsh-1.1.10.dist-info/RECORD +122 -0
  15. npcsh/npc_team/jinxs/utils/breathe.jinx +0 -20
  16. npcsh/npc_team/jinxs/utils/flush.jinx +0 -39
  17. npcsh/npc_team/jinxs/utils/plan.jinx +0 -33
  18. npcsh-1.1.8.data/data/npcsh/npc_team/breathe.jinx +0 -20
  19. npcsh-1.1.8.data/data/npcsh/npc_team/flush.jinx +0 -39
  20. npcsh-1.1.8.data/data/npcsh/npc_team/plan.jinx +0 -33
  21. npcsh-1.1.8.data/data/npcsh/npc_team/trigger.jinx +0 -36
  22. npcsh-1.1.8.dist-info/RECORD +0 -124
  23. npcsh/npc_team/jinxs/{utils → npc_studio}/npc-studio.jinx +0 -0
  24. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/alicanto.jinx +0 -0
  25. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/alicanto.npc +0 -0
  26. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/alicanto.png +0 -0
  27. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/build.jinx +0 -0
  28. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/compile.jinx +0 -0
  29. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/corca.jinx +0 -0
  30. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/corca.npc +0 -0
  31. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/corca.png +0 -0
  32. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/corca_example.png +0 -0
  33. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/edit_file.jinx +0 -0
  34. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/foreman.npc +0 -0
  35. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/frederic.npc +0 -0
  36. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/frederic4.png +0 -0
  37. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/guac.jinx +0 -0
  38. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/guac.png +0 -0
  39. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/help.jinx +0 -0
  40. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/init.jinx +0 -0
  41. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/jinxs.jinx +0 -0
  42. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
  43. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/kadiefa.png +0 -0
  44. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/npc-studio.jinx +0 -0
  45. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
  46. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
  47. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/ots.jinx +0 -0
  48. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/plonk.jinx +0 -0
  49. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/plonk.npc +0 -0
  50. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/plonk.png +0 -0
  51. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/plonkjr.npc +0 -0
  52. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/plonkjr.png +0 -0
  53. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/pti.jinx +0 -0
  54. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/python.jinx +0 -0
  55. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/roll.jinx +0 -0
  56. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/sample.jinx +0 -0
  57. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/search.jinx +0 -0
  58. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/set.jinx +0 -0
  59. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/sh.jinx +0 -0
  60. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/sibiji.npc +0 -0
  61. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/sibiji.png +0 -0
  62. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/sleep.jinx +0 -0
  63. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/spool.jinx +0 -0
  64. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/spool.png +0 -0
  65. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/sql.jinx +0 -0
  66. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/vixynt.jinx +0 -0
  67. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/wander.jinx +0 -0
  68. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/yap.jinx +0 -0
  69. {npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/yap.png +0 -0
  70. {npcsh-1.1.8.dist-info → npcsh-1.1.10.dist-info}/WHEEL +0 -0
  71. {npcsh-1.1.8.dist-info → npcsh-1.1.10.dist-info}/entry_points.txt +0 -0
  72. {npcsh-1.1.8.dist-info → npcsh-1.1.10.dist-info}/licenses/LICENSE +0 -0
  73. {npcsh-1.1.8.dist-info → npcsh-1.1.10.dist-info}/top_level.txt +0 -0
npcsh/_state.py CHANGED
@@ -111,6 +111,7 @@ except importlib.metadata.PackageNotFoundError:
     VERSION = "unknown"
 
 
+from litellm import RateLimitError
 
 
 NPCSH_CHAT_MODEL = os.environ.get("NPCSH_CHAT_MODEL", "gemma3:4b")
@@ -2108,10 +2109,23 @@ def process_pipeline_command(
     exec_provider = provider_override or npc_provider or state.chat_provider
 
     if cmd_to_process.startswith("/"):
-        with SpinnerContext(
-            f"Routing to {cmd_to_process.split()[0]}",
-            style="arrow"
-        ):
+        command_name = cmd_to_process.split()[0].lstrip('/')
+
+        # Check if this is an interactive mode by looking for the jinx file in modes/
+        is_interactive_mode = False
+
+        # Check global modes
+        global_modes_jinx = os.path.expanduser(f'~/.npcsh/npc_team/jinxs/modes/{command_name}.jinx')
+        if os.path.exists(global_modes_jinx):
+            is_interactive_mode = True
+
+        # Check team modes
+        if not is_interactive_mode and state.team and state.team.team_path:
+            team_modes_jinx = os.path.join(state.team.team_path, 'jinxs', 'modes', f'{command_name}.jinx')
+            if os.path.exists(team_modes_jinx):
+                is_interactive_mode = True
+
+        if is_interactive_mode:
             result = execute_slash_command(
                 cmd_to_process,
                 stdin_input,
@@ -2119,8 +2133,19 @@
                 stream_final,
                 router
             )
+        else:
+            with SpinnerContext(
+                f"Routing to {cmd_to_process.split()[0]}",
+                style="arrow"
+            ):
+                result = execute_slash_command(
+                    cmd_to_process,
+                    stdin_input,
+                    state,
+                    stream_final,
+                    router
+                )
         return result
-
     cmd_parts = parse_command_safely(cmd_to_process)
     if not cmd_parts:
         return state, stdin_input
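
The routing change above treats a slash command as an interactive mode whenever a matching jinx file exists under a modes/ directory, checking the global ~/.npcsh tree before the active team's. A minimal sketch of that lookup, using resolve_mode_jinx as a hypothetical helper (the package inlines these checks rather than exposing such a function):

    import os

    def resolve_mode_jinx(command_name, team_path=None):
        # Hypothetical helper mirroring the inlined checks: the global
        # modes directory wins, then the team's own jinxs/modes directory.
        global_jinx = os.path.expanduser(f"~/.npcsh/npc_team/jinxs/modes/{command_name}.jinx")
        if os.path.exists(global_jinx):
            return global_jinx
        if team_path:
            team_jinx = os.path.join(team_path, "jinxs", "modes", f"{command_name}.jinx")
            if os.path.exists(team_jinx):
                return team_jinx
        return None

    # A command such as "/guac ..." skips the routing spinner only when
    # resolve_mode_jinx("guac", team_path) returns a path.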
@@ -2239,7 +2264,7 @@
             images=state.attachments,
             stream=stream_final,
             context=info,
-            extra_globals=application_globals_for_jinx # NOW PASS IT
+            extra_globals=application_globals_for_jinx
         )
         if not review:
             if isinstance(llm_result, dict):
@@ -2489,7 +2514,23 @@ def execute_command(
                 f" → Passing to stage {stage_num + 1}",
                 "blue"
             ))
-
+    except RateLimitError:
+        print(colored('Rate Limit Exceeded'))
+        # wait 30 seconds then truncate messages/condense context with breathing mechanism
+        # for now just limit to first plus last 10
+        messages = current_state.messages[0:1] + current_state.messages[-2:]
+        current_state.messages = messages
+        #retry
+        import time
+        print('sleeping...')
+        print(current_state)
+        print(current_state.messages)
+        time.sleep(30)
+
+
+        return execute_command(command, current_state, review=review, router=router,)
+
+
     except Exception as pipeline_error:
         import traceback
         traceback.print_exc()
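
The new RateLimitError handler trims the running conversation to the first message plus the two most recent, sleeps 30 seconds, and retries the whole command (note that the inline comment mentions keeping the last 10 messages while the slice keeps the last 2). A standalone sketch of the truncation, assuming an OpenAI-style list of role/content dicts:

    def truncate_for_retry(messages, keep_last=2):
        # Keep the opening message (typically the system prompt) plus the
        # `keep_last` most recent messages, mirroring the slice above.
        return messages[0:1] + messages[-keep_last:]

    history = [{"role": "system", "content": "You are a helpful shell."}]
    history += [{"role": "user", "content": f"msg {i}"} for i in range(10)]
    print(len(truncate_for_retry(history)))  # 3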
@@ -2641,10 +2682,12 @@ def setup_shell() -> Tuple[CommandHistory, Team, Optional[NPC]]:
         if use_jinxs == "c":
             global_jinxs_dir = os.path.expanduser("~/.npcsh/npc_team/jinxs")
             if os.path.exists(global_jinxs_dir):
-                shutil.copytree(global_jinxs_dir, team_dir, dirs_exist_ok=True)
+                # Create the 'jinxs' subfolder within the new team's directory
+                destination_jinxs_dir = os.path.join(team_dir, "jinxs")
+                os.makedirs(destination_jinxs_dir, exist_ok=True)
+                shutil.copytree(global_jinxs_dir, destination_jinxs_dir, dirs_exist_ok=True)
         else:
             team_ctx_data["use_global_jinxs"] = True
-
         with open(ctx_path, "w") as f:
             yaml.dump(team_ctx_data, f)
     else:
npcsh/guac.py CHANGED
@@ -1376,10 +1376,12 @@ def get_guac_prompt_char(command_count: int, guac_refresh_period = 100) -> str:
 def execute_guac_command(command: str, state: ShellState, locals_dict: Dict[str, Any], project_name: str, src_dir: Path, router) -> Tuple[ShellState, Any]:
     stripped_command = command.strip()
     output = None
-    cmd_parts = shlex.split(stripped_command)
-    if cmd_parts and cmd_parts[0] in ["cd", "ls", "pwd"]:
-        return execute_command(stripped_command, state, review=False, router=router)
-
+    try:
+        cmd_parts = shlex.split(stripped_command)
+        if cmd_parts and cmd_parts[0] in ["cd", "ls", "pwd"]:
+            return execute_command(stripped_command, state, review=False, router=router)
+    except Exception as e:
+        pass
     npc_team_dir = Path(state.team.team_path) if state.team and hasattr(state.team, 'team_path') else Path.cwd() / "npc_team"
 
     if not stripped_command:
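
The try/except added here matters because shlex.split raises ValueError on unbalanced quoting, so an unfinished quote typed at the guac prompt previously aborted command handling instead of falling through to the normal Python/NPC path:

    import shlex

    try:
        shlex.split('print("unterminated')
    except ValueError as e:
        print(e)  # No closing quotation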
npcsh/npc_team/jinxs/utils/compress.jinx ADDED
@@ -0,0 +1,140 @@
+jinx_name: "compress"
+description: "Manages conversation and knowledge context. Defaults to compacting context. Use flags for other operations."
+inputs:
+  - flush: ""      # The number of recent messages to flush.
+  - sleep: False   # If true, evolves the knowledge graph.
+  - dream: False   # Used with --sleep. Runs creative synthesis.
+  - ops: ""        # Used with --sleep. Comma-separated list of KG operations.
+  - model: ""      # Used with --sleep. LLM model for KG evolution.
+  - provider: ""   # Used with --sleep. LLM provider for KG evolution.
+steps:
+  - name: "manage_context_and_memory"
+    engine: "python"
+    code: |
+      import os
+      import traceback
+      from npcpy.llm_funcs import breathe
+      from npcpy.memory.command_history import CommandHistory, load_kg_from_db, save_kg_to_db
+      from npcpy.memory.knowledge_graph import kg_sleep_process, kg_dream_process
+
+      # --- Get all inputs from context ---
+      flush_n_str = context.get('flush')
+      is_sleeping = context.get('sleep')
+      is_dreaming = context.get('dream')
+      operations_str = context.get('ops')
+      llm_model = context.get('model')
+      llm_provider = context.get('provider')
+      output_messages = context.get('messages', [])
+
+      USAGE = """Usage:
+      /compress                    (Compacts conversation context)
+      /compress --flush <number>   (Removes the last N messages)
+      /compress --sleep [...]      (Evolves the knowledge graph)
+          --dream                  (With --sleep: enables creative synthesis)
+          --ops "op1,op2"          (With --sleep: specifies KG operations)
+          --model <name>           (With --sleep: specifies LLM model)
+          --provider <name>        (With --sleep: specifies LLM provider)"""
+
+      # --- Argument Validation: Ensure mutual exclusivity ---
+      is_flushing = flush_n_str is not None and flush_n_str.strip() != ''
+      if is_sleeping and is_flushing:
+          context['output'] = f"Error: --sleep and --flush are mutually exclusive.\n{USAGE}"
+          context['messages'] = output_messages
+          exit()
+
+      # --- Dispatcher: Route to the correct functionality ---
+
+      # 1. SLEEP: Evolve the Knowledge Graph
+      if is_sleeping:
+          current_npc = context.get('npc')
+          current_team = context.get('team')
+
+          # Parameter setup for KG process
+          operations_config = [op.strip() for op in operations_str.split(',')] if operations_str else None
+          if not llm_model and current_npc: llm_model = current_npc.model
+          if not llm_provider and current_npc: llm_provider = current_npc.provider
+          if not llm_model: llm_model = "gemini-1.5-pro"
+          if not llm_provider: llm_provider = "gemini"
+
+          team_name = current_team.name if current_team else "__none__"
+          npc_name = current_npc.name if current_npc else "__none__"
+          current_path = os.getcwd()
+          scope_str = f"Team: '{team_name}', NPC: '{npc_name}', Path: '{current_path}'"
+
+          command_history = None
+          try:
+              db_path = os.getenv("NPCSH_DB_PATH", os.path.expanduser("~/npcsh_history.db"))
+              command_history = CommandHistory(db_path)
+              engine = command_history.engine
+              current_kg = load_kg_from_db(engine, team_name, npc_name, current_path)
+
+              if not current_kg or not current_kg.get('facts'):
+                  context['output'] = f"Knowledge graph for the current scope is empty. Nothing to process.\n- Scope: {scope_str}"
+                  exit()
+
+              original_facts = len(current_kg.get('facts', []))
+              original_concepts = len(current_kg.get('concepts', []))
+
+              evolved_kg, _ = kg_sleep_process(existing_kg=current_kg, model=llm_model, provider=llm_provider, npc=current_npc, operations_config=operations_config)
+              process_type = "Sleep"
+
+              if is_dreaming:
+                  evolved_kg, _ = kg_dream_process(existing_kg=evolved_kg, model=llm_model, provider=llm_provider, npc=current_npc)
+                  process_type += " & Dream"
+
+              save_kg_to_db(engine, evolved_kg, team_name, npc_name, current_path)
+
+              new_facts = len(evolved_kg.get('facts', []))
+              new_concepts = len(evolved_kg.get('concepts', []))
+
+              context['output'] = (f"{process_type} process complete.\n"
+                                   f"- Facts: {original_facts} -> {new_facts} ({new_facts - original_facts:+})\n"
+                                   f"- Concepts: {original_concepts} -> {new_concepts} ({new_concepts - original_concepts:+})")
+          except Exception as e:
+              traceback.print_exc()
+              context['output'] = f"Error during KG evolution: {e}"
+          finally:
+              if command_history: command_history.close()
+              context['messages'] = output_messages
+
+      # 2. FLUSH: Remove messages from context
+      elif is_flushing:
+          try:
+              n = int(flush_n_str)
+              if n <= 0:
+                  context['output'] = "Error: Number of messages to flush must be positive."
+                  exit()
+          except ValueError:
+              context['output'] = f"Error: Invalid number '{flush_n_str}'. {USAGE}"
+              exit()
+
+          messages_list = list(output_messages)
+          original_len = len(messages_list)
+          final_messages = []
+
+          if messages_list and messages_list[0].get("role") == "system":
+              system_message = messages_list.pop(0)
+              num_to_remove = min(n, len(messages_list))
+              final_messages = [system_message] + messages_list[:-num_to_remove]
+          else:
+              num_to_remove = min(n, original_len)
+              final_messages = messages_list[:-num_to_remove]
+
+          removed_count = original_len - len(final_messages)
+          context['output'] = f"Flushed {removed_count} message(s). Context is now {len(final_messages)} messages."
+          context['messages'] = final_messages
+
+      # 3. DEFAULT: Compact conversation context
+      else:
+          try:
+              result = breathe(**context)
+              if isinstance(result, dict):
+                  context['output'] = result.get('output', 'Context compressed.')
+                  context['messages'] = result.get('messages', output_messages)
+              else:
+                  context['output'] = "Context compression process initiated."
+                  context['messages'] = output_messages
+          except Exception as e:
+              traceback.print_exc()
+              context['output'] = f"Error during context compression: {e}"
+              context['messages'] = output_messages
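
In a session the new jinx maps onto slash commands; plausible invocations following the USAGE string above (the operation names passed to --ops are illustrative):

    /compress
    /compress --flush 5
    /compress --sleep --dream --ops "prune,merge" --model gemini-1.5-pro --provider gemini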
npcsh/npc_team/jinxs/utils/load_file.jinx ADDED
@@ -0,0 +1,35 @@
+jinx_name: load_file
+description: Loads and returns the contents of a file using npcpy's file loaders
+inputs:
+  - file_path
+steps:
+  - name: "load_file"
+    engine: "python"
+    code: |
+      import os
+      from npcpy.data.load import load_file_contents
+
+      # Expand user path and get absolute path
+      file_path = os.path.expanduser("{{ file_path }}")
+
+      # Check if file exists
+      if not os.path.exists(file_path):
+          output = f"Error: File not found at {file_path}"
+      else:
+          try:
+              # Load file contents using npcpy's loader
+              # Returns chunks by default with chunk_size=250
+              chunks = load_file_contents(file_path)
+
+              # Join chunks back together for full content
+              if isinstance(chunks, list):
+                  if chunks and chunks[0].startswith("Error"):
+                      output = chunks[0]
+                  else:
+                      file_content = "\n".join(chunks)
+                      output = f"File: {file_path}\n\n{file_content}"
+              else:
+                  output = f"File: {file_path}\n\n{chunks}"
+
+          except Exception as e:
+              output = f"Error reading file {file_path}: {str(e)}"
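
The {{ file_path }} placeholder is rendered by the jinx template engine before the Python step runs, so a session invocation would look like:

    /load_file ~/projects/notes.md

Because load_file_contents returns the file as a list of chunks (chunk_size=250 per the comment above) and the step rejoins them with newlines, chunk boundaries appear as line breaks in the reassembled content.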
npcsh/npc_team/jinxs/utils/serve.jinx CHANGED
@@ -17,9 +17,6 @@ steps:
       if cors_str and cors_str.strip():
           cors_origins = [origin.strip() for origin in cors_str.split(",")]
 
-      # start_flask_server blocks, so this will hold the Jinx until the server is stopped.
-      # In a real-world scenario, you might want to run this in a separate process
-      # or have a non-blocking server start.
       start_flask_server(
           port=int(port),  # Ensure port is an integer
           cors_origins=cors_origins,
npcsh/npc_team/jinxs/utils/trigger.jinx CHANGED
@@ -1,36 +1,61 @@
 jinx_name: "trigger"
-description: "Execute a trigger command"
+description: "Creates a persistent listener (--listen) or a scheduled task (--cron)."
 inputs:
-  - trigger_description: ""  # Required description of the trigger to execute.
+  - listen: ""  # The description for a persistent, event-driven listener.
+  - cron: ""    # The description for a scheduled, time-based task.
 steps:
-  - name: "execute_trigger"
+  - name: "execute_command"
     engine: "python"
     code: |
       import traceback
-      from npcpy.work.trigger import execute_trigger_command
+      from npcpy.work.trigger import execute_trigger_command  # For --listen
+      from npcpy.work.plan import execute_plan_command        # For --cron
 
-      trigger_description = context.get('trigger_description')
+      listen_description = context.get('listen')
+      cron_description = context.get('cron')
       output_messages = context.get('messages', [])
 
-      if not trigger_description or not trigger_description.strip():
-          context['output'] = "Usage: /trigger <trigger_description>"
+      USAGE = 'Usage: /trigger --listen "<description>" OR /trigger --cron "<description>"'
+
+      # Determine which command was used and set the appropriate variables
+      subcommand = None
+      description = None
+      executor_func = None
+
+      # --- Argument Validation ---
+      # Ensure mutual exclusivity
+      if listen_description and cron_description:
+          context['output'] = f"Error: --listen and --cron are mutually exclusive. {USAGE}"
+          context['messages'] = output_messages
+          exit()
+
+      # --- Command Dispatch ---
+      if listen_description:
+          subcommand = 'listen'
+          description = listen_description
+          executor_func = execute_trigger_command
+      elif cron_description:
+          subcommand = 'cron'
+          description = cron_description
+          executor_func = execute_plan_command
+      else:
+          # Handle case where no arguments were provided
+          context['output'] = f"Error: You must provide either --listen or --cron. {USAGE}"
           context['messages'] = output_messages
           exit()
 
+      # --- Execution ---
       try:
-          # Pass all current context as kwargs to execute_trigger_command
-          result = execute_trigger_command(command=trigger_description, **context)
+          result = executor_func(command=description, **context)
 
           if isinstance(result, dict):
-              context['output'] = result.get('output', 'Trigger executed.')
+              output_key = 'Listener' if subcommand == 'listen' else 'Cron job'
+              context['output'] = result.get('output', f'{output_key} created successfully.')
               context['messages'] = result.get('messages', output_messages)
           else:
               context['output'] = str(result)
               context['messages'] = output_messages
-      except NameError:
-          context['output'] = "Trigger function (execute_trigger_command) not available."
-          context['messages'] = output_messages
       except Exception as e:
           traceback.print_exc()
-          context['output'] = f"Error executing trigger: {e}"
+          context['output'] = f"Error creating {subcommand}: {e}"
           context['messages'] = output_messages
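
With this rewrite one jinx fronts two backends: execute_trigger_command for event-driven listeners and execute_plan_command for scheduled jobs. Plausible invocations (the descriptions are illustrative):

    /trigger --listen "when a new file appears in ~/Downloads, summarize it"
    /trigger --cron "every weekday at 9am, summarize yesterday's git log"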
npcsh/npcsh.py CHANGED
@@ -78,7 +78,8 @@ def run_repl(command_history: CommandHistory, initial_state: ShellState, router)
     render_markdown(f'- Using {state.current_mode} mode. Use /agent, /cmd, or /chat to switch to other modes')
     render_markdown(f'- To switch to a different NPC, type /npc <npc_name> or /n <npc_name> to switch to that NPC.')
     render_markdown('\n- Here are the current NPCs available in your team: ' + ', '.join([npc_name for npc_name in state.team.npcs.keys()]))
-
+    render_markdown('\n- Here are the currently available Jinxs: ' + ', '.join([jinx_name for jinx_name in state.team.jinxs_dict.keys()]))
+
     is_windows = platform.system().lower().startswith("win")
     try:
         completer = make_completer(state, router)
npcsh/routes.py CHANGED
@@ -90,7 +90,6 @@ class CommandRouter:
 
         jinx_output = jinx.execute(
             input_values=input_values,
-            jinxs_dict=kwargs.get('jinxs_dict', {}),
             npc=npc,
             messages=messages,
             extra_globals=application_globals_for_jinx
npcsh-1.1.10.data/data/npcsh/npc_team/compress.jinx ADDED
@@ -0,0 +1,140 @@
(contents identical to npcsh/npc_team/jinxs/utils/compress.jinx above)
npcsh-1.1.10.data/data/npcsh/npc_team/load_file.jinx ADDED
@@ -0,0 +1,35 @@
(contents identical to npcsh/npc_team/jinxs/utils/load_file.jinx above)
{npcsh-1.1.8.data → npcsh-1.1.10.data}/data/npcsh/npc_team/serve.jinx CHANGED
@@ -17,9 +17,6 @@ steps:
(same three comment lines removed as in npcsh/npc_team/jinxs/utils/serve.jinx above)
npcsh-1.1.10.data/data/npcsh/npc_team/trigger.jinx ADDED
@@ -0,0 +1,61 @@
(the full 61-line file, identical to the updated npcsh/npc_team/jinxs/utils/trigger.jinx above)
{npcsh-1.1.8.dist-info → npcsh-1.1.10.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: npcsh
-Version: 1.1.8
+Version: 1.1.10
 Summary: npcsh is a command-line toolkit for using AI agents in novel ways.
 Home-page: https://github.com/NPC-Worldwide/npcsh
 Author: Christopher Agostino