npcsh 1.1.3__py3-none-any.whl → 1.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106)
  1. npcsh/_state.py +48 -64
  2. npcsh/npc_team/corca_example.png +0 -0
  3. npcsh/npc_team/jinxs/{python_executor.jinx → code/python.jinx} +1 -1
  4. npcsh/npc_team/jinxs/{bash_executer.jinx → code/sh.jinx} +1 -1
  5. npcsh/npc_team/jinxs/code/sql.jinx +18 -0
  6. npcsh/npc_team/jinxs/modes/alicanto.jinx +88 -0
  7. npcsh/npc_team/jinxs/modes/corca.jinx +28 -0
  8. npcsh/npc_team/jinxs/modes/guac.jinx +46 -0
  9. npcsh/npc_team/jinxs/modes/plonk.jinx +57 -0
  10. npcsh/npc_team/jinxs/modes/pti.jinx +28 -0
  11. npcsh/npc_team/jinxs/modes/spool.jinx +40 -0
  12. npcsh/npc_team/jinxs/modes/wander.jinx +81 -0
  13. npcsh/npc_team/jinxs/modes/yap.jinx +25 -0
  14. npcsh/npc_team/jinxs/utils/breathe.jinx +20 -0
  15. npcsh/npc_team/jinxs/utils/core/build.jinx +65 -0
  16. npcsh/npc_team/jinxs/utils/core/compile.jinx +50 -0
  17. npcsh/npc_team/jinxs/utils/core/help.jinx +52 -0
  18. npcsh/npc_team/jinxs/utils/core/init.jinx +41 -0
  19. npcsh/npc_team/jinxs/utils/core/jinxs.jinx +32 -0
  20. npcsh/npc_team/jinxs/utils/core/set.jinx +40 -0
  21. npcsh/npc_team/jinxs/{edit_file.jinx → utils/edit_file.jinx} +1 -1
  22. npcsh/npc_team/jinxs/utils/flush.jinx +39 -0
  23. npcsh/npc_team/jinxs/utils/npc-studio.jinx +82 -0
  24. npcsh/npc_team/jinxs/utils/ots.jinx +92 -0
  25. npcsh/npc_team/jinxs/utils/plan.jinx +33 -0
  26. npcsh/npc_team/jinxs/utils/roll.jinx +66 -0
  27. npcsh/npc_team/jinxs/utils/sample.jinx +56 -0
  28. npcsh/npc_team/jinxs/utils/search/brainblast.jinx +51 -0
  29. npcsh/npc_team/jinxs/utils/search/rag.jinx +70 -0
  30. npcsh/npc_team/jinxs/utils/search/search.jinx +192 -0
  31. npcsh/npc_team/jinxs/utils/serve.jinx +29 -0
  32. npcsh/npc_team/jinxs/utils/sleep.jinx +116 -0
  33. npcsh/npc_team/jinxs/utils/trigger.jinx +36 -0
  34. npcsh/npc_team/jinxs/utils/vixynt.jinx +129 -0
  35. npcsh/npcsh.py +14 -12
  36. npcsh/routes.py +80 -1420
  37. npcsh-1.1.5.data/data/npcsh/npc_team/alicanto.jinx +88 -0
  38. npcsh-1.1.5.data/data/npcsh/npc_team/brainblast.jinx +51 -0
  39. npcsh-1.1.5.data/data/npcsh/npc_team/breathe.jinx +20 -0
  40. npcsh-1.1.5.data/data/npcsh/npc_team/build.jinx +65 -0
  41. npcsh-1.1.5.data/data/npcsh/npc_team/compile.jinx +50 -0
  42. npcsh-1.1.5.data/data/npcsh/npc_team/corca.jinx +28 -0
  43. npcsh-1.1.5.data/data/npcsh/npc_team/corca_example.png +0 -0
  44. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/edit_file.jinx +1 -1
  45. npcsh-1.1.5.data/data/npcsh/npc_team/flush.jinx +39 -0
  46. npcsh-1.1.5.data/data/npcsh/npc_team/guac.jinx +46 -0
  47. npcsh-1.1.5.data/data/npcsh/npc_team/help.jinx +52 -0
  48. npcsh-1.1.5.data/data/npcsh/npc_team/init.jinx +41 -0
  49. npcsh-1.1.5.data/data/npcsh/npc_team/jinxs.jinx +32 -0
  50. npcsh-1.1.5.data/data/npcsh/npc_team/npc-studio.jinx +82 -0
  51. npcsh-1.1.5.data/data/npcsh/npc_team/ots.jinx +92 -0
  52. npcsh-1.1.5.data/data/npcsh/npc_team/plan.jinx +33 -0
  53. npcsh-1.1.5.data/data/npcsh/npc_team/plonk.jinx +57 -0
  54. npcsh-1.1.5.data/data/npcsh/npc_team/pti.jinx +28 -0
  55. npcsh-1.1.3.data/data/npcsh/npc_team/python_executor.jinx → npcsh-1.1.5.data/data/npcsh/npc_team/python.jinx +1 -1
  56. npcsh-1.1.5.data/data/npcsh/npc_team/rag.jinx +70 -0
  57. npcsh-1.1.5.data/data/npcsh/npc_team/roll.jinx +66 -0
  58. npcsh-1.1.5.data/data/npcsh/npc_team/sample.jinx +56 -0
  59. npcsh-1.1.5.data/data/npcsh/npc_team/search.jinx +192 -0
  60. npcsh-1.1.5.data/data/npcsh/npc_team/serve.jinx +29 -0
  61. npcsh-1.1.5.data/data/npcsh/npc_team/set.jinx +40 -0
  62. npcsh-1.1.3.data/data/npcsh/npc_team/bash_executer.jinx → npcsh-1.1.5.data/data/npcsh/npc_team/sh.jinx +1 -1
  63. npcsh-1.1.5.data/data/npcsh/npc_team/sleep.jinx +116 -0
  64. npcsh-1.1.5.data/data/npcsh/npc_team/spool.jinx +40 -0
  65. npcsh-1.1.5.data/data/npcsh/npc_team/sql.jinx +18 -0
  66. npcsh-1.1.5.data/data/npcsh/npc_team/trigger.jinx +36 -0
  67. npcsh-1.1.5.data/data/npcsh/npc_team/vixynt.jinx +129 -0
  68. npcsh-1.1.5.data/data/npcsh/npc_team/wander.jinx +81 -0
  69. npcsh-1.1.5.data/data/npcsh/npc_team/yap.jinx +25 -0
  70. {npcsh-1.1.3.dist-info → npcsh-1.1.5.dist-info}/METADATA +1 -1
  71. npcsh-1.1.5.dist-info/RECORD +132 -0
  72. npcsh/npc_team/jinxs/image_generation.jinx +0 -29
  73. npcsh/npc_team/jinxs/internet_search.jinx +0 -31
  74. npcsh/npc_team/jinxs/screen_cap.jinx +0 -25
  75. npcsh-1.1.3.data/data/npcsh/npc_team/image_generation.jinx +0 -29
  76. npcsh-1.1.3.data/data/npcsh/npc_team/internet_search.jinx +0 -31
  77. npcsh-1.1.3.data/data/npcsh/npc_team/screen_cap.jinx +0 -25
  78. npcsh-1.1.3.dist-info/RECORD +0 -78
  79. /npcsh/npc_team/jinxs/{kg_search.jinx → utils/search/kg_search.jinx} +0 -0
  80. /npcsh/npc_team/jinxs/{memory_search.jinx → utils/search/memory_search.jinx} +0 -0
  81. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/alicanto.npc +0 -0
  82. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/alicanto.png +0 -0
  83. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/corca.npc +0 -0
  84. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/corca.png +0 -0
  85. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/foreman.npc +0 -0
  86. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/frederic.npc +0 -0
  87. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/frederic4.png +0 -0
  88. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/guac.png +0 -0
  89. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
  90. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/kadiefa.png +0 -0
  91. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/kg_search.jinx +0 -0
  92. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/memory_search.jinx +0 -0
  93. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
  94. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
  95. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/plonk.npc +0 -0
  96. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/plonk.png +0 -0
  97. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/plonkjr.npc +0 -0
  98. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/plonkjr.png +0 -0
  99. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/sibiji.npc +0 -0
  100. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/sibiji.png +0 -0
  101. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/spool.png +0 -0
  102. {npcsh-1.1.3.data → npcsh-1.1.5.data}/data/npcsh/npc_team/yap.png +0 -0
  103. {npcsh-1.1.3.dist-info → npcsh-1.1.5.dist-info}/WHEEL +0 -0
  104. {npcsh-1.1.3.dist-info → npcsh-1.1.5.dist-info}/entry_points.txt +0 -0
  105. {npcsh-1.1.3.dist-info → npcsh-1.1.5.dist-info}/licenses/LICENSE +0 -0
  106. {npcsh-1.1.3.dist-info → npcsh-1.1.5.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,25 @@
1
+ jinx_name: "yap"
2
+ description: "Enter voice chat (yap) mode"
3
+ inputs: [] # Yap mode takes its parameters directly from the environment/kwargs
4
+ steps:
5
+ - name: "enter_yap"
6
+ engine: "python"
7
+ code: |
8
+ import traceback
9
+ from npcsh.yap import enter_yap_mode
10
+
11
+ output_messages = context.get('messages', [])
12
+
13
+ try:
14
+ result = enter_yap_mode(**context) # Pass all context as kwargs
15
+
16
+ if isinstance(result, dict):
17
+ context['output'] = result.get('output', 'Exited Yap Mode.')
18
+ context['messages'] = result.get('messages', output_messages)
19
+ else:
20
+ context['output'] = str(result)
21
+ context['messages'] = output_messages
22
+ except Exception as e:
23
+ traceback.print_exc()
24
+ context['output'] = f"Error entering yap mode: {e}"
25
+ context['messages'] = output_messages
@@ -0,0 +1,20 @@
1
+ jinx_name: "breathe"
2
+ description: "Condense context on a regular cadence"
3
+ inputs: [] # The breathe command takes all relevant context from the NPC's environment
4
+ steps:
5
+ - name: "condense_context"
6
+ engine: "python"
7
+ code: |
8
+ from npcpy.llm_funcs import breathe
9
+
10
+ output_messages = context.get('messages', [])
11
+
12
+ # Pass all current context as kwargs to breathe
13
+ result = breathe(**context)
14
+
15
+ if isinstance(result, dict):
16
+ context['output'] = result.get('output', 'Context condensed.')
17
+ context['messages'] = result.get('messages', output_messages)
18
+ else:
19
+ context['output'] = "Context condensation process initiated."
20
+ context['messages'] = output_messages
@@ -0,0 +1,65 @@
1
+ jinx_name: "build"
2
+ description: "Build deployment artifacts for NPC team"
3
+ inputs:
4
+ - target: "flask" # The type of deployment target (e.g., flask, docker, cli, static).
5
+ - output: "./build" # The output directory for built artifacts.
6
+ - team: "./npc_team" # The path to the NPC team directory.
7
+ - port: 5337 # The port for flask server builds.
8
+ - cors: "" # Comma-separated CORS origins for flask server builds.
9
+ steps:
10
+ - name: "execute_build"
11
+ engine: "python"
12
+ code: |
13
+ import os
14
+
15
+ # Assume these build functions are available in the execution environment
16
+ # from a larger project context, e.g., from npcpy.build_funcs
17
+ try:
18
+ from npcpy.build_funcs import (
19
+ build_flask_server,
20
+ build_docker_compose,
21
+ build_cli_executable,
22
+ build_static_site,
23
+ )
24
+ except ImportError:
25
+ # Provide mock functions for demonstration or error handling
26
+ def build_flask_server(config, **kwargs): return {"output": f"Mock build flask: {config}", "messages": []}
27
+ def build_docker_compose(config, **kwargs): return {"output": f"Mock build docker: {config}", "messages": []}
28
+ def build_cli_executable(config, **kwargs): return {"output": f"Mock build cli: {config}", "messages": []}
29
+ def build_static_site(config, **kwargs): return {"output": f"Mock build static: {config}", "messages": []}
30
+
31
+ target = context.get('target')
32
+ output_dir = context.get('output')
33
+ team_path = context.get('team')
34
+ port = context.get('port')
35
+ cors_origins_str = context.get('cors')
36
+
37
+ cors_origins = [origin.strip() for origin in cors_origins_str.split(',')] if cors_origins_str.strip() else None
38
+
39
+ build_config = {
40
+ 'team_path': os.path.abspath(os.path.expanduser(team_path)),
41
+ 'output_dir': os.path.abspath(os.path.expanduser(output_dir)),
42
+ 'target': target,
43
+ 'port': port,
44
+ 'cors_origins': cors_origins,
45
+ }
46
+
47
+ builders = {
48
+ 'flask': build_flask_server,
49
+ 'docker': build_docker_compose,
50
+ 'cli': build_cli_executable,
51
+ 'static': build_static_site,
52
+ }
53
+
54
+ output_messages = context.get('messages', [])
55
+ output_result = ""
56
+
57
+ if target not in builders:
58
+ output_result = f"Unknown target: {target}. Available: {list(builders.keys())}"
59
+ else:
60
+ result = builders[target](build_config, messages=output_messages)
61
+ output_result = result.get('output', 'Build command executed.')
62
+ output_messages = result.get('messages', output_messages) # Update messages from builder call
63
+
64
+ context['output'] = output_result
65
+ context['messages'] = output_messages
@@ -0,0 +1,50 @@
1
+ jinx_name: "compile"
2
+ description: "Compile NPC profiles"
3
+ inputs:
4
+ - npc_file_path: "" # Optional path to a specific NPC file to compile.
5
+ - npc_team_dir: "./npc_team" # Directory containing NPC profiles to compile, if no specific file is given.
6
+ steps:
7
+ - name: "compile_npcs"
8
+ engine: "python"
9
+ code: |
10
+ import os
11
+ import traceback
12
+ from npcpy.npc_compiler import NPC, Team # Assuming Team might be needed for full directory compilation
13
+
14
+ npc_file_path_arg = context.get('npc_file_path')
15
+ npc_team_dir = context.get('npc_team_dir')
16
+ output_messages = context.get('messages', [])
17
+
18
+ output_result = ""
19
+ compiled_npc_object = None
20
+
21
+ try:
22
+ if npc_file_path_arg and npc_file_path_arg.strip():
23
+ npc_full_path = os.path.abspath(os.path.expanduser(npc_file_path_arg))
24
+ if os.path.exists(npc_full_path):
25
+ # Assuming NPC() constructor "compiles" it by loading its definition
26
+ compiled_npc_object = NPC(file=npc_full_path, db_conn=context.get('db_conn'))
27
+ output_result = f"Compiled NPC: {npc_full_path}"
28
+ else:
29
+ output_result = f"Error: NPC file not found: {npc_full_path}"
30
+ else:
31
+ # Compile all NPCs in the directory. This would typically involve iterating and loading.
32
+ # For simplicity in this Jinx, we just acknowledge the directory.
33
+ # A more robust implementation would loop through .npc files and compile them.
34
+ abs_npc_team_dir = os.path.abspath(os.path.expanduser(npc_team_dir))
35
+ if os.path.exists(abs_npc_team_dir):
36
+ output_result = f"Acknowledged compilation for all NPCs in directory: {abs_npc_team_dir}"
37
+ # Example of loading a Team and setting the compiled_npc_object to its forenpc if available
38
+ # team = Team(team_path=abs_npc_team_dir, db_conn=context.get('db_conn'))
39
+ # if team.forenpc:
40
+ # compiled_npc_object = team.forenpc
41
+ else:
42
+ output_result = f"Error: NPC team directory not found: {npc_team_dir}"
43
+ except Exception as e:
44
+ traceback.print_exc()
45
+ output_result = f"Error compiling: {e}"
46
+
47
+ context['output'] = output_result
48
+ context['messages'] = output_messages
49
+ if compiled_npc_object:
50
+ context['compiled_npc_object'] = compiled_npc_object # Store the compiled NPC object if any
@@ -0,0 +1,52 @@
1
+ jinx_name: help
2
+ description: Show help for commands, NPCs, or Jinxs
3
+ inputs:
4
+ - topic: null
5
+ steps:
6
+ - name: show_help
7
+ engine: python
8
+ code: |
9
+ import json
10
+ from npcsh._state import CANONICAL_ARGS, get_argument_help
11
+
12
+ topic = context.get('topic')
13
+
14
+ if not topic:
15
+ output_lines = ["# Available Commands\n\n"]
16
+
17
+ all_jinxs = {}
18
+ if hasattr(npc, 'team') and npc.team and hasattr(npc.team, 'jinxs_dict'):
19
+ all_jinxs.update(npc.team.jinxs_dict)
20
+ if hasattr(npc, 'jinxs_dict') and npc.jinxs_dict:
21
+ all_jinxs.update(npc.jinxs_dict)
22
+
23
+ for cmd in sorted(all_jinxs.keys()):
24
+ jinx_obj = all_jinxs[cmd]
25
+ desc = getattr(jinx_obj, 'description', 'No description')
26
+ output_lines.append(f"/{cmd} - {desc}\n\n")
27
+
28
+ arg_help_map = get_argument_help()
29
+ if arg_help_map:
30
+ output_lines.append("## Common Command-Line Flags\n\n")
31
+ output_lines.append("The shortest unambiguous prefix works.\n")
32
+
33
+ for arg in sorted(CANONICAL_ARGS):
34
+ aliases = arg_help_map.get(arg, [])
35
+ alias_str = f"(-{min(aliases, key=len)})" if aliases else ""
36
+ output_lines.append(f"--{arg:<20} {alias_str}\n")
37
+
38
+ output = "".join(output_lines)
39
+ else:
40
+ jinx_obj = None
41
+ if hasattr(npc, 'team') and npc.team and hasattr(npc.team, 'jinxs_dict'):
42
+ jinx_obj = npc.team.jinxs_dict.get(topic)
43
+ if not jinx_obj and hasattr(npc, 'jinxs_dict'):
44
+ jinx_obj = npc.jinxs_dict.get(topic)
45
+
46
+ if jinx_obj:
47
+ output = f"## Help for Jinx: `/{topic}`\n\n"
48
+ output += f"- **Description**: {jinx_obj.description}\n"
49
+ if hasattr(jinx_obj, 'inputs') and jinx_obj.inputs:
50
+ output += f"- **Inputs**: {json.dumps(jinx_obj.inputs, indent=2)}\n"
51
+ else:
52
+ output = f"No help topic found for `{topic}`."
@@ -0,0 +1,41 @@
1
+ jinx_name: "init"
2
+ description: "Initialize NPC project"
3
+ inputs:
4
+ - directory: "." # The directory where the NPC project should be initialized.
5
+ - templates: "" # Optional templates to use for initialization.
6
+ - context: "" # Optional context for project initialization.
7
+ - model: "" # Optional LLM model to set as default for the project.
8
+ - provider: "" # Optional LLM provider to set as default for the project.
9
+ steps:
10
+ - name: "initialize_project"
11
+ engine: "python"
12
+ code: |
13
+ import os
14
+ import traceback
15
+ from npcpy.npc_compiler import initialize_npc_project
16
+
17
+ directory = context.get('directory')
18
+ templates = context.get('templates')
19
+ context_param = context.get('context') # Renamed to avoid conflict with Jinx context
20
+ model = context.get('model')
21
+ provider = context.get('provider')
22
+ output_messages = context.get('messages', [])
23
+
24
+ output_result = ""
25
+ try:
26
+ initialize_npc_project(
27
+ directory=directory,
28
+ templates=templates,
29
+ context=context_param, # Use the renamed context parameter
30
+ model=model,
31
+ provider=provider
32
+ )
33
+ output_result = f"NPC project initialized in {os.path.abspath(directory)}."
34
+ except NameError:
35
+ output_result = "Init function (initialize_npc_project) not available."
36
+ except Exception as e:
37
+ traceback.print_exc()
38
+ output_result = f"Error initializing project: {e}"
39
+
40
+ context['output'] = output_result
41
+ context['messages'] = output_messages
@@ -0,0 +1,32 @@
1
+ jinx_name: jinxs
2
+ description: Show available jinxs for the current NPC/Team
3
+ inputs: []
4
+ steps:
5
+ - name: list_jinxs
6
+ engine: python
7
+ code: |
8
+ output_lines = ["Available Jinxs:\n"]
9
+ jinxs_listed = set()
10
+
11
+ if hasattr(npc, 'team') and npc.team:
12
+ team = npc.team
13
+
14
+ if hasattr(team, 'jinxs_dict') and team.jinxs_dict:
15
+ output_lines.append(f"\n--- Team Jinxs ---\n")
16
+ for name, jinx_obj in sorted(team.jinxs_dict.items()):
17
+ desc = getattr(jinx_obj, 'description', 'No description available.')
18
+ output_lines.append(f"- /{name}: {desc}\n")
19
+ jinxs_listed.add(name)
20
+
21
+ if hasattr(npc, 'jinxs_dict') and npc.jinxs_dict:
22
+ output_lines.append(f"\n--- NPC Jinxs for {npc.name} ---\n")
23
+ for name, jinx_obj in sorted(npc.jinxs_dict.items()):
24
+ if name not in jinxs_listed:
25
+ desc = getattr(jinx_obj, 'description', 'No description available.')
26
+ output_lines.append(f"- /{name}: {desc}\n")
27
+ jinxs_listed.add(name)
28
+
29
+ if not jinxs_listed:
30
+ output = "No jinxs available for the current context."
31
+ else:
32
+ output = "".join(output_lines)
@@ -0,0 +1,40 @@
1
+ jinx_name: "set"
2
+ description: "Set configuration values"
3
+ inputs:
4
+ - key: "" # The configuration key to set.
5
+ - value: "" # The value to set for the configuration key.
6
+ steps:
7
+ - name: "set_config_value"
8
+ engine: "python"
9
+ code: |
10
+ import traceback
11
+ # Assuming set_npcsh_config_value is accessible
12
+ try:
13
+ from npcsh._state import set_npcsh_config_value
14
+ except ImportError:
15
+ def set_npcsh_config_value(key, value):
16
+ print(f"Mock: Setting config '{key}' to '{value}'")
17
+ # In a real scenario, this might write to a config file or global state
18
+ pass
19
+
20
+ key = context.get('key')
21
+ value = context.get('value')
22
+ output_messages = context.get('messages', [])
23
+
24
+ output_result = ""
25
+ if not key or not value:
26
+ context['output'] = "Usage: /set <key>=<value>"
27
+ context['messages'] = output_messages
28
+ exit()
29
+
30
+ try:
31
+ set_npcsh_config_value(key, value)
32
+ output_result = f"Configuration value '{key}' set."
33
+ except NameError:
34
+ output_result = "Set function (set_npcsh_config_value) not available."
35
+ except Exception as e:
36
+ traceback.print_exc()
37
+ output_result = f"Error setting configuration '{key}': {e}"
38
+
39
+ context['output'] = output_result
40
+ context['messages'] = output_messages
@@ -1,4 +1,4 @@
1
- jinx_name: file_editor
1
+ jinx_name: edit_file
2
2
  description: Examines a file, determines what changes are needed, and applies those
3
3
  changes.
4
4
  inputs:
@@ -0,0 +1,39 @@
1
+ jinx_name: "flush"
2
+ description: "Flush the last N messages from the conversation history."
3
+ inputs:
4
+ - n: 1 # The number of messages to flush (default to 1).
5
+ steps:
6
+ - name: "flush_messages"
7
+ engine: "python"
8
+ code: |
9
+ n = int(context.get('n', 1))
10
+ output_messages = context.get('messages', [])
11
+
12
+ if n <= 0:
13
+ context['output'] = "Error: Number of messages must be positive."
14
+ context['messages'] = output_messages
15
+ exit()
16
+
17
+ new_messages = list(output_messages)
18
+ original_len = len(new_messages)
19
+ removed_count = 0
20
+
21
+ if new_messages and new_messages[0].get("role") == "system":
22
+ system_message = new_messages[0]
23
+ working_messages = new_messages[1:]
24
+ num_to_remove = min(n, len(working_messages))
25
+ if num_to_remove > 0:
26
+ final_messages = [system_message] + working_messages[:-num_to_remove]
27
+ removed_count = num_to_remove
28
+ else:
29
+ final_messages = [system_message]
30
+ else:
31
+ num_to_remove = min(n, original_len)
32
+ if num_to_remove > 0:
33
+ final_messages = new_messages[:-num_to_remove]
34
+ removed_count = num_to_remove
35
+ else:
36
+ final_messages = []
37
+
38
+ context['output'] = f"Flushed {removed_count} message(s). Context is now {len(final_messages)} messages."
39
+ context['messages'] = final_messages
@@ -0,0 +1,82 @@
1
+ jinx_name: "npc-studio"
2
+ description: "Start npc studio"
3
+ inputs:
4
+ - user_command: "" # Any additional arguments to pass to the npc studio launch.
5
+ steps:
6
+ - name: "launch_npc_studio"
7
+ engine: "python"
8
+ code: |
9
+ import os
10
+ import subprocess
11
+ import sys
12
+ from pathlib import Path
13
+ import traceback
14
+
15
+ NPC_STUDIO_DIR = Path.home() / ".npcsh" / "npc-studio"
16
+
17
+ def ensure_repo():
18
+ """Clone or update the npc-studio repo."""
19
+ if not NPC_STUDIO_DIR.exists():
20
+ os.makedirs(NPC_STUDIO_DIR.parent, exist_ok=True)
21
+ subprocess.check_call([
22
+ "git", "clone",
23
+ "https://github.com/npc-worldwide/npc-studio.git",
24
+ str(NPC_STUDIO_DIR)
25
+ ])
26
+ else:
27
+ subprocess.check_call(
28
+ ["git", "pull"],
29
+ cwd=NPC_STUDIO_DIR
30
+ )
31
+
32
+ def install_dependencies():
33
+ """Install npm and pip dependencies."""
34
+ subprocess.check_call(["npm", "install"], cwd=NPC_STUDIO_DIR)
35
+
36
+ req_file = NPC_STUDIO_DIR / "requirements.txt"
37
+ if req_file.exists():
38
+ subprocess.check_call([sys.executable, "-m", "pip", "install", "-r", str(req_file)])
39
+
40
+ def launch_npc_studio(path_to_open: str = None):
41
+ """
42
+ Launch the NPC Studio backend + frontend.
43
+ Returns PIDs for processes.
44
+ """
45
+ ensure_repo()
46
+ install_dependencies()
47
+
48
+ backend = subprocess.Popen(
49
+ [sys.executable, "npc_studio_serve.py"],
50
+ cwd=NPC_STUDIO_DIR,
51
+ shell = False
52
+ )
53
+
54
+ # npm run dev is typically for the frontend development server
55
+ dev_server = subprocess.Popen(
56
+ ["npm", "run", "dev"],
57
+ cwd=NPC_STUDIO_DIR,
58
+ shell=False
59
+ )
60
+
61
+ # npm start is typically for electron or other packaged frontend
62
+ frontend = subprocess.Popen(
63
+ ["npm", "start"],
64
+ cwd=NPC_STUDIO_DIR,
65
+ shell=False
66
+ )
67
+
68
+ return backend, dev_server, frontend
69
+
70
+ user_command = context.get('user_command')
71
+ output_messages = context.get('messages', [])
72
+ output_result = ""
73
+
74
+ try:
75
+ backend, electron, frontend = launch_npc_studio(user_command or None)
76
+ output_result = f"NPC Studio started!\nBackend PID={backend.pid}, Electron PID={electron.pid} Frontend PID={frontend.pid}"
77
+ except Exception as e:
78
+ traceback.print_exc()
79
+ output_result = f"Failed to start NPC Studio: {e}"
80
+
81
+ context['output'] = output_result
82
+ context['messages'] = output_messages
@@ -0,0 +1,92 @@
1
+ jinx_name: "ots"
2
+ description: "Take screenshot and analyze with vision model"
3
+ inputs:
4
+ - image_paths_args: "" # Optional comma-separated paths to image files for analysis.
5
+ - prompt: "" # The prompt for the LLM about the image(s).
6
+ - vmodel: "" # Vision model to use. Defaults to NPCSH_VISION_MODEL or NPC's model.
7
+ - vprovider: "" # Vision model provider. Defaults to NPCSH_VISION_PROVIDER or NPC's provider.
8
+ - stream: False # Whether to stream the output from the LLM.
9
+ - api_url: "" # API URL for the LLM.
10
+ - api_key: "" # API key for the LLM.
11
+ steps:
12
+ - name: "analyze_screenshot_or_image"
13
+ engine: "python"
14
+ code: |
15
+ import os
16
+ import traceback
17
+ from npcpy.llm_funcs import get_llm_response
18
+ from npcpy.data.image import capture_screenshot
19
+ # Assuming NPCSH_VISION_MODEL and NPCSH_VISION_PROVIDER are accessible through _state or defaults
20
+ # For simplicity in Jinx, we'll use fallbacks or assume context will provide
21
+
22
+ image_paths_args_str = context.get('image_paths_args')
23
+ user_prompt = context.get('prompt')
24
+ vision_model = context.get('vmodel')
25
+ vision_provider = context.get('vprovider')
26
+ stream_output = context.get('stream')
27
+ api_url = context.get('api_url')
28
+ api_key = context.get('api_key')
29
+ output_messages = context.get('messages', [])
30
+ current_npc = context.get('npc')
31
+
32
+ image_paths = []
33
+ if image_paths_args_str and image_paths_args_str.strip():
34
+ for img_path_arg in image_paths_args_str.split(','):
35
+ full_path = os.path.abspath(os.path.expanduser(img_path_arg.strip()))
36
+ if os.path.exists(full_path):
37
+ image_paths.append(full_path)
38
+ else:
39
+ context['output'] = f"Error: Image file not found at {full_path}"
40
+ context['messages'] = output_messages
41
+ exit()
42
+
43
+ if not image_paths:
44
+ screenshot_info = capture_screenshot(full=False)
45
+ if screenshot_info and "file_path" in screenshot_info:
46
+ image_paths.append(screenshot_info["file_path"])
47
+ print(f"Screenshot captured: {screenshot_info.get('filename', os.path.basename(screenshot_info['file_path']))}")
48
+ else:
49
+ context['output'] = "Error: Failed to capture screenshot."
50
+ context['messages'] = output_messages
51
+ exit()
52
+
53
+ if not image_paths:
54
+ context['output'] = "No valid images found or captured."
55
+ context['messages'] = output_messages
56
+ exit()
57
+
58
+ if not user_prompt or not user_prompt.strip():
59
+ # In a non-interactive Jinx, a default prompt is better than waiting for input
60
+ user_prompt = "Describe the image(s)."
61
+
62
+ # Fallback for model/provider if not explicitly set in Jinx inputs
63
+ if not vision_model and current_npc and current_npc.model:
64
+ vision_model = current_npc.model
65
+ if not vision_provider and current_npc and current_npc.provider:
66
+ vision_provider = current_npc.provider
67
+
68
+ # Final fallbacks (these would ideally come from npcsh._state config)
69
+ if not vision_model: vision_model = "gemini-1.5-pro-vision" # Example default
70
+ if not vision_provider: vision_provider = "gemini" # Example default
71
+
72
+ try:
73
+ response_data = get_llm_response(
74
+ prompt=user_prompt,
75
+ model=vision_model,
76
+ provider=vision_provider,
77
+ messages=output_messages, # Pass current messages to LLM
78
+ images=image_paths,
79
+ stream=stream_output,
80
+ npc=current_npc,
81
+ api_url=api_url,
82
+ api_key=api_key
83
+ )
84
+ context['output'] = response_data.get('response')
85
+ context['messages'] = response_data.get('messages', output_messages)
86
+ context['model'] = vision_model
87
+ context['provider'] = vision_provider
88
+
89
+ except Exception as e:
90
+ traceback.print_exc()
91
+ context['output'] = f"Error during /ots command: {e}"
92
+ context['messages'] = output_messages
@@ -0,0 +1,33 @@
1
+ jinx_name: "plan"
2
+ description: "Execute a plan command"
3
+ inputs:
4
+ - plan_description: "" # Description of the plan to execute.
5
+ steps:
6
+ - name: "execute_plan"
7
+ engine: "python"
8
+ code: |
9
+ import traceback
10
+ from npcpy.work.plan import execute_plan_command
11
+
12
+ plan_description = context.get('plan_description')
13
+ output_messages = context.get('messages', [])
14
+
15
+ if not plan_description or not plan_description.strip():
16
+ context['output'] = "Usage: /plan <description_of_plan>"
17
+ context['messages'] = output_messages
18
+ exit()
19
+
20
+ try:
21
+ # Pass all current context as kwargs to execute_plan_command
22
+ result = execute_plan_command(command=plan_description, **context)
23
+
24
+ if isinstance(result, dict):
25
+ context['output'] = result.get('output', 'Plan executed.')
26
+ context['messages'] = result.get('messages', output_messages)
27
+ else:
28
+ context['output'] = str(result)
29
+ context['messages'] = output_messages
30
+ except Exception as e:
31
+ traceback.print_exc()
32
+ context['output'] = f"Error executing plan: {e}"
33
+ context['messages'] = output_messages
@@ -0,0 +1,66 @@
1
+ jinx_name: "roll"
2
+ description: "Generate a video from a text prompt."
3
+ inputs:
4
+ - prompt: "" # Required text prompt for video generation.
5
+ - num_frames: 125 # Number of frames for the video.
6
+ - width: 256 # Width of the video.
7
+ - height: 256 # Height of the video.
8
+ - output_path: "output.mp4" # Output file path for the video.
9
+ - vgmodel: "" # Video generation model to use. Defaults to NPCSH_VIDEO_GEN_MODEL or NPC's model.
10
+ - vgprovider: "" # Video generation provider to use. Defaults to NPCSH_VIDEO_GEN_PROVIDER or NPC's provider.
11
+ steps:
12
+ - name: "generate_video"
13
+ engine: "python"
14
+ code: |
15
+ import traceback
16
+ from npcpy.llm_funcs import gen_video
17
+ # Assuming NPCSH_VIDEO_GEN_MODEL and NPCSH_VIDEO_GEN_PROVIDER are accessible
18
+
19
+ prompt = context.get('prompt')
20
+ num_frames = int(context.get('num_frames', 125)) # Ensure int type
21
+ width = int(context.get('width', 256)) # Ensure int type
22
+ height = int(context.get('height', 256)) # Ensure int type
23
+ output_path = context.get('output_path')
24
+ video_gen_model = context.get('vgmodel')
25
+ video_gen_provider = context.get('vgprovider')
26
+ output_messages = context.get('messages', [])
27
+ current_npc = context.get('npc')
28
+
29
+ if not prompt or not prompt.strip():
30
+ context['output'] = "Usage: /roll <your prompt>"
31
+ context['messages'] = output_messages
32
+ exit()
33
+
34
+ # Fallback for model/provider if not explicitly set in Jinx inputs
35
+ if not video_gen_model and current_npc and current_npc.model:
36
+ video_gen_model = current_npc.model
37
+ if not video_gen_provider and current_npc and current_npc.provider:
38
+ video_gen_provider = current_npc.provider
39
+
40
+ # Final fallbacks (these would ideally come from npcsh._state config)
41
+ if not video_gen_model: video_gen_model = "stable-video-diffusion" # Example default
42
+ if not video_gen_provider: video_gen_provider = "diffusers" # Example default
43
+
44
+ try:
45
+ result = gen_video(
46
+ prompt=prompt,
47
+ model=video_gen_model,
48
+ provider=video_gen_provider,
49
+ npc=current_npc,
50
+ num_frames=num_frames,
51
+ width=width,
52
+ height=height,
53
+ output_path=output_path,
54
+ **context.get('api_kwargs', {}) # Assuming api_kwargs might be passed
55
+ )
56
+
57
+ if isinstance(result, dict):
58
+ context['output'] = result.get('output', 'Video generated.')
59
+ context['messages'] = result.get('messages', output_messages)
60
+ else:
61
+ context['output'] = str(result)
62
+ context['messages'] = output_messages
63
+ except Exception as e:
64
+ traceback.print_exc()
65
+ context['output'] = f"Error generating video: {e}"
66
+ context['messages'] = output_messages