npcsh 1.1.10__tar.gz → 1.1.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. {npcsh-1.1.10 → npcsh-1.1.11}/PKG-INFO +1 -1
  2. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/_state.py +22 -8
  3. npcsh-1.1.11/npcsh/npc_team/jinxs/code/sh.jinx +38 -0
  4. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/code/sql.jinx +2 -2
  5. npcsh-1.1.11/npcsh/npc_team/jinxs/utils/agent.jinx +17 -0
  6. npcsh-1.1.11/npcsh/npc_team/jinxs/utils/chat.jinx +17 -0
  7. npcsh-1.1.11/npcsh/npc_team/jinxs/utils/vixynt.jinx +144 -0
  8. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh.egg-info/PKG-INFO +1 -1
  9. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh.egg-info/SOURCES.txt +2 -0
  10. {npcsh-1.1.10 → npcsh-1.1.11}/setup.py +1 -1
  11. npcsh-1.1.10/npcsh/npc_team/jinxs/code/sh.jinx +0 -19
  12. npcsh-1.1.10/npcsh/npc_team/jinxs/utils/vixynt.jinx +0 -117
  13. {npcsh-1.1.10 → npcsh-1.1.11}/LICENSE +0 -0
  14. {npcsh-1.1.10 → npcsh-1.1.11}/README.md +0 -0
  15. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/__init__.py +0 -0
  16. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/alicanto.py +0 -0
  17. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/build.py +0 -0
  18. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/corca.py +0 -0
  19. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/guac.py +0 -0
  20. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/mcp_helpers.py +0 -0
  21. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/mcp_server.py +0 -0
  22. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc.py +0 -0
  23. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/alicanto.npc +0 -0
  24. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/alicanto.png +0 -0
  25. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/corca.npc +0 -0
  26. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/corca.png +0 -0
  27. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/corca_example.png +0 -0
  28. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/foreman.npc +0 -0
  29. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/frederic.npc +0 -0
  30. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/frederic4.png +0 -0
  31. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/guac.png +0 -0
  32. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/code/python.jinx +0 -0
  33. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/modes/alicanto.jinx +0 -0
  34. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/modes/corca.jinx +0 -0
  35. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/modes/guac.jinx +0 -0
  36. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/modes/plonk.jinx +0 -0
  37. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/modes/pti.jinx +0 -0
  38. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/modes/spool.jinx +0 -0
  39. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/modes/wander.jinx +0 -0
  40. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/modes/yap.jinx +0 -0
  41. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/npc_studio/npc-studio.jinx +0 -0
  42. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/compress.jinx +0 -0
  43. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/core/build.jinx +0 -0
  44. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/core/compile.jinx +0 -0
  45. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/core/help.jinx +0 -0
  46. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/core/init.jinx +0 -0
  47. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/core/jinxs.jinx +0 -0
  48. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/core/set.jinx +0 -0
  49. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/edit_file.jinx +0 -0
  50. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/load_file.jinx +0 -0
  51. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/ots.jinx +0 -0
  52. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/roll.jinx +0 -0
  53. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/sample.jinx +0 -0
  54. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/search.jinx +0 -0
  55. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/serve.jinx +0 -0
  56. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/sleep.jinx +0 -0
  57. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/jinxs/utils/trigger.jinx +0 -0
  58. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/kadiefa.npc +0 -0
  59. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/kadiefa.png +0 -0
  60. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/npcsh.ctx +0 -0
  61. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/npcsh_sibiji.png +0 -0
  62. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/plonk.npc +0 -0
  63. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/plonk.png +0 -0
  64. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/plonkjr.npc +0 -0
  65. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/plonkjr.png +0 -0
  66. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/sibiji.npc +0 -0
  67. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/sibiji.png +0 -0
  68. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/spool.png +0 -0
  69. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npc_team/yap.png +0 -0
  70. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/npcsh.py +0 -0
  71. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/plonk.py +0 -0
  72. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/pti.py +0 -0
  73. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/routes.py +0 -0
  74. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/spool.py +0 -0
  75. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/wander.py +0 -0
  76. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh/yap.py +0 -0
  77. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh.egg-info/dependency_links.txt +0 -0
  78. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh.egg-info/entry_points.txt +0 -0
  79. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh.egg-info/requires.txt +0 -0
  80. {npcsh-1.1.10 → npcsh-1.1.11}/npcsh.egg-info/top_level.txt +0 -0
  81. {npcsh-1.1.10 → npcsh-1.1.11}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: npcsh
- Version: 1.1.10
+ Version: 1.1.11
  Summary: npcsh is a command-line toolkit for using AI agents in novel ways.
  Home-page: https://github.com/NPC-Worldwide/npcsh
  Author: Christopher Agostino
@@ -2475,7 +2475,9 @@ def execute_command(
      review=review,
      router=router
  )
-
+ if isinstance(output, dict) and 'output' in output:
+     output = output['output']
+
  if is_last_command:
      print(colored("✅ Pipeline complete", "green"))
  return current_state, output
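Note: several of the jinxs added below (agent, chat, vixynt, sh) hand their results back by assigning context['output'], so a pipeline step can now return a dict rather than a plain string; the added check unwraps it. A minimal sketch of that behavior, using a hypothetical helper name (unwrap_jinx_output is not part of the package):

    def unwrap_jinx_output(output):
        # Mirrors the added check in execute_command: prefer the 'output'
        # key when a step returns its whole context-style dict.
        if isinstance(output, dict) and 'output' in output:
            return output['output']
        return output

    assert unwrap_jinx_output({'output': 'hello', 'model': 'some-model'}) == 'hello'
    assert unwrap_jinx_output('plain text') == 'plain text'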
@@ -2821,7 +2823,6 @@ def process_memory_approvals(command_history, memory_queue):
          approval['decision'],
          approval.get('final_memory')
      )
-
  def process_result(
      user_input: str,
      result_state: ShellState,
@@ -2854,10 +2855,17 @@ def process_result(

  final_output_str = None

+ # FIX: Handle dict output properly
  if isinstance(output, dict):
      output_content = output.get('output')
      model_for_stream = output.get('model', active_npc.model)
      provider_for_stream = output.get('provider', active_npc.provider)
+
+     # If output_content is still a dict or None, convert to string
+     if isinstance(output_content, dict):
+         output_content = str(output_content)
+     elif output_content is None:
+         output_content = "Command completed with no output"
  else:
      output_content = output
      model_for_stream = active_npc.model
@@ -2870,15 +2878,21 @@ def process_result(
      else:
          render_markdown(str(output_content))
  elif result_state.stream_output:
-     final_output_str = print_and_process_stream_with_markdown(
-         output_content,
-         model_for_stream,
-         provider_for_stream,
-         show=True
-     )
+     # FIX: Only stream if output_content is a generator, not a string
+     if isinstance(output_content, str):
+         final_output_str = output_content
+         render_markdown(final_output_str)
+     else:
+         final_output_str = print_and_process_stream_with_markdown(
+             output_content,
+             model_for_stream,
+             provider_for_stream,
+             show=True
+         )
  elif output_content is not None:
      final_output_str = str(output_content)
      render_markdown(final_output_str)
+

  if final_output_str:
      if result_state.messages:
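Note: with stream_output enabled, process_result previously fed whatever it had to print_and_process_stream_with_markdown, which expects a streaming response; the fix renders already-materialized strings directly and only streams everything else. A minimal sketch of the dispatch, with stand-in callables for the real npcsh helpers:

    def handle_streamable(output_content, render_markdown, stream_and_render):
        # Already-materialized text is rendered as-is instead of being
        # pushed through the stream processor.
        if isinstance(output_content, str):
            render_markdown(output_content)
            return output_content
        # Anything else (e.g. a provider response generator) goes through
        # the streaming renderer, which returns the accumulated text.
        return stream_and_render(output_content)

    # Example with trivial stand-ins:
    handle_streamable("already a string", print, lambda gen: "".join(gen))
    handle_streamable(iter(["chunk1", "chunk2"]), print, lambda gen: "".join(gen))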
@@ -0,0 +1,38 @@
+ jinx_name: sh
+ description: Execute bash queries. Should be used to grep for file contents, list directories, explore information to answer user questions more practically.
+ inputs:
+   - bash_command
+ steps:
+   - name: execute_bash
+     engine: python
+     code: |
+       import subprocess
+       import sys  # Import sys to explicitly write to stderr for visibility
+
+       # Force a simple print to see if anything comes out
+       print("--- Jinx 'sh' code started ---", file=sys.stderr)
+
+       cmd = '{{ bash_command }}'
+
+       # Initialize output to an empty string to ensure it always exists
+       output = ""
+
+
+       process = subprocess.Popen(
+           cmd,
+           shell=True,
+           stdout=subprocess.PIPE,
+           stderr=subprocess.PIPE
+       )
+       stdout, stderr = process.communicate()
+
+       # Print raw stdout/stderr to sys.stderr for better visibility in some environments
+       print(f"Jinx 'sh' raw stdout: {stdout.decode('utf-8', errors='ignore')}", file=sys.stderr)
+       print(f"Jinx 'sh' raw stderr: {stderr.decode('utf-8', errors='ignore')}", file=sys.stderr)
+
+       if stderr:
+           output = f"Error: {stderr.decode('utf-8')}"
+       else:
+           output = stdout.decode('utf-8')
+
+
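Note: the rewritten sh jinx (compare the deleted npcsh-1.1.10 version further down) captures stdout and stderr separately and always defines output. A rough standalone sketch of the same subprocess pattern, with a sample command substituted for the '{{ bash_command }}' template placeholder:

    import subprocess

    process = subprocess.Popen(
        "ls -la",                 # stands in for the rendered {{ bash_command }}
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    stdout, stderr = process.communicate()

    # As in the jinx: report stderr if present, otherwise the decoded stdout.
    output = f"Error: {stderr.decode('utf-8')}" if stderr else stdout.decode('utf-8')
    print(output)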
@@ -1,4 +1,4 @@
- jinx_name: sql_executor
+ jinx_name: sql
  description: Execute queries on the ~/npcsh_history.db to pull data. The database
    contains only information about conversations and other user-provided data. It does
    not store any information about individual files. Avoid using percent signs unless absolutely necessary.
@@ -13,4 +13,4 @@ steps:
          df = pd.read_sql_query(query, npc.db_conn)
      except Exception as e:
          df = pd.DataFrame({'Error': [str(e)]})
-     output = df.to_string()
+     output = df.to_string()
@@ -0,0 +1,17 @@
+ jinx_name: agent
+ description: Provides an LLM response with tool use enabled.
+ inputs:
+   - query
+   - auto_process_tool_calls: True
+   - use_core_tools: True
+ steps:
+   - name: get_agent_response
+     engine: python
+     code: |
+       response = npc.get_llm_response(
+           request=query,
+           messages=context.get('messages', []),
+           auto_process_tool_calls={{ auto_process_tool_calls | default(True) }},
+           use_core_tools={{ use_core_tools | default(True) }}
+       )
+       output = response.get('response', '')
@@ -0,0 +1,17 @@
+ jinx_name: chat
+ description: Provides a direct LLM response without tool use.
+ inputs:
+   - query
+   - auto_process_tool_calls: False
+   - use_core_tools: False
+ steps:
+   - name: get_chat_response
+     engine: python
+     code: |
+       response = npc.get_llm_response(
+           request=query,
+           messages=context.get('messages', []),
+           auto_process_tool_calls={{ auto_process_tool_calls | default(False) }},
+           use_core_tools={{ use_core_tools | default(False) }}
+       )
+       output = response.get('response', '')
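Note: agent.jinx and chat.jinx are identical apart from their defaults for auto_process_tool_calls and use_core_tools. A sketch of what the two templates reduce to once the Jinja defaults are rendered (npc, query, and context are supplied by the jinx runtime; the wrapper functions are illustrative only):

    def rendered_agent_step(npc, query, context):
        response = npc.get_llm_response(
            request=query,
            messages=context.get('messages', []),
            auto_process_tool_calls=True,   # agent.jinx default
            use_core_tools=True,            # agent.jinx default
        )
        return response.get('response', '')

    def rendered_chat_step(npc, query, context):
        response = npc.get_llm_response(
            request=query,
            messages=context.get('messages', []),
            auto_process_tool_calls=False,  # chat.jinx default
            use_core_tools=False,           # chat.jinx default
        )
        return response.get('response', '')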
@@ -0,0 +1,144 @@
+ jinx_name: "vixynt"
+ description: "Generates images from text descriptions or edits existing ones."
+ inputs:
+   - prompt
+   - model: null
+   - provider: null
+   - output_name: null
+   - attachments: null
+   - n_images: null
+   - height: null
+   - width: null
+ steps:
+   - name: "generate_or_edit_image"
+     engine: "python"
+     code: |
+       import os
+       import base64
+       from io import BytesIO
+       from datetime import datetime
+       from PIL import Image
+       from npcpy.llm_funcs import gen_image
+
+       # Extract inputs from context with proper type conversion
+       image_prompt = str(context.get('prompt', '')).strip()
+       output_name = context.get('output_name')
+       attachments_str = context.get('attachments')
+
+       # Handle integer inputs - they may come as strings or ints
+       try:
+           n_images = int(context.get('n_images', 1))
+       except (ValueError, TypeError):
+           n_images = 1
+
+       try:
+           height = int(context.get('height', 1024))
+       except (ValueError, TypeError):
+           height = 1024
+
+       try:
+           width = int(context.get('width', 1024))
+       except (ValueError, TypeError):
+           width = 1024
+
+       # Get model and provider, prioritizing context, then NPC, then environment variables
+       model = context.get('model')
+       provider = context.get('provider')
+
+       # Use NPC's model/provider as fallback
+       if not model and npc and hasattr(npc, 'model') and npc.model:
+           model = npc.model
+       if not provider and npc and hasattr(npc, 'provider') and npc.provider:
+           provider = npc.provider
+
+       # Fallback to environment variables
+       if not model:
+           model = os.getenv('NPCSH_IMAGE_GEN_MODEL')
+       if not provider:
+           provider = os.getenv('NPCSH_IMAGE_GEN_PROVIDER')
+
+       # Final hardcoded fallbacks if nothing else is set
+       if not model:
+           model = "runwayml/stable-diffusion-v1-5"
+       if not provider:
+           provider = "diffusers"
+
+       # Parse attachments
+       input_images = []
+       if attachments_str and str(attachments_str).strip():
+           input_images = [p.strip() for p in str(attachments_str).split(',')]
+
+       output_messages = context.get('messages', [])
+
+       if not image_prompt:
+           output = "Error: No prompt provided for image generation."
+       else:
+           try:
+               # Generate image(s)
+               result = gen_image(
+                   prompt=image_prompt,
+                   model=model,
+                   provider=provider,
+                   npc=npc,
+                   height=height,
+                   width=width,
+                   n_images=n_images,
+                   input_images=input_images if input_images else None
+               )
+
+               # Ensure we have a list of images
+               if not isinstance(result, list):
+                   images_list = [result] if result is not None else []
+               else:
+                   images_list = result
+
+               saved_files = []
+               html_image_tags = []  # This list will store the raw HTML <img> tags
+
+               for i, image in enumerate(images_list):
+                   if image is None:
+                       continue
+
+                   # Determine output filename
+                   if output_name and str(output_name).strip():
+                       base_name, ext = os.path.splitext(os.path.expanduser(str(output_name)))
+                       if not ext:
+                           ext = ".png"
+                       current_output_file = f"{base_name}_{i}{ext}" if len(images_list) > 1 else f"{base_name}{ext}"
+                   else:
+                       os.makedirs(os.path.expanduser("~/.npcsh/images/"), exist_ok=True)
+                       current_output_file = (
+                           os.path.expanduser("~/.npcsh/images/")
+                           + f"image_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{i}.png"
+                       )
+
+                   # Save image to file
+                   image.save(current_output_file)
+                   saved_files.append(current_output_file)
+
+                   # Convert image to base64 and create an HTML <img> tag
+                   with open(current_output_file, 'rb') as f:
+                       img_data = base64.b64encode(f.read()).decode()
+                   # Using raw HTML <img> tag with data URI
+                   html_image_tags.append(f'<img src="data:image/png;base64,{img_data}" alt="Generated Image {i+1}" style="max-width: 100%; display: block; margin-top: 10px;">')
+
+               if saved_files:
+                   output_text_message = f"Image(s) generated and saved to: {', '.join(saved_files)}"
+                   if input_images:
+                       output_text_message = f"Image(s) edited and saved to: {', '.join(saved_files)}"
+
+                   output = output_text_message  # Keep the text message clean
+                   output += f"\n\nThe image files have been saved and are ready to view."
+                   output += "\n\n" + "\n".join(html_image_tags)  # Append all HTML <img> tags to the output
+               else:
+                   output = "No images were generated."
+
+           except Exception as e:
+               import traceback
+               traceback.print_exc()
+               output = f"Error {'editing' if input_images else 'generating'} image: {str(e)}"
+
+       context['output'] = output
+       context['messages'] = output_messages
+       context['model'] = model
+       context['provider'] = provider
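Note: the new vixynt.jinx switches its optional inputs from empty-string defaults to null, embeds the generated images as base64 <img> tags in its output, and resolves model/provider through a fallback chain: explicit input, then the NPC's own attributes, then the NPCSH_IMAGE_GEN_MODEL / NPCSH_IMAGE_GEN_PROVIDER environment variables, then hardcoded defaults. A minimal sketch of that chain (resolve_image_model is illustrative, not part of the package):

    import os

    def resolve_image_model(context, npc):
        model = context.get('model')
        provider = context.get('provider')
        # NPC attributes come next, as in the jinx's hasattr checks.
        if not model and npc is not None and getattr(npc, 'model', None):
            model = npc.model
        if not provider and npc is not None and getattr(npc, 'provider', None):
            provider = npc.provider
        # Environment variables, then hardcoded defaults, close the chain.
        model = model or os.getenv('NPCSH_IMAGE_GEN_MODEL') or "runwayml/stable-diffusion-v1-5"
        provider = provider or os.getenv('NPCSH_IMAGE_GEN_PROVIDER') or "diffusers"
        return model, provider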
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: npcsh
- Version: 1.1.10
+ Version: 1.1.11
  Summary: npcsh is a command-line toolkit for using AI agents in novel ways.
  Home-page: https://github.com/NPC-Worldwide/npcsh
  Author: Christopher Agostino
@@ -56,6 +56,8 @@ npcsh/npc_team/jinxs/modes/spool.jinx
  npcsh/npc_team/jinxs/modes/wander.jinx
  npcsh/npc_team/jinxs/modes/yap.jinx
  npcsh/npc_team/jinxs/npc_studio/npc-studio.jinx
+ npcsh/npc_team/jinxs/utils/agent.jinx
+ npcsh/npc_team/jinxs/utils/chat.jinx
  npcsh/npc_team/jinxs/utils/compress.jinx
  npcsh/npc_team/jinxs/utils/edit_file.jinx
  npcsh/npc_team/jinxs/utils/load_file.jinx
@@ -78,7 +78,7 @@ extra_files = package_files("npcsh/npc_team/")

  setup(
      name="npcsh",
-     version="1.1.10",
+     version="1.1.11",
      packages=find_packages(exclude=["tests*"]),
      install_requires=base_requirements,  # Only install base requirements by default
      extras_require={
@@ -1,19 +0,0 @@
- jinx_name: sh
- description: Execute bash queries. Should be used to grep for file contents, list directories, explore information to answer user questions more practically.
- inputs:
-   - bash_command
- steps:
-   - engine: python
-     code: |
-       import subprocess
-       import os
-       cmd = '{{bash_command}}'
-       def run_command(cmd):
-           process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-           stdout, stderr = process.communicate()
-           if stderr:
-               print(f"Error: {stderr.decode('utf-8')}")
-               return stderr
-           return stdout
-       result = run_command(cmd)
-       output = result.decode('utf-8')
@@ -1,117 +0,0 @@
- jinx_name: "vixynt"
- description: "Generates images from text descriptions or edits existing ones."
- inputs:
-   - prompt
-   - model: ""
-   - provider: ""
-   - output_name: ""
-   - attachments: ""
-   - n_images: 1
-   - height: 1024
-   - width: 1024
- steps:
-   - name: "generate_or_edit_image"
-     engine: "python"
-     code: |
-       import os
-       import base64
-       from io import BytesIO
-       from datetime import datetime
-       from PIL import Image
-       from npcpy.llm_funcs import gen_image
-
-       # Extract inputs from context
-       image_prompt = context.get('prompt', '').strip()
-       output_name = context.get('output_name')
-       attachments_str = context.get('attachments')
-       n_images = int(context.get('n_images', 1))
-       height = int(context.get('height', 1024))
-       width = int(context.get('width', 1024))
-       model = context.get('model')
-       provider = context.get('provider')
-
-       input_images = []
-       if attachments_str and attachments_str.strip():
-           input_images = [p.strip() for p in attachments_str.split(',')]
-
-       # Use NPC's model/provider as fallback
-       if not model and npc and npc.model:
-           model = npc.model
-       if not provider and npc and npc.provider:
-           provider = npc.provider
-
-       # Final fallbacks
-       if not model:
-           model = "runwayml/stable-diffusion-v1-5"
-       if not provider:
-           provider = "diffusers"
-
-       output_messages = context.get('messages', [])
-
-       if not image_prompt:
-           context['output'] = "Error: No prompt provided for image generation."
-           context['messages'] = output_messages
-           exit()
-
-       try:
-           # Generate image(s)
-           result = gen_image(
-               prompt=image_prompt,
-               model=model,
-               provider=provider,
-               npc=npc,
-               height=height,
-               width=width,
-               n_images=n_images,
-               input_images=input_images if input_images else None
-           )
-
-           # Ensure we have a list of images
-           if not isinstance(result, list):
-               images_list = [result] if result is not None else []
-           else:
-               images_list = result
-
-           saved_files = []
-
-           for i, image in enumerate(images_list):
-               if image is None:
-                   continue
-
-               # Determine output filename
-               if output_name and output_name.strip():
-                   base_name, ext = os.path.splitext(os.path.expanduser(output_name))
-                   if not ext:
-                       ext = ".png"
-                   current_output_file = f"{base_name}_{i}{ext}" if len(images_list) > 1 else f"{base_name}{ext}"
-               else:
-                   os.makedirs(os.path.expanduser("~/.npcsh/images/"), exist_ok=True)
-                   current_output_file = (
-                       os.path.expanduser("~/.npcsh/images/")
-                       + f"image_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{i}.png"
-                   )
-
-               # Save image to file
-               image.save(current_output_file)
-               saved_files.append(current_output_file)
-
-           if saved_files:
-               if input_images:
-                   output = f"Image(s) edited and saved to: {', '.join(saved_files)}"
-               else:
-                   output = f"Image(s) generated and saved to: {', '.join(saved_files)}"
-
-               # DO NOT include base64 data - just reference the file paths
-               output += f"\n\nThe image files have been saved and are ready to view."
-           else:
-               output = "No images were generated."
-
-       except Exception as e:
-           import traceback
-           traceback.print_exc()
-           output = f"Error {'editing' if input_images else 'generating'} image: {str(e)}"
-
-       context['output'] = output
-       context['messages'] = output_messages
-       context['model'] = model
-       context['provider'] = provider