npcpy 1.2.30__tar.gz → 1.2.32__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72):
  1. {npcpy-1.2.30/npcpy.egg-info → npcpy-1.2.32}/PKG-INFO +1 -1
  2. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/gen/response.py +73 -20
  3. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/gen/video_gen.py +2 -1
  4. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/llm_funcs.py +2 -1
  5. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/npc_sysenv.py +44 -20
  6. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/serve.py +1 -0
  7. {npcpy-1.2.30 → npcpy-1.2.32/npcpy.egg-info}/PKG-INFO +1 -1
  8. {npcpy-1.2.30 → npcpy-1.2.32}/setup.py +1 -1
  9. {npcpy-1.2.30 → npcpy-1.2.32}/LICENSE +0 -0
  10. {npcpy-1.2.30 → npcpy-1.2.32}/MANIFEST.in +0 -0
  11. {npcpy-1.2.30 → npcpy-1.2.32}/README.md +0 -0
  12. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/__init__.py +0 -0
  13. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/data/__init__.py +0 -0
  14. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/data/audio.py +0 -0
  15. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/data/data_models.py +0 -0
  16. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/data/image.py +0 -0
  17. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/data/load.py +0 -0
  18. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/data/text.py +0 -0
  19. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/data/video.py +0 -0
  20. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/data/web.py +0 -0
  21. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/ft/__init__.py +0 -0
  22. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/ft/diff.py +0 -0
  23. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/ft/ge.py +0 -0
  24. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/ft/memory_trainer.py +0 -0
  25. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/ft/model_ensembler.py +0 -0
  26. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/ft/rl.py +0 -0
  27. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/ft/sft.py +0 -0
  28. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/ft/usft.py +0 -0
  29. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/gen/__init__.py +0 -0
  30. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/gen/audio_gen.py +0 -0
  31. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/gen/embeddings.py +0 -0
  32. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/gen/image_gen.py +0 -0
  33. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/main.py +0 -0
  34. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/memory/__init__.py +0 -0
  35. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/memory/command_history.py +0 -0
  36. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/memory/kg_vis.py +0 -0
  37. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/memory/knowledge_graph.py +0 -0
  38. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/memory/memory_processor.py +0 -0
  39. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/memory/search.py +0 -0
  40. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/mix/__init__.py +0 -0
  41. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/mix/debate.py +0 -0
  42. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/npc_compiler.py +0 -0
  43. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/npcs.py +0 -0
  44. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/sql/__init__.py +0 -0
  45. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/sql/ai_function_tools.py +0 -0
  46. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/sql/database_ai_adapters.py +0 -0
  47. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/sql/database_ai_functions.py +0 -0
  48. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/sql/model_runner.py +0 -0
  49. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/sql/npcsql.py +0 -0
  50. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/sql/sql_model_compiler.py +0 -0
  51. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/tools.py +0 -0
  52. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/work/__init__.py +0 -0
  53. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/work/desktop.py +0 -0
  54. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/work/plan.py +0 -0
  55. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy/work/trigger.py +0 -0
  56. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy.egg-info/SOURCES.txt +0 -0
  57. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy.egg-info/dependency_links.txt +0 -0
  58. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy.egg-info/requires.txt +0 -0
  59. {npcpy-1.2.30 → npcpy-1.2.32}/npcpy.egg-info/top_level.txt +0 -0
  60. {npcpy-1.2.30 → npcpy-1.2.32}/setup.cfg +0 -0
  61. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_audio.py +0 -0
  62. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_command_history.py +0 -0
  63. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_image.py +0 -0
  64. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_llm_funcs.py +0 -0
  65. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_load.py +0 -0
  66. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_npc_compiler.py +0 -0
  67. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_npcsql.py +0 -0
  68. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_response.py +0 -0
  69. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_serve.py +0 -0
  70. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_text.py +0 -0
  71. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_tools.py +0 -0
  72. {npcpy-1.2.30 → npcpy-1.2.32}/tests/test_web.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: npcpy
3
- Version: 1.2.30
3
+ Version: 1.2.32
4
4
  Summary: npcpy is the premier open-source library for integrating LLMs and Agents into python systems.
5
5
  Home-page: https://github.com/NPC-Worldwide/npcpy
6
6
  Author: Christopher Agostino
@@ -378,21 +378,50 @@ def get_ollama_response(
378
378
 
379
379
  result["response"] = ollama.chat(**stream_api_params, options=options)
380
380
  else:
381
-
381
+
382
382
  if format == "json":
383
383
  try:
384
- if isinstance(response_content, str):
385
- if response_content.startswith("```json"):
386
- response_content = (
387
- response_content.replace("```json", "")
388
- .replace("```", "")
389
- .strip()
390
- )
391
- parsed_response = json.loads(response_content)
392
- result["response"] = parsed_response
393
- except json.JSONDecodeError:
394
- result["error"] = f"Invalid JSON response: {response_content}"
395
-
384
+ if isinstance(llm_response, str):
385
+ llm_response = llm_response.strip()
386
+
387
+ if '```json' in llm_response:
388
+ start = llm_response.find('```json') + 7
389
+ end = llm_response.rfind('```')
390
+ if end > start:
391
+ llm_response = llm_response[start:end].strip()
392
+
393
+ first_brace = llm_response.find('{')
394
+ first_bracket = llm_response.find('[')
395
+
396
+ if first_brace == -1 and first_bracket == -1:
397
+ result["response"] = {}
398
+ result["error"] = "No JSON found in response"
399
+ return result
400
+
401
+ if first_brace != -1 and (first_bracket == -1 or first_brace < first_bracket):
402
+ llm_response = llm_response[first_brace:]
403
+ last_brace = llm_response.rfind('}')
404
+ if last_brace != -1:
405
+ llm_response = llm_response[:last_brace+1]
406
+ else:
407
+ llm_response = llm_response[first_bracket:]
408
+ last_bracket = llm_response.rfind(']')
409
+ if last_bracket != -1:
410
+ llm_response = llm_response[:last_bracket+1]
411
+
412
+ parsed_json = json.loads(llm_response, strict=False)
413
+
414
+ if "json" in parsed_json:
415
+ result["response"] = parsed_json["json"]
416
+ else:
417
+ result["response"] = parsed_json
418
+
419
+ except (json.JSONDecodeError, TypeError) as e:
420
+ print(f"JSON parsing error: {str(e)}")
421
+ print(f"Raw response: {llm_response[:500]}")
422
+ result["response"] = {}
423
+ result["error"] = "Invalid JSON response"
424
+
396
425
  return result
397
426
 
398
427
  import time
@@ -553,7 +582,7 @@ def get_litellm_response(
553
582
  litellm.include_cost_in_streaming_usage = True
554
583
  api_params['stream_options'] = {"include_usage": True}
555
584
 
556
- if api_url is not None and (provider == "openai-like" or provider == "openai"):
585
+ if api_url is not None and ('openai-like' in provider or provider == "openai-like" or provider == "openai"):
557
586
  api_params["api_base"] = api_url
558
587
  provider = "openai"
559
588
 
@@ -609,14 +638,37 @@ def get_litellm_response(
609
638
 
610
639
  if hasattr(resp.choices[0].message, 'tool_calls') and resp.choices[0].message.tool_calls:
611
640
  result["tool_calls"] = resp.choices[0].message.tool_calls
612
-
613
-
614
641
  if format == "json":
615
642
  try:
616
643
  if isinstance(llm_response, str):
617
- if llm_response.startswith("```json"):
618
- llm_response = llm_response.replace("```json", "").replace("```", "").strip()
619
- parsed_json = json.loads(llm_response)
644
+ llm_response = llm_response.strip()
645
+
646
+ if '```json' in llm_response:
647
+ start = llm_response.find('```json') + 7
648
+ end = llm_response.rfind('```')
649
+ if end > start:
650
+ llm_response = llm_response[start:end].strip()
651
+
652
+ first_brace = llm_response.find('{')
653
+ first_bracket = llm_response.find('[')
654
+
655
+ if first_brace == -1 and first_bracket == -1:
656
+ result["response"] = {}
657
+ result["error"] = "No JSON found in response"
658
+ return result
659
+
660
+ if first_brace != -1 and (first_bracket == -1 or first_brace < first_bracket):
661
+ llm_response = llm_response[first_brace:]
662
+ last_brace = llm_response.rfind('}')
663
+ if last_brace != -1:
664
+ llm_response = llm_response[:last_brace+1]
665
+ else:
666
+ llm_response = llm_response[first_bracket:]
667
+ last_bracket = llm_response.rfind(']')
668
+ if last_bracket != -1:
669
+ llm_response = llm_response[:last_bracket+1]
670
+
671
+ parsed_json = json.loads(llm_response, strict=False)
620
672
 
621
673
  if "json" in parsed_json:
622
674
  result["response"] = parsed_json["json"]
@@ -625,7 +677,8 @@ def get_litellm_response(
625
677
 
626
678
  except (json.JSONDecodeError, TypeError) as e:
627
679
  print(f"JSON parsing error: {str(e)}")
628
- print(f"Raw response: {llm_response}")
680
+ print(f"Raw response: {llm_response[:500]}")
681
+ result["response"] = {}
629
682
  result["error"] = "Invalid JSON response"
630
683
 
631
684
  return result
@@ -74,6 +74,7 @@ def generate_video_diffusers(
74
74
 
75
75
  def generate_video_veo3(
76
76
  prompt: str,
77
+ model: str,
77
78
  negative_prompt: str = "",
78
79
  output_path: str = "",
79
80
  ):
@@ -93,7 +94,7 @@ def generate_video_veo3(
93
94
  config.negative_prompt = negative_prompt
94
95
 
95
96
  operation = client.models.generate_videos(
96
- model="veo-3.0-generate-preview",
97
+ model=model,
97
98
  prompt=prompt,
98
99
  config=config,
99
100
  )
@@ -105,6 +105,7 @@ def gen_video(
105
105
  try:
106
106
  output_path = generate_video_veo3(
107
107
  prompt=prompt,
108
+ model=model,
108
109
  negative_prompt=negative_prompt,
109
110
  output_path=output_path,
110
111
  )
@@ -593,7 +594,7 @@ def handle_jinx_call(
593
594
  render_markdown(f""" ## jinx OUTPUT FROM CALLING {jinx_name} \n \n output:{jinx_output['output']}""" )
594
595
  response = get_llm_response(f"""
595
596
  The user had the following request: {command}.
596
- Here were the jinx outputs from calling {jinx_name}: {jinx_output}
597
+ Here were the jinx outputs from calling {jinx_name}: {jinx_output.get('output', '')}
597
598
 
598
599
  Given the jinx outputs and the user request, please format a simple answer that
599
600
  provides the answer without requiring the user to carry out any further steps.
@@ -144,7 +144,7 @@ def get_locally_available_models(project_directory, airplane_mode=False):
144
144
 
145
145
  models = fetch_custom_models()
146
146
  for model in models:
147
- available_models[model] = provider_name
147
+ available_models[model] = 'openai-like'
148
148
 
149
149
  logging.info(
150
150
  f"Loaded {len(models)} models "
@@ -157,32 +157,56 @@ def get_locally_available_models(project_directory, airplane_mode=False):
157
157
  f"custom provider '{provider_name}': {e}"
158
158
  )
159
159
 
160
-
160
+
161
161
  airplane_mode = False
162
162
  if not airplane_mode:
163
163
  timeout_seconds = 3.5
164
164
 
165
165
 
166
166
  with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
167
- if 'NPC_STUDIO_LICENSE_KEY' in env_vars or os.environ.get('NPC_STUDIO_LICENSE_KEY'):
168
- try:
169
- def fetch_enpisi_models():
170
- import requests
171
-
172
- api_url = 'https://api.enpisi.com'
173
- headers = {
174
- 'Authorization': f"Bearer {env_vars.get('NPC_STUDIO_LICENSE_KEY') or os.environ.get('NPC_STUDIO_LICENSE_KEY')}",
175
- 'Content-Type': 'application/json'
176
- }
177
- response = requests.get(f"{api_url}/models", headers=headers)
178
-
179
- return [model['id'] for model in response.json().get('data','')]
180
- for model in fetch_enpisi_models():
181
- available_models[model+'-npc'] = 'enpisi'
182
-
183
- except Exception as e:
184
- logging.error(f"Error fetching NPC Studio models: {e}")
167
+
168
+ if 'NPCSH_API_URL' in env_vars or os.environ.get('NPCSH_API_URL'):
169
+ try:
170
+ import requests
171
+
172
+ def fetch_custom_models():
173
+ base_url = env_vars.get('NPCSH_API_URL') or os.environ.get('NPCSH_API_URL')
174
+ models_endpoint = f"{base_url.rstrip('/')}/models"
175
+ response = requests.get(
176
+ models_endpoint,
177
+
178
+ timeout=3.5
179
+ )
180
+
181
+ if response.status_code == 200:
182
+ data = response.json()
183
+
184
+ if isinstance(data, dict) and 'data' in data:
185
+ return [
186
+ m['id'] for m in data['data']
187
+ if 'id' in m
188
+ ]
189
+ elif isinstance(data, list):
190
+ return [
191
+ m['id'] for m in data
192
+ if isinstance(m, dict) and 'id' in m
193
+ ]
194
+ return []
195
+
196
+ models = fetch_custom_models()
197
+ for model in models:
198
+ available_models[model] = 'openai-like'
199
+
185
200
 
201
+
202
+
203
+ except Exception as e:
204
+ logging.warning(
205
+ f"Failed to load models from "
206
+ f"custom provider 'openai-like': {e}"
207
+ )
208
+
209
+
186
210
  if "ANTHROPIC_API_KEY" in env_vars or os.environ.get("ANTHROPIC_API_KEY"):
187
211
  try:
188
212
  import anthropic
@@ -2166,6 +2166,7 @@ def stream():
2166
2166
  model=model,
2167
2167
  provider=provider,
2168
2168
  npc=npc_object,
2169
+ api_url = npc_object.api_url if npc_object.api_url else None,
2169
2170
  team=team_object,
2170
2171
  stream=True,
2171
2172
  attachments=attachment_paths_for_llm,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: npcpy
3
- Version: 1.2.30
3
+ Version: 1.2.32
4
4
  Summary: npcpy is the premier open-source library for integrating LLMs and Agents into python systems.
5
5
  Home-page: https://github.com/NPC-Worldwide/npcpy
6
6
  Author: Christopher Agostino
@@ -83,7 +83,7 @@ extra_files = package_files("npcpy/npc_team/")
83
83
 
84
84
  setup(
85
85
  name="npcpy",
86
- version="1.2.30",
86
+ version="1.2.32",
87
87
  packages=find_packages(exclude=["tests*"]),
88
88
  install_requires=base_requirements,
89
89
  extras_require={
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes