npcpy 1.2.23__py3-none-any.whl → 1.2.24__py3-none-any.whl

This diff reflects the changes between publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
npcpy/npc_sysenv.py CHANGED
@@ -63,7 +63,7 @@ warnings.filterwarnings("ignore", module="torch.serialization")
 os.environ["PYTHONWARNINGS"] = "ignore"
 os.environ["SDL_AUDIODRIVER"] = "dummy"
 
-def check_internet_connection(timeout=0.5):
+def check_internet_connection(timeout=5):
     """
     Checks for internet connectivity by trying to connect to a well-known host.
     """
@@ -87,16 +87,78 @@ def get_locally_available_models(project_directory, airplane_mode=False):
                 key, value = line.split("=", 1)
                 env_vars[key.strip()] = value.strip().strip("\"'")
 
-
     internet_available = check_internet_connection()
     if not internet_available:
-        logging.info("No internet connection detected. External API calls will be skipped (effective airplane_mode).")
-
+        logging.info(
+            "No internet connection detected. "
+            "External API calls will be skipped."
+        )
         airplane_mode = True
     else:
-        logging.info("Internet connection detected. Proceeding based on 'airplane_mode' parameter.")
+        logging.info(
+            "Internet connection detected. "
+            "Proceeding based on 'airplane_mode' parameter."
+        )
+
+    custom_providers = load_custom_providers()
+
+    for provider_name, config in custom_providers.items():
+        api_key_var = config.get('api_key_var')
+        if not api_key_var:
+            api_key_var = f"{provider_name.upper()}_API_KEY"
+
+        if api_key_var in env_vars or os.environ.get(api_key_var):
+            try:
+                import requests
+
+                def fetch_custom_models():
+                    base_url = config.get('base_url', '')
+                    headers = config.get('headers', {})
+
+                    api_key = env_vars.get(api_key_var) or \
+                        os.environ.get(api_key_var)
+                    if api_key:
+                        headers['Authorization'] = f'Bearer {api_key}'
+
+                    models_endpoint = f"{base_url.rstrip('/')}/models"
+                    response = requests.get(
+                        models_endpoint,
+                        headers=headers,
+                        timeout=3.5
+                    )
+
+                    if response.status_code == 200:
+                        data = response.json()
+
+                        if isinstance(data, dict) and 'data' in data:
+                            return [
+                                m['id'] for m in data['data']
+                                if 'id' in m
+                            ]
+                        elif isinstance(data, list):
+                            return [
+                                m['id'] for m in data
+                                if isinstance(m, dict) and 'id' in m
+                            ]
+                    return []
+
+                models = fetch_custom_models()
+                for model in models:
+                    available_models[model] = provider_name
+
+                logging.info(
+                    f"Loaded {len(models)} models "
+                    f"from custom provider '{provider_name}'"
+                )
+
+            except Exception as e:
+                logging.warning(
+                    f"Failed to load models from "
+                    f"custom provider '{provider_name}': {e}"
+                )
 
 
+    airplane_mode = False
     if not airplane_mode:
         timeout_seconds = 3.5
 
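
The per-provider fetch above issues an OpenAI-style GET {base_url}/models request. A self-contained sketch of the same call, using a hypothetical provider config whose base_url, api_key_var, and headers keys mirror what the loop reads (names and URL are illustrative, not from npcpy):

import os
import requests

# Hypothetical custom provider config; keys mirror what the loop above reads.
config = {
    "base_url": "https://llm.example.com/v1",   # illustrative URL
    "api_key_var": "EXAMPLE_API_KEY",           # illustrative env var name
    "headers": {},
}

headers = dict(config["headers"])
api_key = os.environ.get(config["api_key_var"])
if api_key:
    headers["Authorization"] = f"Bearer {api_key}"

resp = requests.get(f"{config['base_url'].rstrip('/')}/models",
                    headers=headers, timeout=3.5)
if resp.status_code == 200:
    data = resp.json()
    # OpenAI-style servers nest the list under "data"; some return a bare list.
    items = data["data"] if isinstance(data, dict) and "data" in data else data
    model_ids = [m["id"] for m in items if isinstance(m, dict) and "id" in m]

Each returned id is then registered in available_models under the provider's name.
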
@@ -802,50 +864,116 @@ def load_env_from_execution_dir() -> None:
 
 
 
+
 def lookup_provider(model: str) -> str:
     """
-    Function Description:
-    This function determines the provider based on the model name.
+    Determine the provider based on the model name.
+    Checks custom providers first, then falls back to known providers.
+
     Args:
-    model (str): The model name.
-    Keyword Args:
-    None
+        model: The model name
+
     Returns:
-    str: The provider based on the model name.
+        The provider name or None if not found
     """
+    custom_providers = load_custom_providers()
+
+    for provider_name, config in custom_providers.items():
+        if model.startswith(f"{provider_name}-"):
+            return provider_name
+
+        try:
+            import requests
+            api_key_var = config.get('api_key_var') or \
+                f"{provider_name.upper()}_API_KEY"
+            api_key = os.environ.get(api_key_var)
+
+            if api_key:
+                base_url = config.get('base_url', '')
+                headers = config.get('headers', {})
+                headers['Authorization'] = f'Bearer {api_key}'
+
+                models_endpoint = f"{base_url.rstrip('/')}/models"
+                response = requests.get(
+                    models_endpoint,
+                    headers=headers,
+                    timeout=1.0
+                )
+
+                if response.status_code == 200:
+                    data = response.json()
+                    models = []
+
+                    if isinstance(data, dict) and 'data' in data:
+                        models = [m['id'] for m in data['data']]
+                    elif isinstance(data, list):
+                        models = [m['id'] for m in data]
+
+                    if model in models:
+                        return provider_name
+        except:
+            pass
+
     if model == "deepseek-chat" or model == "deepseek-reasoner":
         return "deepseek"
+
     ollama_prefixes = [
-        "llama",
-        "deepseek",
-        "qwen",
-        "llava",
-        "phi",
-        "mistral",
-        "mixtral",
-        "dolphin",
-        "codellama",
-        "gemma",
-    ]
+        "llama", "deepseek", "qwen", "llava",
+        "phi", "mistral", "mixtral", "dolphin",
+        "codellama", "gemma",]
     if any(model.startswith(prefix) for prefix in ollama_prefixes):
         return "ollama"
 
-
     openai_prefixes = ["gpt-", "dall-e-", "whisper-", "o1"]
     if any(model.startswith(prefix) for prefix in openai_prefixes):
         return "openai"
 
-
     if model.startswith("claude"):
         return "anthropic"
     if model.startswith("gemini"):
         return "gemini"
     if "diffusion" in model:
         return "diffusers"
+
     return None
+
+
+def load_custom_providers():
+    """
+    Load custom provider configurations from .npcshrc
+
+    Returns:
+        dict: Custom provider configurations keyed by provider name
+    """
+    custom_providers = {}
+    npcshrc_path = os.path.expanduser("~/.npcshrc")
+
+    if os.path.exists(npcshrc_path):
+        with open(npcshrc_path, "r") as f:
+            for line in f:
+                line = line.split("#")[0].strip()
+                if "CUSTOM_PROVIDER_" in line and "=" in line:
+                    key, value = line.split("=", 1)
+                    key = key.strip().replace("export ", "")
+                    value = value.strip().strip("\"'")
+
+                    try:
+                        config = json.loads(value)
+                        provider_name = key.replace(
+                            "CUSTOM_PROVIDER_", ""
+                        ).lower()
+                        custom_providers[provider_name] = config
+                    except json.JSONDecodeError as e:
+                        logging.warning(
+                            f"Failed to parse custom provider {key}: {e}"
+                        )
+                        continue
+
+    return custom_providers
 load_env_from_execution_dir()
 deepseek_api_key = os.getenv("DEEPSEEK_API_KEY", None)
 gemini_api_key = os.getenv("GEMINI_API_KEY", None)
 
 anthropic_api_key = os.getenv("ANTHROPIC_API_KEY", None)
 openai_api_key = os.getenv("OPENAI_API_KEY", None)
+
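
load_custom_providers reads ~/.npcshrc line by line and treats any CUSTOM_PROVIDER_<NAME>=<JSON> entry as a provider definition, keyed by the lowercased name. A hypothetical entry and the parsing steps it goes through (the provider name, URL, and JSON keys are illustrative, chosen to match what the functions above consume):

import json

# Hypothetical ~/.npcshrc line; provider name and URL are illustrative.
line = 'export CUSTOM_PROVIDER_MYLLM={"base_url": "https://llm.example.com/v1", "api_key_var": "MYLLM_API_KEY"}'

line = line.split("#")[0].strip()
key, value = line.split("=", 1)
key = key.strip().replace("export ", "")
value = value.strip().strip("\"'")
provider_name = key.replace("CUSTOM_PROVIDER_", "").lower()
config = json.loads(value)
# provider_name == "myllm"
# config == {"base_url": "https://llm.example.com/v1", "api_key_var": "MYLLM_API_KEY"}

lookup_provider then resolves a model to this provider either by name prefix (a hypothetical "myllm-" prefix in this example) or by asking the provider's /models endpoint whether it serves the model.
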
npcpy/serve.py CHANGED
@@ -1942,7 +1942,7 @@ def stream():
 
                 print('.', end="", flush=True)
                 dot_count += 1
-                if "hf.co" in model or provider == 'ollama':
+                if "hf.co" in model or provider == 'ollama' and 'gpt-oss' not in model:
                     chunk_content = response_chunk["message"]["content"] if "message" in response_chunk and "content" in response_chunk["message"] else ""
                     if "message" in response_chunk and "tool_calls" in response_chunk["message"]:
                         for tool_call in response_chunk["message"]["tool_calls"]:
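
Since and binds tighter than or, the updated condition groups as ("hf.co" in model) or (provider == 'ollama' and 'gpt-oss' not in model). A quick check of that grouping with illustrative values:

model, provider = "gpt-oss:20b", "ollama"   # illustrative values
implicit = "hf.co" in model or provider == 'ollama' and 'gpt-oss' not in model
explicit = ("hf.co" in model) or (provider == 'ollama' and 'gpt-oss' not in model)
assert implicit == explicit and implicit is False  # gpt-oss models on ollama skip this branch
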
@@ -1959,7 +1959,9 @@ def stream():
                     if chunk_content:
                         complete_response.append(chunk_content)
                     chunk_data = {
-                        "id": None, "object": None, "created": response_chunk["created_at"], "model": response_chunk["model"],
+                        "id": None, "object": None,
+                        "created": response_chunk["created_at"] or datetime.datetime.now(),
+                        "model": response_chunk["model"],
                         "choices": [{"index": 0, "delta": {"content": chunk_content, "role": response_chunk["message"]["role"]}, "finish_reason": response_chunk.get("done_reason")}]
                     }
                     yield f"data: {json.dumps(chunk_data)}\n\n"
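
Each dict is flushed to the client as one OpenAI-style streaming chunk over Server-Sent Events. A sketch of a single emitted event, with illustrative values standing in for a real Ollama response_chunk:

import json

# Illustrative values; a real chunk carries Ollama's created_at, model, and message fields.
chunk_data = {
    "id": None, "object": None,
    "created": "2025-01-01T00:00:00Z",
    "model": "llama3.2",
    "choices": [{"index": 0,
                 "delta": {"content": "Hello", "role": "assistant"},
                 "finish_reason": None}],
}
event = f"data: {json.dumps(chunk_data)}\n\n"
# event == 'data: {"id": null, "object": null, "created": "2025-01-01T00:00:00Z", ...}\n\n'
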
npcpy-1.2.23.dist-info/METADATA → npcpy-1.2.24.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: npcpy
-Version: 1.2.23
+Version: 1.2.24
 Summary: npcpy is the premier open-source library for integrating LLMs and Agents into python systems.
 Home-page: https://github.com/NPC-Worldwide/npcpy
 Author: Christopher Agostino
npcpy-1.2.23.dist-info/RECORD → npcpy-1.2.24.dist-info/RECORD CHANGED
@@ -2,9 +2,9 @@ npcpy/__init__.py,sha256=9imxFtK74_6Rw9rz0kyMnZYl_voPb569tkTlYLt0Urg,131
 npcpy/llm_funcs.py,sha256=UkesCnRmclEoqBZPMZa2hKoSTjFzjxDCzPGKgeDegPQ,85101
 npcpy/main.py,sha256=RWoRIj6VQLxKdOKvdVyaq2kwG35oRpeXPvp1CAAoG-w,81
 npcpy/npc_compiler.py,sha256=10vu-9WUmlVzaFM_hMJH28iNS1IJXQP3Rb5RT1rZmpA,95326
-npcpy/npc_sysenv.py,sha256=lPYlKM_TeR4l4-Jcgiqq3CCge8b2oFHdfISD4L_G7eo,30308
+npcpy/npc_sysenv.py,sha256=H_A7BajE41W_r6TKt-uSUtMqruTHXIngYaRSPGQWFXE,35241
 npcpy/npcs.py,sha256=eExuVsbTfrRobTRRptRpDm46jCLWUgbvy4_U7IUQo-c,744
-npcpy/serve.py,sha256=O1dxISi0nQ6jsSOSxBXsULgkltnIcyBS6Z0AjfWmuXA,100296
+npcpy/serve.py,sha256=kc3j3puHJJEwUnXMY9nB6we93q1u32gOcRys87abJsM,100400
 npcpy/tools.py,sha256=A5_oVmZkzGnI3BI-NmneuxeXQq-r29PbpAZP4nV4jrc,5303
 npcpy/data/__init__.py,sha256=1tcoChR-Hjn905JDLqaW9ElRmcISCTJdE7BGXPlym2Q,642
 npcpy/data/audio.py,sha256=goon4HfsYgx0bI-n1lhkrzWPrJoejJlycXcB0P62pyk,11280
@@ -47,8 +47,8 @@ npcpy/work/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 npcpy/work/desktop.py,sha256=F3I8mUtJp6LAkXodsh8hGZIncoads6c_2Utty-0EdDA,2986
 npcpy/work/plan.py,sha256=QyUwg8vElWiHuoS-xK4jXTxxHvkMD3VkaCEsCmrEPQk,8300
 npcpy/work/trigger.py,sha256=P1Y8u1wQRsS2WACims_2IdkBEar-iBQix-2TDWoW0OM,9948
-npcpy-1.2.23.dist-info/licenses/LICENSE,sha256=j0YPvce7Ng9e32zYOu0EmXjXeJ0Nwawd0RA3uSGGH4E,1070
-npcpy-1.2.23.dist-info/METADATA,sha256=-Q2qDZh5Khizw_PcCZnLCyIkMmFyworeU8gPzgEhBpY,29885
-npcpy-1.2.23.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-npcpy-1.2.23.dist-info/top_level.txt,sha256=g1pbSvrOOncB74Bg5-J0Olg4V0A5VzDw-Xz5YObq8BU,6
-npcpy-1.2.23.dist-info/RECORD,,
+npcpy-1.2.24.dist-info/licenses/LICENSE,sha256=j0YPvce7Ng9e32zYOu0EmXjXeJ0Nwawd0RA3uSGGH4E,1070
+npcpy-1.2.24.dist-info/METADATA,sha256=yvWvzVYXVN4jzBRdSeDIGPWWEupFyzSIsAYKc88M5i0,29885
+npcpy-1.2.24.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+npcpy-1.2.24.dist-info/top_level.txt,sha256=g1pbSvrOOncB74Bg5-J0Olg4V0A5VzDw-Xz5YObq8BU,6
+npcpy-1.2.24.dist-info/RECORD,,