alita-sdk 0.3.428__py3-none-any.whl → 0.3.428.post1__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. The information is provided for informational purposes only.

Potentially problematic release.


This version of alita-sdk might be problematic; see the advisory details in the registry listing for more information.

@@ -206,18 +206,37 @@ class AlitaClient:
206
206
 
207
207
  logger.info(f"Creating ChatOpenAI model: {model_name} with config: {model_config}")
208
208
 
209
- return ChatOpenAI(
210
- base_url=f"{self.base_url}{self.llm_path}",
211
- model=model_name,
212
- api_key=self.auth_token,
213
- streaming=model_config.get("streaming", True),
214
- stream_usage=model_config.get("stream_usage", True),
215
- max_tokens=model_config.get("max_tokens", None),
216
- temperature=model_config.get("temperature"),
217
- max_retries=model_config.get("max_retries", 3),
218
- seed=model_config.get("seed", None),
219
- openai_organization=str(self.project_id),
220
- )
209
+ try:
210
+ from tools import this # pylint: disable=E0401,C0415
211
+ worker_config = this.for_module("indexer_worker").descriptor.config
212
+ except: # pylint: disable=W0702
213
+ worker_config = {}
214
+
215
+ use_responses_api = False
216
+
217
+ if worker_config and isinstance(worker_config, dict):
218
+ for target_name_tag in worker_config.get("use_responses_api_for", []):
219
+ if target_name_tag in model_name:
220
+ use_responses_api = True
221
+ break
222
+
223
+ target_kwargs = {
224
+ "base_url": f"{self.base_url}{self.llm_path}",
225
+ "model": model_name,
226
+ "api_key": self.auth_token,
227
+ "streaming": model_config.get("streaming", True),
228
+ "stream_usage": model_config.get("stream_usage", True),
229
+ "max_tokens": model_config.get("max_tokens", None),
230
+ "temperature": model_config.get("temperature"),
231
+ "max_retries": model_config.get("max_retries", 3),
232
+ "seed": model_config.get("seed", None),
233
+ "openai_organization": str(self.project_id),
234
+ }
235
+
236
+ if use_responses_api:
237
+ target_kwargs["use_responses_api"] = True
238
+
239
+ return ChatOpenAI(**target_kwargs)
221
240
 
222
241
  def generate_image(self,
223
242
  prompt: str,