vectorvein 0.1.62__py3-none-any.whl → 0.1.64__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
vectorvein/chat_clients/anthropic_client.py

@@ -2,7 +2,6 @@
 # @Date: 2024-07-26 14:48:55
 import json
 import random
-from functools import cached_property
 from typing import overload, Generator, AsyncGenerator, Any, Literal, Iterable
 
 import httpx
@@ -41,6 +40,7 @@ from ..types.llm_parameters import (
     NotGiven,
     ToolParam,
     ToolChoice,
+    EndpointSetting,
     AnthropicToolParam,
     AnthropicToolChoice,
     ChatCompletionMessage,
@@ -178,19 +178,45 @@ class AnthropicChatClient(BaseChatClient):
             backend_name,
         )
         self.model_id = None
+        self.endpoint = None
+
+    def set_model_id_by_endpoint_id(self, endpoint_id: str):
+        for endpoint_option in self.backend_settings.models[self.model].endpoints:
+            if isinstance(endpoint_option, dict):
+                if endpoint_id == endpoint_option["endpoint_id"]:
+                    self.model_id = endpoint_option["model_id"]
+                    break
+            else:
+                if endpoint_id == endpoint_option:
+                    self.model_id = endpoint_option
+                    break
+        return self.model_id
+
+    def _set_endpoint(self):
+        if self.endpoint is None:
+            if self.random_endpoint:
+                self.random_endpoint = True
+                endpoint = random.choice(self.backend_settings.models[self.model].endpoints)
+                if isinstance(endpoint, dict):
+                    self.endpoint_id = endpoint["endpoint_id"]
+                    self.model_id = endpoint["model_id"]
+                else:
+                    self.endpoint_id = endpoint
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
+            else:
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
+                self.set_model_id_by_endpoint_id(self.endpoint_id)
+        elif isinstance(self.endpoint, EndpointSetting):
+            self.endpoint_id = self.endpoint.id
+            self.set_model_id_by_endpoint_id(self.endpoint_id)
+        else:
+            raise ValueError("Invalid endpoint")
+
+        return self.endpoint, self.model_id
 
     @property
     def raw_client(self):  # type: ignore
-        if self.random_endpoint:
-            self.random_endpoint = True
-            endpoint = random.choice(self.backend_settings.models[self.model].endpoints)
-            self.model_id = None
-            if isinstance(endpoint, dict):
-                self.endpoint_id = endpoint["endpoint_id"]
-                self.model_id = endpoint["model_id"]
-            else:
-                self.endpoint_id = endpoint
-        self.endpoint = settings.get_endpoint(self.endpoint_id)
+        self.endpoint, self.model_id = self._set_endpoint()
 
         if self.endpoint.is_vertex:
             if self.endpoint.credentials is None:
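The hunk above moves endpoint selection out of `raw_client` into two reusable helpers: `set_model_id_by_endpoint_id` walks the model's endpoint options (each either a bare endpoint id or a dict pairing an `endpoint_id` with a per-endpoint `model_id`) to recover the model id for a given endpoint, and `_set_endpoint` either keeps an already-assigned `EndpointSetting`, resolves a configured `endpoint_id`, or picks an endpoint at random. Below is a minimal standalone sketch of that selection rule, using invented endpoint data and helper names rather than the real `backend_settings`/`settings` objects:

import random
from typing import Optional, Union

# Each endpoint option is either a bare endpoint id, or a dict that also pins
# a per-endpoint model id (the two shapes handled in the hunk above).
EndpointOption = Union[str, dict]

ENDPOINTS: list[EndpointOption] = [
    "anthropic-official",
    {"endpoint_id": "vertex-us-east5", "model_id": "claude-3-5-sonnet-v2@20241022"},
]


def pick_random_endpoint(options: list[EndpointOption]) -> tuple[str, Optional[str]]:
    """Random selection: return (endpoint_id, model_id or None), like the random_endpoint branch."""
    choice = random.choice(options)
    if isinstance(choice, dict):
        return choice["endpoint_id"], choice["model_id"]
    return choice, None


def model_id_for_endpoint(options: list[EndpointOption], endpoint_id: str) -> Optional[str]:
    """Reverse lookup used when the endpoint id is already fixed."""
    for option in options:
        if isinstance(option, dict):
            if option["endpoint_id"] == endpoint_id:
                return option["model_id"]
        elif option == endpoint_id:
            return option
    return None


print(pick_random_endpoint(ENDPOINTS))
print(model_id_for_endpoint(ENDPOINTS, "vertex-us-east5"))

In the real helpers, once an endpoint has been chosen `self.endpoint` is no longer None, so later calls to `_set_endpoint` reuse it instead of re-rolling the random choice.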
@@ -330,28 +356,24 @@ class AnthropicChatClient(BaseChatClient):
         if temperature is not None:
             self.temperature = temperature
 
-        if self.random_endpoint:
-            self.random_endpoint = True
-            endpoint = random.choice(self.backend_settings.models[self.model].endpoints)
-            self.model_id = None
-            if isinstance(endpoint, dict):
-                self.endpoint_id = endpoint["endpoint_id"]
-                self.model_id = endpoint["model_id"]
-            else:
-                self.endpoint_id = endpoint
-        self.endpoint = settings.get_endpoint(self.endpoint_id)
+        self.model_setting = self.backend_settings.models[self.model]
+        if self.model_id is None:
+            self.model_id = self.model_setting.id
+
+        self.endpoint, self.model_id = self._set_endpoint()
 
         if self.endpoint.api_schema_type == "openai":
             _tools = OPENAI_NOT_GIVEN if tools is NOT_GIVEN else tools
             _tool_choice = OPENAI_NOT_GIVEN if tool_choice is NOT_GIVEN else tool_choice
 
             formatted_messages = refactor_into_openai_messages(messages)
+            model_id = self.model_id
 
             if self.stream:
 
                 def _generator():
                     response = OpenAICompatibleChatClient(
-                        model=self.model,
+                        model=model_id,
                         stream=True,
                         temperature=self.temperature,
                         context_length_control=self.context_length_control,
@@ -379,7 +401,7 @@ class AnthropicChatClient(BaseChatClient):
                 return _generator()
             else:
                 return OpenAICompatibleChatClient(
-                    model=self.model,
+                    model=model_id,
                     stream=False,
                     temperature=self.temperature,
                     context_length_control=self.context_length_control,
@@ -411,10 +433,6 @@ class AnthropicChatClient(BaseChatClient):
         if isinstance(top_p, OpenAINotGiven) or top_p is None:
             top_p = NOT_GIVEN
 
-        self.model_setting = self.backend_settings.models[self.model]
-        if self.model_id is None:
-            self.model_id = self.model_setting.id
-
         if messages[0].get("role") == "system":
             system_prompt: str = messages[0]["content"]
             messages = messages[1:]
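In the sync completion path (the three hunks above), the model lookup now runs before the `api_schema_type == "openai"` branch, and the resolved id is bound to a local (`model_id = self.model_id`) that the nested `_generator` closes over and passes to `OpenAICompatibleChatClient` in place of `self.model`. A rough standalone illustration of that resolve-then-delegate shape, with invented names (`resolve_model_id`, `OVERRIDES`):

from typing import Iterator

# Invented mapping standing in for the backend_settings model_id resolution.
OVERRIDES = {"claude-3-5-sonnet": "vertex-claude-3-5-sonnet-v2"}


def resolve_model_id(model: str) -> str:
    """Hypothetical stand-in for the model_setting / endpoint model_id lookup."""
    return OVERRIDES.get(model, model)


def create_completion(model: str, stream: bool = True) -> Iterator[str]:
    model_id = resolve_model_id(model)  # resolved once, before any branching

    def _generator() -> Iterator[str]:
        # The nested generator uses the resolved local, not the raw model alias.
        yield f"delegating to OpenAI-compatible client with model={model_id}"

    if stream:
        return _generator()
    return iter([f"single response for model={model_id}"])


print(list(create_completion("claude-3-5-sonnet")))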
@@ -569,19 +587,45 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
             backend_name,
         )
         self.model_id = None
+        self.endpoint = None
+
+    def set_model_id_by_endpoint_id(self, endpoint_id: str):
+        for endpoint_option in self.backend_settings.models[self.model].endpoints:
+            if isinstance(endpoint_option, dict):
+                if endpoint_id == endpoint_option["endpoint_id"]:
+                    self.model_id = endpoint_option["model_id"]
+                    break
+            else:
+                if endpoint_id == endpoint_option:
+                    self.model_id = endpoint_option
+                    break
+        return self.model_id
+
+    def _set_endpoint(self):
+        if self.endpoint is None:
+            if self.random_endpoint:
+                self.random_endpoint = True
+                endpoint = random.choice(self.backend_settings.models[self.model].endpoints)
+                if isinstance(endpoint, dict):
+                    self.endpoint_id = endpoint["endpoint_id"]
+                    self.model_id = endpoint["model_id"]
+                else:
+                    self.endpoint_id = endpoint
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
+            else:
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
+                self.set_model_id_by_endpoint_id(self.endpoint_id)
+        elif isinstance(self.endpoint, EndpointSetting):
+            self.endpoint_id = self.endpoint.id
+            self.set_model_id_by_endpoint_id(self.endpoint_id)
+        else:
+            raise ValueError("Invalid endpoint")
+
+        return self.endpoint, self.model_id
 
     @property
     def raw_client(self):  # type: ignore
-        if self.random_endpoint:
-            self.random_endpoint = True
-            endpoint = random.choice(self.backend_settings.models[self.model].endpoints)
-            self.model_id = None
-            if isinstance(endpoint, dict):
-                self.endpoint_id = endpoint["endpoint_id"]
-                self.model_id = endpoint["model_id"]
-            else:
-                self.endpoint_id = endpoint
-        self.endpoint = settings.get_endpoint(self.endpoint_id)
+        self.endpoint, self.model_id = self._set_endpoint()
 
         if self.endpoint.is_vertex:
             if self.endpoint.credentials is None:
@@ -720,28 +764,24 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
         if temperature is not None:
             self.temperature = temperature
 
-        if self.random_endpoint:
-            self.random_endpoint = True
-            endpoint = random.choice(self.backend_settings.models[self.model].endpoints)
-            self.model_id = None
-            if isinstance(endpoint, dict):
-                self.endpoint_id = endpoint["endpoint_id"]
-                self.model_id = endpoint["model_id"]
-            else:
-                self.endpoint_id = endpoint
-        self.endpoint = settings.get_endpoint(self.endpoint_id)
+        self.model_setting = self.backend_settings.models[self.model]
+        if self.model_id is None:
+            self.model_id = self.model_setting.id
+
+        self.endpoint, self.model_id = self._set_endpoint()
 
         if self.endpoint.api_schema_type == "openai":
             _tools = OPENAI_NOT_GIVEN if tools is NOT_GIVEN else tools
             _tool_choice = OPENAI_NOT_GIVEN if tool_choice is NOT_GIVEN else tool_choice
 
             formatted_messages = refactor_into_openai_messages(messages)
+            model_id = self.model_id
 
             if self.stream:
 
                 async def _generator():
                     client = AsyncOpenAICompatibleChatClient(
-                        model=self.model,
+                        model=model_id,
                         stream=True,
                         temperature=self.temperature,
                         context_length_control=self.context_length_control,
@@ -770,7 +810,7 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
                 return _generator()
             else:
                 client = AsyncOpenAICompatibleChatClient(
-                    model=self.model,
+                    model=model_id,
                     stream=False,
                     temperature=self.temperature,
                     context_length_control=self.context_length_control,
@@ -803,10 +843,6 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
         if isinstance(top_p, OpenAINotGiven) or top_p is None:
             top_p = NOT_GIVEN
 
-        self.model_setting = self.backend_settings.models[self.model]
-        if self.model_id is None:
-            self.model_id = self.model_setting.id
-
         if messages[0].get("role") == "system":
             system_prompt = messages[0]["content"]
             messages = messages[1:]
vectorvein/chat_clients/openai_compatible_client.py

@@ -73,6 +73,8 @@ class OpenAICompatibleChatClient(BaseChatClient):
         self.endpoint = settings.get_endpoint(self.endpoint_id)
 
         if self.endpoint.is_azure:
+            if self.endpoint.api_base is None:
+                raise ValueError("Azure endpoint is not set")
             return AzureOpenAI(
                 azure_endpoint=self.endpoint.api_base,
                 api_key=self.endpoint.api_key,
@@ -339,6 +341,8 @@ class AsyncOpenAICompatibleChatClient(BaseAsyncChatClient):
         self.endpoint = settings.get_endpoint(self.endpoint_id)
 
         if self.endpoint.is_azure:
+            if self.endpoint.api_base is None:
+                raise ValueError("Azure endpoint is not set")
             return AsyncAzureOpenAI(
                 azure_endpoint=self.endpoint.api_base,
                 api_key=self.endpoint.api_key,
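Both Azure branches above now reject an endpoint whose `api_base` is missing before constructing the client. With `api_base` typed `Optional[str]` (see the next hunk), the explicit check turns a configuration gap into an immediate, descriptive `ValueError` and lets type checkers narrow the value to `str` past the guard. A small sketch of the same guard pattern against a toy config object (not the real `EndpointSetting`):

from typing import Optional


class ToyEndpoint:
    """Toy stand-in for an endpoint record with an optional base URL."""

    def __init__(self, api_base: Optional[str], is_azure: bool) -> None:
        self.api_base = api_base
        self.is_azure = is_azure


def base_url_for(endpoint: ToyEndpoint) -> str:
    if endpoint.is_azure:
        # Fail fast with a clear message; after this check api_base is a str.
        if endpoint.api_base is None:
            raise ValueError("Azure endpoint is not set")
        return endpoint.api_base
    return endpoint.api_base or "https://api.openai.com/v1"


print(base_url_for(ToyEndpoint("https://example.openai.azure.com", is_azure=True)))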
vectorvein/types/llm_parameters.py

@@ -28,7 +28,7 @@ class EndpointOptionDict(TypedDict):
 class EndpointSetting(BaseModel):
     id: str = Field(..., description="The id of the endpoint.")
     region: Optional[str] = Field(None, description="The region for the endpoint.")
-    api_base: str = Field(None, description="The base URL for the API.")
+    api_base: Optional[str] = Field(None, description="The base URL for the API.")
     api_key: Optional[str] = Field(None, description="The API key for authentication.")
     api_schema_type: Optional[str] = Field(
         "default",
vectorvein-0.1.62.dist-info/METADATA → vectorvein-0.1.64.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vectorvein
-Version: 0.1.62
+Version: 0.1.64
 Summary: VectorVein python SDK
 Author-Email: Anderson <andersonby@163.com>
 License: MIT
vectorvein-0.1.62.dist-info/RECORD → vectorvein-0.1.64.dist-info/RECORD

@@ -1,9 +1,9 @@
-vectorvein-0.1.62.dist-info/METADATA,sha256=5c_Miwx6OAcWpmDNNdP7SGXZQXmudq6y4TB7weXGL_w,641
-vectorvein-0.1.62.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
-vectorvein-0.1.62.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+vectorvein-0.1.64.dist-info/METADATA,sha256=ZthopkhnsP7tm1D9r-6TLH88LewZUC235dwC9ZPF3fQ,641
+vectorvein-0.1.64.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+vectorvein-0.1.64.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
 vectorvein/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/chat_clients/__init__.py,sha256=Oev7Lv1DIEWCMD-2Pm7e2cwzX7JFQTnIK-j6o4iUuyQ,17725
-vectorvein/chat_clients/anthropic_client.py,sha256=efuegF1XFMgC5AKK-CqxAx_ktJHYth_ojCtELqv-RF0,38564
+vectorvein/chat_clients/anthropic_client.py,sha256=4d1LvkJxHEbbNKfkkeHtqSKG3sREqe9OYZlbLiWvLS8,40005
 vectorvein/chat_clients/baichuan_client.py,sha256=CVMvpgjdrZGv0BWnTOBD-f2ufZ3wq3496wqukumsAr4,526
 vectorvein/chat_clients/base_client.py,sha256=tmD3ai6YjQnCKHuPsUww1khRlJeJ2AJzYubksb-2UaM,10489
 vectorvein/chat_clients/deepseek_client.py,sha256=3qWu01NlJAP2N-Ff62d5-CZXZitlizE1fzb20LNetig,526
@@ -14,7 +14,7 @@ vectorvein/chat_clients/minimax_client.py,sha256=uNYz3ccJOhTAgzkDNvWfM4_LaBaOpHj
 vectorvein/chat_clients/mistral_client.py,sha256=1aKSylzBDaLYcFnaBIL4-sXSzWmXfBeON9Q0rq-ziWw,534
 vectorvein/chat_clients/moonshot_client.py,sha256=gbu-6nGxx8uM_U2WlI4Wus881rFRotzHtMSoYOcruGU,526
 vectorvein/chat_clients/openai_client.py,sha256=Nz6tV45pWcsOupxjnsRsGTicbQNJWIZyxuJoJ5DGMpg,527
-vectorvein/chat_clients/openai_compatible_client.py,sha256=hbSggW5itvTRk3Qb3rejNTt3vd0r6R95irLTjeSzm2g,23102
+vectorvein/chat_clients/openai_compatible_client.py,sha256=-n0iDTuthnbVmS_Dbc49TafjmheVS1LeGEF5cY1L2qY,23324
 vectorvein/chat_clients/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/chat_clients/qwen_client.py,sha256=-ryh-m9PgsO0fc4ulcCmPTy1155J8YUy15uPoJQOHA0,513
 vectorvein/chat_clients/stepfun_client.py,sha256=zsD2W5ahmR4DD9cqQTXmJr3txrGuvxbRWhFlRdwNijI,519
@@ -29,8 +29,8 @@ vectorvein/settings/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 vectorvein/types/defaults.py,sha256=Hb9BFNJIRJcwjePsQTKKihHqJGeJGbFHfNeV_pIV8gM,24479
 vectorvein/types/enums.py,sha256=7KTJSVtQueImmbr1fSwv3rQVtc0RyMWXJmoE2tDOaso,1667
 vectorvein/types/exception.py,sha256=gnW4GnJ76jND6UGnodk9xmqkcbeS7Cz2rvncA2HpD5E,69
-vectorvein/types/llm_parameters.py,sha256=Q1mBJPMF7pzFQw1G9ut9RgRPGUEYmY5Kvb7CSYurZQ8,5350
+vectorvein/types/llm_parameters.py,sha256=g2Q0RKMC2vOcMI0tFpZ53xfVSfC8MLoK0YntPqce49U,5360
 vectorvein/types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/utilities/media_processing.py,sha256=CTRq-lGlFkFgP_FSRhNwF_qUgmOrXPf2_1Ok9HY42_g,5887
 vectorvein/utilities/retry.py,sha256=6KFS9R2HdhqM3_9jkjD4F36ZSpEx2YNFGOVlpOsUetM,2208
-vectorvein-0.1.62.dist-info/RECORD,,
+vectorvein-0.1.64.dist-info/RECORD,,