langroid 0.31.3__py3-none-any.whl → 0.32.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langroid/agent/base.py +3 -1
- langroid/language_models/openai_gpt.py +28 -2
- langroid/language_models/utils.py +4 -0
- {langroid-0.31.3.dist-info → langroid-0.32.1.dist-info}/METADATA +1 -1
- {langroid-0.31.3.dist-info → langroid-0.32.1.dist-info}/RECORD +8 -8
- pyproject.toml +1 -1
- {langroid-0.31.3.dist-info → langroid-0.32.1.dist-info}/LICENSE +0 -0
- {langroid-0.31.3.dist-info → langroid-0.32.1.dist-info}/WHEEL +0 -0
langroid/agent/base.py
CHANGED
@@ -1129,7 +1129,9 @@ class Agent(ABC):
             # or they were added by an agent_response.
             # note these could be from a forwarded msg from another agent,
             # so return ONLY the messages THIS agent to enabled to handle.
-
+            if all_tools:
+                return msg.tool_messages
+            return [t for t in msg.tool_messages if self._tool_recipient_match(t)]
         assert isinstance(msg, ChatDocument)
         if (
             msg.content != ""
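The change above adds an `all_tools` branch: when it is true, every tool message attached to the ChatDocument is returned; otherwise only the tools this agent is enabled to handle pass the `_tool_recipient_match` filter. A standalone sketch of that logic (illustrative only; the enclosing method's name and signature are not shown in this hunk):

```python
from typing import List


def select_tool_messages(msg, agent, all_tools: bool = False) -> List:
    """Sketch mirroring the new branch added in langroid/agent/base.py."""
    if all_tools:
        # return every tool message, even ones addressed to other agents
        return msg.tool_messages
    # otherwise keep only the tool messages this agent is enabled to handle
    return [t for t in msg.tool_messages if agent._tool_recipient_match(t)]
```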
langroid/language_models/openai_gpt.py
CHANGED
@@ -67,6 +67,7 @@ if "OLLAMA_HOST" in os.environ:
 else:
     OLLAMA_BASE_URL = "http://localhost:11434/v1"

+DEEPSEEK_BASE_URL = "https://api.deepseek.com/v1"
 OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1"
 GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai"
 GLHF_BASE_URL = "https://glhf.chat/api/openai/v1"
@@ -77,6 +78,10 @@ VLLM_API_KEY = os.environ.get("VLLM_API_KEY", DUMMY_API_KEY)
 LLAMACPP_API_KEY = os.environ.get("LLAMA_API_KEY", DUMMY_API_KEY)


+class DeepSeekModel(str, Enum):
+    DEEPSEEK = "deepseek/deepseek-chat"
+
+
 class AnthropicModel(str, Enum):
     """Enum for Anthropic models"""

@@ -130,6 +135,11 @@ _context_length: Dict[str, int] = {
     AnthropicModel.CLAUDE_3_OPUS: 200_000,
     AnthropicModel.CLAUDE_3_SONNET: 200_000,
     AnthropicModel.CLAUDE_3_HAIKU: 200_000,
+    DeepSeekModel.DEEPSEEK: 64_000,
+    GeminiModel.GEMINI_2_FLASH: 1_000_000,
+    GeminiModel.GEMINI_1_5_FLASH: 1_000_000,
+    GeminiModel.GEMINI_1_5_FLASH_8B: 1_000_000,
+    GeminiModel.GEMINI_1_5_PRO: 2_000_000,
 }

 _cost_per_1k_tokens: Dict[str, Tuple[float, float]] = {
@@ -146,6 +156,8 @@ _cost_per_1k_tokens: Dict[str, Tuple[float, float]] = {
     AnthropicModel.CLAUDE_3_OPUS: (0.015, 0.075),
     AnthropicModel.CLAUDE_3_SONNET: (0.003, 0.015),
     AnthropicModel.CLAUDE_3_HAIKU: (0.00025, 0.00125),
+    DeepSeekModel.DEEPSEEK: (0.00014, 0.00028),
+    # Gemini models have complex pricing based on input-len
 }


@@ -444,6 +456,8 @@ class OpenAIGPT(LanguageModel):
         config = config.copy()
         super().__init__(config)
         self.config: OpenAIGPTConfig = config
+        # save original model name such as `provider/model` before
+        # we strip out the `provider`
         self.chat_model_orig = self.config.chat_model

         # Run the first time the model is used
@@ -569,6 +583,7 @@ class OpenAIGPT(LanguageModel):
         self.is_groq = self.config.chat_model.startswith("groq/")
         self.is_cerebras = self.config.chat_model.startswith("cerebras/")
         self.is_gemini = self.is_gemini_model()
+        self.is_deepseek = self.is_deepseek_model()
         self.is_glhf = self.config.chat_model.startswith("glhf/")
         self.is_openrouter = self.config.chat_model.startswith("openrouter/")

@@ -609,6 +624,10 @@ class OpenAIGPT(LanguageModel):
             )
             self.api_key = os.getenv("OPENROUTER_API_KEY", DUMMY_API_KEY)
             self.api_base = OPENROUTER_BASE_URL
+        elif self.is_deepseek:
+            self.config.chat_model = self.config.chat_model.replace("deepseek/", "")
+            self.api_base = DEEPSEEK_BASE_URL
+            self.api_key = os.getenv("DEEPSEEK_API_KEY", DUMMY_API_KEY)

         self.client = OpenAI(
             api_key=self.api_key,
@@ -699,6 +718,13 @@ class OpenAIGPT(LanguageModel):
             "gemini/"
         )

+    def is_deepseek_model(self) -> bool:
+        deepseek_models = [e.value for e in DeepSeekModel]
+        return (
+            self.chat_model_orig in deepseek_models
+            or self.chat_model_orig.startswith("deepseek/")
+        )
+
     def requires_first_user_message(self) -> bool:
         """
         Does the chat_model require a non-empty first user message?
@@ -711,7 +737,7 @@ class OpenAIGPT(LanguageModel):
         """
         List of params that are not supported by the current model
         """
-        match self.
+        match self.chat_model_orig:
             case OpenAIChatModel.O1_MINI | OpenAIChatModel.O1_PREVIEW:
                 return ["temperature", "stream"]
             case _:
@@ -764,7 +790,7 @@ class OpenAIGPT(LanguageModel):
         models/endpoints.
         Get it from the dict, otherwise fail-over to general method
         """
-        return _cost_per_1k_tokens.get(self.
+        return _cost_per_1k_tokens.get(self.chat_model_orig, super().chat_cost())

     def set_stream(self, stream: bool) -> bool:
         """Enable or disable streaming output from API.
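Taken together, the openai_gpt.py hunks route a new `deepseek/` model prefix through the existing OpenAI-compatible client: the prefix selects the DeepSeek base URL and API key and is then stripped from the model name, while `chat_model_orig` keeps the prefixed name for context-length and cost lookups. A minimal usage sketch, assuming `DEEPSEEK_API_KEY` is set in the environment (the chat-call interface itself is unchanged by this diff and is omitted):

```python
from langroid.language_models.openai_gpt import OpenAIGPT, OpenAIGPTConfig

# "deepseek/deepseek-chat" matches DeepSeekModel.DEEPSEEK
config = OpenAIGPTConfig(chat_model="deepseek/deepseek-chat")
llm = OpenAIGPT(config)

# After __init__ (per the hunks above):
#   llm.is_deepseek        -> True
#   llm.config.chat_model  -> "deepseek-chat"          (provider prefix stripped)
#   llm.chat_model_orig    -> "deepseek/deepseek-chat" (used for cost lookup)
#   llm.api_base           -> "https://api.deepseek.com/v1"
#   llm.api_key            -> DEEPSEEK_API_KEY from the environment, else a dummy key
```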
langroid/language_models/utils.py
CHANGED
@@ -53,6 +53,10 @@ def retry_with_exponential_backoff(
             logger.error(f"OpenAI API request failed with error: {e}.")
             raise e

+        except openai.UnprocessableEntityError as e:
+            logger.error(f"OpenAI API request failed with error: {e}.")
+            raise e
+
         # Retry on specified errors
         except errors as e:
             # Increment retries
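This hunk makes `retry_with_exponential_backoff` re-raise `openai.UnprocessableEntityError` (HTTP 422) immediately instead of retrying it, since a request the API rejects as malformed will fail identically on every attempt. A simplified, generic sketch of the fail-fast-vs-retry pattern (illustrative only, not langroid's actual implementation):

```python
import logging
import time

logger = logging.getLogger(__name__)


def call_with_backoff(fn, retryable=(TimeoutError,), fatal=(ValueError,),
                      max_retries=5, base_delay=1.0):
    """Retry transient errors with exponential backoff; fail fast on fatal ones."""
    delay = base_delay
    for _ in range(max_retries):
        try:
            return fn()
        except fatal as e:
            # e.g. a 422: retrying cannot fix a malformed request
            logger.error(f"Request failed with non-retryable error: {e}")
            raise
        except retryable as e:
            logger.warning(f"Transient error: {e}; retrying in {delay:.1f}s")
            time.sleep(delay)
            delay *= 2
    return fn()  # final attempt; let any remaining error propagate
```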
{langroid-0.31.3.dist-info → langroid-0.32.1.dist-info}/RECORD
CHANGED
@@ -12,7 +12,7 @@ langroid/agent/.chainlit/translations/ta.json,sha256=8JPW6BwLN2dl9wuq5wSkMvazcY8
 langroid/agent/.chainlit/translations/te.json,sha256=JzW2YXWg1qqvWgIvEgMelQz5s6EzTb_uD_3TEHAHiQw,23526
 langroid/agent/.chainlit/translations/zh-CN.json,sha256=aLBSSSQ0yojlYGuMMlOYvkD_ruG9-d2AgnjJWhPODVw,11737
 langroid/agent/__init__.py,sha256=ll0Cubd2DZ-fsCMl7e10hf9ZjFGKzphfBco396IKITY,786
-langroid/agent/base.py,sha256
+langroid/agent/base.py,sha256=-wColF3AGsbIm-uiTLfu8cyGUqMRCzZETVirvgZgYGQ,77642
 langroid/agent/batch.py,sha256=qK3ph6VNj_1sOhfXCZY4r6gh035DglDKU751p8BU0tY,14665
 langroid/agent/callbacks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langroid/agent/callbacks/chainlit.py,sha256=C6zzzYC30qC4eMA7al7eFpRoTgoe3475kaMKyXgQM0Q,20695
@@ -88,12 +88,12 @@ langroid/language_models/azure_openai.py,sha256=zNQzzsERxNestq-hFfQZbvTzK43G2vjR
 langroid/language_models/base.py,sha256=6hXR-bclyPif-BvFbyXevP-gEwiawQAJHX3N1AKNei0,23786
 langroid/language_models/config.py,sha256=9Q8wk5a7RQr8LGMT_0WkpjY8S4ywK06SalVRjXlfCiI,378
 langroid/language_models/mock_lm.py,sha256=5BgHKDVRWFbUwDT_PFgTZXz9-k8wJSA2e3PZmyDgQ1k,4022
-langroid/language_models/openai_gpt.py,sha256=
+langroid/language_models/openai_gpt.py,sha256=MW-eOc8bFVAvnJZaj6iTi8NYJOR0hcAx7ZbBuY4mFfY,76410
 langroid/language_models/prompt_formatter/__init__.py,sha256=2-5cdE24XoFDhifOLl8yiscohil1ogbP1ECkYdBlBsk,372
 langroid/language_models/prompt_formatter/base.py,sha256=eDS1sgRNZVnoajwV_ZIha6cba5Dt8xjgzdRbPITwx3Q,1221
 langroid/language_models/prompt_formatter/hf_formatter.py,sha256=PVJppmjRvD-2DF-XNC6mE05vTZ9wbu37SmXwZBQhad0,5055
 langroid/language_models/prompt_formatter/llama2_formatter.py,sha256=YdcO88qyBeuMENVIVvVqSYuEpvYSTndUe_jd6hVTko4,2899
-langroid/language_models/utils.py,sha256=
+langroid/language_models/utils.py,sha256=L4_CbihDMTGcsg0TOG1Yd5JFEto46--h7CX_14m89sQ,5016
 langroid/mytypes.py,sha256=ptAFxEAtiwmIfUnGisNotTe8wT9LKBf22lOfPgZoQIY,2368
 langroid/parsing/__init__.py,sha256=ZgSAfgTC6VsTLFlRSWT-TwYco7SQeRMeZG-49MnKYGY,936
 langroid/parsing/agent_chats.py,sha256=sbZRV9ujdM5QXvvuHVjIi2ysYSYlap-uqfMMUKulrW0,1068
@@ -155,8 +155,8 @@ langroid/vector_store/meilisearch.py,sha256=6frB7GFWeWmeKzRfLZIvzRjllniZ1cYj3Hmh
 langroid/vector_store/momento.py,sha256=UNHGT6jXuQtqY9f6MdqGU14bVnS0zHgIJUa30ULpUJo,10474
 langroid/vector_store/qdrant_cloud.py,sha256=3im4Mip0QXLkR6wiqVsjV1QvhSElfxdFSuDKddBDQ-4,188
 langroid/vector_store/qdrantdb.py,sha256=v7mCsijc2GdRJyil-yFaUVAX4SX5D75mD3vzlpjCMuo,17393
-pyproject.toml,sha256=
-langroid-0.
-langroid-0.
-langroid-0.
-langroid-0.
+pyproject.toml,sha256=LAG50y4HkneTim_5r19S2yshnGVdyNjIPB8FhorUVfY,7525
+langroid-0.32.1.dist-info/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
+langroid-0.32.1.dist-info/METADATA,sha256=fPESlAMHeicD5BP4ZfhJ6aTVdjUav7lMw-J8YpeKBXI,58250
+langroid-0.32.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+langroid-0.32.1.dist-info/RECORD,,
pyproject.toml
CHANGED
File without changes