langroid 0.23.0__py3-none-any.whl → 0.23.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langroid/agent/base.py +17 -13
- langroid/agent/special/doc_chat_agent.py +10 -10
- langroid/agent/tool_message.py +2 -1
- {langroid-0.23.0.dist-info → langroid-0.23.1.dist-info}/METADATA +3 -1
- {langroid-0.23.0.dist-info → langroid-0.23.1.dist-info}/RECORD +8 -8
- pyproject.toml +1 -1
- {langroid-0.23.0.dist-info → langroid-0.23.1.dist-info}/LICENSE +0 -0
- {langroid-0.23.0.dist-info → langroid-0.23.1.dist-info}/WHEEL +0 -0
langroid/agent/base.py
CHANGED
@@ -756,18 +756,18 @@ class Agent(ABC):
     @no_type_check
     async def llm_response_async(
         self,
-
+        message: Optional[str | ChatDocument] = None,
     ) -> Optional[ChatDocument]:
         """
         Asynch version of `llm_response`. See there for details.
         """
-        if
+        if message is None or not self.llm_can_respond(message):
            return None

-        if isinstance(
-            prompt =
+        if isinstance(message, ChatDocument):
+            prompt = message.content
        else:
-            prompt =
+            prompt = message

        output_len = self.config.llm.max_output_tokens
        if self.num_tokens(prompt) + output_len > self.llm.completion_context_length():
@@ -807,29 +807,31 @@ class Agent(ABC):
        )
        cdoc = ChatDocument.from_LLMResponse(response, displayed=True)
        # Preserve trail of tool_ids for OpenAI Assistant fn-calls
-        cdoc.metadata.tool_ids =
+        cdoc.metadata.tool_ids = (
+            [] if isinstance(message, str) else message.metadata.tool_ids
+        )
        return cdoc

    @no_type_check
    def llm_response(
        self,
-
+        message: Optional[str | ChatDocument] = None,
    ) -> Optional[ChatDocument]:
        """
        LLM response to a prompt.
        Args:
-
+            message (str|ChatDocument): prompt string, or ChatDocument object

        Returns:
            Response from LLM, packaged as a ChatDocument
        """
-        if
+        if message is None or not self.llm_can_respond(message):
            return None

-        if isinstance(
-            prompt =
+        if isinstance(message, ChatDocument):
+            prompt = message.content
        else:
-            prompt =
+            prompt = message

        with ExitStack() as stack:  # for conditionally using rich spinner
            if not self.llm.get_stream():
@@ -879,7 +881,9 @@ class Agent(ABC):
        )
        cdoc = ChatDocument.from_LLMResponse(response, displayed=True)
        # Preserve trail of tool_ids for OpenAI Assistant fn-calls
-        cdoc.metadata.tool_ids =
+        cdoc.metadata.tool_ids = (
+            [] if isinstance(message, str) else message.metadata.tool_ids
+        )
        return cdoc

    def has_tool_message_attempt(self, msg: str | ChatDocument | None) -> bool:
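Net effect of this change: both `llm_response` and `llm_response_async` now take an optional `message` that may be a plain string or a `ChatDocument`, and the `tool_ids` trail is carried over only when a `ChatDocument` is passed. A minimal sketch of the three call shapes (the agent construction below is illustrative, not taken from this diff):

```python
import langroid as lr

agent = lr.ChatAgent(lr.ChatAgentConfig(name="demo"))

r1 = agent.llm_response("What is 2 + 2?")  # plain str prompt; per the diff, tool_ids resets to []
r2 = agent.llm_response(r1)                # ChatDocument; its metadata.tool_ids trail is preserved
r3 = agent.llm_response()                  # message is Optional and defaults to None
```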
langroid/agent/special/doc_chat_agent.py
CHANGED
@@ -663,15 +663,15 @@ class DocChatAgent(ChatAgent):

    def llm_response(
        self,
-
+        message: None | str | ChatDocument = None,
    ) -> Optional[ChatDocument]:
-        if not self.llm_can_respond(
+        if not self.llm_can_respond(message):
            return None
        query_str: str | None
-        if isinstance(
-            query_str =
+        if isinstance(message, ChatDocument):
+            query_str = message.content
        else:
-            query_str =
+            query_str = message
        if query_str is None or query_str.startswith("!"):
            # direct query to LLM
            query_str = query_str[1:] if query_str is not None else None
@@ -714,16 +714,16 @@ class DocChatAgent(ChatAgent):

    async def llm_response_async(
        self,
-
+        message: None | str | ChatDocument = None,
    ) -> Optional[ChatDocument]:
        apply_nest_asyncio()
-        if not self.llm_can_respond(
+        if not self.llm_can_respond(message):
            return None
        query_str: str | None
-        if isinstance(
-            query_str =
+        if isinstance(message, ChatDocument):
+            query_str = message.content
        else:
-            query_str =
+            query_str = message
        if query_str is None or query_str.startswith("!"):
            # direct query to LLM
            query_str = query_str[1:] if query_str is not None else None
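The same signature change is applied to `DocChatAgent`, and the surrounding context lines show the existing convention that a query prefixed with `!` bypasses retrieval and goes straight to the LLM. A rough usage sketch (the document path and default config are placeholders):

```python
from langroid.agent.special.doc_chat_agent import DocChatAgent, DocChatAgentConfig

agent = DocChatAgent(DocChatAgentConfig())
agent.ingest_doc_paths(["docs/intro.md"])  # placeholder path

rag_answer = agent.llm_response("What does the intro say about agents?")  # retrieval-augmented answer
direct = agent.llm_response("!Summarize the kinds of questions you can answer")  # "!" prefix: direct LLM query
```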
langroid/agent/tool_message.py
CHANGED
@@ -171,7 +171,8 @@ class ToolMessage(ABC, BaseModel):

        Args:
            tool: instructions for Langroid-native tool use? (e.g. for non-OpenAI LLM)
-                (or else it would be for OpenAI Function calls)
+                (or else it would be for OpenAI Function calls).
+                Ignored in the default implementation, but can be used in subclasses.
        Returns:
            str: instructions on how to use the message
        """
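For context on the docstring above: per the diff, the `tool` flag indicates whether the generated instructions are for Langroid-native tool use (rather than OpenAI function calls), and the default implementation ignores it while subclasses may honor it. A typical Langroid-native tool is defined as a `ToolMessage` subclass; a minimal sketch (field values are illustrative):

```python
import langroid as lr

class SquareTool(lr.ToolMessage):
    request: str = "square"  # name the LLM uses to invoke the tool
    purpose: str = "To compute the square of a given <number>."
    number: float

    def handle(self) -> str:
        # called by the agent when the LLM emits this tool
        return str(self.number ** 2)
```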
{langroid-0.23.0.dist-info → langroid-0.23.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langroid
-Version: 0.23.0
+Version: 0.23.1
 Summary: Harness LLMs with Multi-Agent Programming
 License: MIT
 Author: Prasad Chalasani
@@ -249,6 +249,8 @@ teacher_task.run()
 <summary> <b>Click to expand</b></summary>

 - **Nov 2024:**
+  - **[0.23.0](https://langroid.github.io/langroid/tutorials/local-llm-setup/#local-llms-hosted-on-glhfchat)**:
+    support for LLMs (e.g. `Qwen2.5-Coder-32b-Instruct`) hosted on glhf.chat
   - **[0.22.0](https://langroid.github.io/langroid/notes/large-tool-results/)**:
     Optional parameters to truncate large tool results.
   - **[0.21.0](https://langroid.github.io/langroid/notes/gemini/)** Direct support for Gemini models via OpenAI client instead of using LiteLLM.
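The new 0.23.0 news item points to the glhf.chat setup in the linked tutorial. A rough sketch of pointing Langroid at a glhf.chat-hosted model through the OpenAI-compatible client; the base URL, model string, and key handling here are assumptions, so follow the tutorial for the exact recipe:

```python
import langroid as lr
import langroid.language_models as lm

llm_config = lm.OpenAIGPTConfig(
    chat_model="hf:Qwen/Qwen2.5-Coder-32B-Instruct",  # model id on glhf.chat (assumption)
    api_base="https://glhf.chat/api/openai/v1",       # OpenAI-compatible endpoint (assumption)
)
agent = lr.ChatAgent(lr.ChatAgentConfig(llm=llm_config))
```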
{langroid-0.23.0.dist-info → langroid-0.23.1.dist-info}/RECORD
CHANGED
@@ -1,6 +1,6 @@
 langroid/__init__.py,sha256=z_fCOLQJPOw3LLRPBlFB5-2HyCjpPgQa4m4iY5Fvb8Y,1800
 langroid/agent/__init__.py,sha256=ll0Cubd2DZ-fsCMl7e10hf9ZjFGKzphfBco396IKITY,786
-langroid/agent/base.py,sha256=
+langroid/agent/base.py,sha256=bYfVh_F-lYDecMpL_7SXzBetAZriT7ZfQE4vXHwI0xI,67945
 langroid/agent/batch.py,sha256=QZdlt1563hx4l3AXrCaGovE-PNG93M3DsvQAbDzdiS8,13705
 langroid/agent/callbacks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langroid/agent/callbacks/chainlit.py,sha256=JJXI3UGTyTDg2FFath4rqY1GyUo_0pbVBt8CZpvdtn4,23289
@@ -15,7 +15,7 @@ langroid/agent/special/arangodb/arangodb_agent.py,sha256=12Y54c84c9qXV-YXRBcI5Ha
 langroid/agent/special/arangodb/system_messages.py,sha256=udwfLleTdyz_DuxHuoiv2wHEZoAPBPbwdF_ivjIfP5c,6867
 langroid/agent/special/arangodb/tools.py,sha256=Mixl9WS0r0Crd4nrw2YAB0eY33fTsKISul1053eyeio,3590
 langroid/agent/special/arangodb/utils.py,sha256=LIevtkayIdVVXyj3jlbKH2WgdZTtH5-JLgbXOHC7uxs,1420
-langroid/agent/special/doc_chat_agent.py,sha256=
+langroid/agent/special/doc_chat_agent.py,sha256=zw2MvdCWRPH93d73PKh27KFiQ8sUCFPxAfLDdkxvdZQ,59301
 langroid/agent/special/lance_doc_chat_agent.py,sha256=s8xoRs0gGaFtDYFUSIRchsgDVbS5Q3C2b2mr3V1Fd-Q,10419
 langroid/agent/special/lance_rag/__init__.py,sha256=QTbs0IVE2ZgDg8JJy1zN97rUUg4uEPH7SLGctFNumk4,174
 langroid/agent/special/lance_rag/critic_agent.py,sha256=OtFuHthKQLkdVkvuZ2m0GNq1qOYLqHkm1pfLRFnSg5c,9548
@@ -39,7 +39,7 @@ langroid/agent/special/sql/utils/tools.py,sha256=ovCePzq5cmbqw0vsVPBzxdZpUcSUIfT
 langroid/agent/special/table_chat_agent.py,sha256=d9v2wsblaRx7oMnKhLV7uO_ujvk9gh59pSGvBXyeyNc,9659
 langroid/agent/structured_message.py,sha256=y7pud1EgRNeTFZlJmBkLmwME3yQJ_IYik-Xds9kdZbY,282
 langroid/agent/task.py,sha256=D7mGWdZ8H71AG2ZPLK6RIiG29Kegn_3lTEBfsBU0I_8,87397
-langroid/agent/tool_message.py,sha256=
+langroid/agent/tool_message.py,sha256=noPvn2PxFY_xJvJXJzv-n5RVgy3CjH-Y_FZ5jEik5pQ,11422
 langroid/agent/tools/__init__.py,sha256=IMgCte-_ZIvCkozGQmvMqxIw7_nKLKzD78ccJL1bnQU,804
 langroid/agent/tools/duckduckgo_search_tool.py,sha256=NhsCaGZkdv28nja7yveAhSK_w6l_Ftym8agbrdzqgfo,1935
 langroid/agent/tools/file_tools.py,sha256=GjPB5YDILucYapElnvvoYpGJuZQ25ecLs2REv7edPEo,7292
@@ -142,8 +142,8 @@ langroid/vector_store/meilisearch.py,sha256=6frB7GFWeWmeKzRfLZIvzRjllniZ1cYj3Hmh
 langroid/vector_store/momento.py,sha256=qR-zBF1RKVHQZPZQYW_7g-XpTwr46p8HJuYPCkfJbM4,10534
 langroid/vector_store/qdrant_cloud.py,sha256=3im4Mip0QXLkR6wiqVsjV1QvhSElfxdFSuDKddBDQ-4,188
 langroid/vector_store/qdrantdb.py,sha256=v88lqFkepADvlN6lByUj9I4NEKa9X9lWH16uTPPbYrE,17457
-pyproject.toml,sha256=
-langroid-0.23.
-langroid-0.23.
-langroid-0.23.
-langroid-0.23.
+pyproject.toml,sha256=NS0v0e2ghAIfe3rj5cJ3LRYhuI5aliFQ_Oq78fGBC-k,7488
+langroid-0.23.1.dist-info/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
+langroid-0.23.1.dist-info/METADATA,sha256=n4FASD-t2NGYrid0FZNB5_0rjMCQZm6DMJ9WemcfSTM,57300
+langroid-0.23.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+langroid-0.23.1.dist-info/RECORD,,
pyproject.toml
CHANGED
File without changes