langroid 0.37.7__py3-none-any.whl → 0.39.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langroid/agent/base.py +5 -0
- langroid/agent/chat_agent.py +29 -1
- langroid/embedding_models/__init__.py +4 -0
- langroid/embedding_models/base.py +4 -0
- langroid/embedding_models/models.py +60 -0
- langroid/mytypes.py +9 -0
- {langroid-0.37.7.dist-info → langroid-0.39.0.dist-info}/METADATA +3 -1
- {langroid-0.37.7.dist-info → langroid-0.39.0.dist-info}/RECORD +10 -10
- {langroid-0.37.7.dist-info → langroid-0.39.0.dist-info}/WHEEL +0 -0
- {langroid-0.37.7.dist-info → langroid-0.39.0.dist-info}/licenses/LICENSE +0 -0
langroid/agent/base.py
CHANGED
@@ -333,6 +333,11 @@ class Agent(ABC):
         if hasattr(message_class, "handle_message_fallback") and (
             inspect.isfunction(message_class.handle_message_fallback)
         ):
+            # When a ToolMessage has a `handle_message_fallback` method,
+            # we inject it into the agent as a method, overriding the default
+            # `handle_message_fallback` method (which does nothing).
+            # It's possible multiple tool messages have a `handle_message_fallback`,
+            # in which case, the last one inserted will be used.
             setattr(
                 self,
                 "handle_message_fallback",
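For readers unfamiliar with this mechanism, below is a self-contained sketch of the injection pattern the new comments describe, using generic stand-in classes rather than langroid's real Agent/ToolMessage; the value actually bound by `setattr` is cut off in the hunk, so the `MethodType` binding here is an assumption.

    import inspect
    from types import MethodType


    class Agent:
        def handle_message_fallback(self, msg):
            return None  # default: do nothing

        def install_fallback(self, message_class) -> None:
            # Same pattern as the diff: if the class defines a plain function named
            # handle_message_fallback, bind it to this instance, replacing the default.
            # Repeated calls overwrite the binding, so the last class installed wins.
            if hasattr(message_class, "handle_message_fallback") and inspect.isfunction(
                message_class.handle_message_fallback
            ):
                setattr(
                    self,
                    "handle_message_fallback",
                    MethodType(message_class.handle_message_fallback, self),
                )


    class ReminderTool:
        # stand-in for a ToolMessage subclass that carries a fallback handler
        def handle_message_fallback(self, msg):
            return "Please respond with a properly formatted tool call."


    agent = Agent()
    agent.install_fallback(ReminderTool)
    print(agent.handle_message_fallback("free-form text"))  # -> reminder string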
langroid/agent/chat_agent.py
CHANGED
@@ -5,7 +5,7 @@ import logging
 import textwrap
 from contextlib import ExitStack
 from inspect import isclass
-from typing import Dict, List, Optional, Self, Set, Tuple, Type, Union, cast
+from typing import Any, Dict, List, Optional, Self, Set, Tuple, Type, Union, cast
 
 import openai
 from rich import print
@@ -31,6 +31,7 @@ from langroid.language_models.base import (
     ToolChoiceTypes,
 )
 from langroid.language_models.openai_gpt import OpenAIGPT
+from langroid.mytypes import Entity, Routing
 from langroid.pydantic_v1 import BaseModel, ValidationError
 from langroid.utils.configuration import settings
 from langroid.utils.object_registry import ObjectRegistry
@@ -52,6 +53,7 @@ class ChatAgentConfig(AgentConfig):
         user_message: user message to include in message sequence.
             Used only if `task` is not specified in the constructor.
         use_tools: whether to use our own ToolMessages mechanism
+        non_tool_routing (Routing|str): routing when LLM generates non-tool msg.
         use_functions_api: whether to use functions/tools native to the LLM API
             (e.g. OpenAI's `function_call` or `tool_call` mechanism)
         use_tools_api: When `use_functions_api` is True, if this is also True,
@@ -84,6 +86,7 @@ class ChatAgentConfig(AgentConfig):
 
     system_message: str = "You are a helpful assistant."
     user_message: Optional[str] = None
+    non_tool_routing: Routing | None = None
     use_tools: bool = False
     use_functions_api: bool = True
     use_tools_api: bool = False
@@ -579,6 +582,31 @@ class ChatAgent(Agent):
         # remove leading and trailing newlines and other whitespace
         return LLMMessage(role=Role.SYSTEM, content=content.strip())
 
+    def handle_message_fallback(self, msg: str | ChatDocument) -> Any:
+        """
+        Fallback method for the "no-tools" scenario.
+        Uses the self.config.non_tool_routing to determine the action to take.
+
+        This method can be overridden by subclasses, e.g.,
+        to create a "reminder" message when a tool is expected but the LLM "forgot"
+        to generate one.
+
+        Args:
+            msg (str | ChatDocument): The input msg to handle
+        Returns:
+            Any: The result of the handler method
+        """
+        if self.config.non_tool_routing is None:
+            return None
+        if isinstance(msg, ChatDocument) and msg.metadata.sender == Entity.LLM:
+            from langroid.agent.tools.orchestration import AgentDoneTool, ForwardTool
+
+            match self.config.non_tool_routing:
+                case Routing.FORWARD_USER:
+                    return ForwardTool(agent="User")
+                case Routing.DONE:
+                    return AgentDoneTool(content=msg.content, tools=msg.tool_messages)
+
     def unhandled_tools(self) -> set[str]:
         """The set of tools that are known but not handled.
         Useful in task flow: an agent can refuse to accept an incoming msg
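The `non_tool_routing` setting added above can be used directly when constructing a ChatAgent. A minimal sketch based only on the fields visible in this diff; the system message text is illustrative:

    from langroid.agent.chat_agent import ChatAgent, ChatAgentConfig
    from langroid.mytypes import Routing

    # Forward any plain (non-tool) LLM response to the user; Routing.DONE would
    # instead end the task with the LLM's content and tool messages.
    config = ChatAgentConfig(
        system_message="Answer only via the tools you have been given.",
        non_tool_routing=Routing.FORWARD_USER,
    )
    agent = ChatAgent(config)
    # With the default non_tool_routing=None, handle_message_fallback returns None
    # and behavior matches 0.37.7.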
langroid/embedding_models/__init__.py
CHANGED
@@ -13,6 +13,8 @@ from .models import (
     SentenceTransformerEmbeddingsConfig,
     LlamaCppServerEmbeddings,
     LlamaCppServerEmbeddingsConfig,
+    GeminiEmbeddings,
+    GeminiEmbeddingsConfig,
     embedding_model,
 )
 from .remote_embeds import (
@@ -33,6 +35,8 @@ __all__ = [
     "SentenceTransformerEmbeddingsConfig",
     "LlamaCppServerEmbeddings",
     "LlamaCppServerEmbeddingsConfig",
+    "GeminiEmbeddings",
+    "GeminiEmbeddingsConfig",
     "embedding_model",
     "RemoteEmbeddingsConfig",
     "RemoteEmbeddings",
langroid/embedding_models/base.py
CHANGED
@@ -28,6 +28,8 @@ class EmbeddingModel(ABC):
         AzureOpenAIEmbeddingsConfig,
         FastEmbedEmbeddings,
         FastEmbedEmbeddingsConfig,
+        GeminiEmbeddings,
+        GeminiEmbeddingsConfig,
         LlamaCppServerEmbeddings,
         LlamaCppServerEmbeddingsConfig,
         OpenAIEmbeddings,
@@ -52,6 +54,8 @@ class EmbeddingModel(ABC):
             return FastEmbedEmbeddings(config)
         elif isinstance(config, LlamaCppServerEmbeddingsConfig):
             return LlamaCppServerEmbeddings(config)
+        elif isinstance(config, GeminiEmbeddingsConfig):
+            return GeminiEmbeddings(config)
         else:
             raise ValueError(f"Unknown embedding config: {config.__repr_name__}")
 
langroid/embedding_models/models.py
CHANGED
@@ -77,6 +77,14 @@ class LlamaCppServerEmbeddingsConfig(EmbeddingModelsConfig):
     batch_size: int = 2048
 
 
+class GeminiEmbeddingsConfig(EmbeddingModelsConfig):
+    model_type: str = "gemini"
+    model_name: str = "models/text-embedding-004"
+    api_key: str = ""
+    dims: int = 768
+    batch_size: int = 512
+
+
 class EmbeddingFunctionCallable:
     """
     A callable class designed to generate embeddings for a list of texts using
@@ -160,6 +168,8 @@ class EmbeddingFunctionCallable:
                     self.embed_model.detokenize_string(list(token_batch))
                 )
                 embeds.append(gen_embedding)
+        elif isinstance(self.embed_model, GeminiEmbeddings):
+            embeds = self.embed_model.generate_embeddings(input)
         return embeds
 
 
@@ -437,6 +447,54 @@ class LlamaCppServerEmbeddings(EmbeddingModel):
         return self.config.dims
 
 
+class GeminiEmbeddings(EmbeddingModel):
+    def __init__(self, config: GeminiEmbeddingsConfig = GeminiEmbeddingsConfig()):
+        try:
+            import google.generativeai as genai
+        except ImportError as e:
+            raise LangroidImportError(extra="google-generativeai", error=str(e))
+        super().__init__()
+        self.config = config
+        load_dotenv()
+        self.config.api_key = os.getenv("GEMINI_API_KEY", "")
+
+        if self.config.api_key == "":
+            raise ValueError(
+                """
+                GEMINI_API_KEY env variable must be set to use GeminiEmbeddings.
+                """
+            )
+        genai.configure(api_key=self.config.api_key)  # type: ignore[attr-defined]
+        self.client = genai
+
+    def embedding_fn(self) -> Callable[[List[str]], Embeddings]:
+        return EmbeddingFunctionCallable(self, self.config.batch_size)
+
+    def generate_embeddings(self, texts: List[str]) -> Embeddings:
+        all_embeddings = []  # More precise type hint
+        for batch in batched(texts, self.config.batch_size):
+            result = self.client.embed_content(  # type: ignore[attr-defined]
+                model=self.config.model_name,
+                content=batch,
+                task_type="RETRIEVAL_DOCUMENT",
+            )
+
+            embeddings = result["embedding"]
+            if not isinstance(embeddings, list):
+                raise ValueError("Unexpected format for embeddings: not a list")
+
+            if embeddings and isinstance(embeddings[0], list):
+                all_embeddings.extend(embeddings)
+            else:
+                all_embeddings.append(embeddings)
+
+        return all_embeddings
+
+    @property
+    def embedding_dims(self) -> int:
+        return self.config.dims
+
+
 def embedding_model(embedding_fn_type: str = "openai") -> EmbeddingModel:
     """
     Args:
@@ -457,5 +515,7 @@ def embedding_model(embedding_fn_type: str = "openai") -> EmbeddingModel:
         return FastEmbedEmbeddings  # type: ignore
     elif embedding_fn_type == "llamacppserver":
         return LlamaCppServerEmbeddings  # type: ignore
+    elif embedding_fn_type == "gemini":
+        return GeminiEmbeddings  # type: ignore
     else:  # default sentence transformer
         return SentenceTransformerEmbeddings  # type: ignore
langroid/mytypes.py
CHANGED
@@ -93,3 +93,12 @@ class Document(BaseModel):
             SOURCE:{self.metadata.source}
             """
         )
+
+
+class Routing(str, Enum):
+    """
+    Possible Routing options. Mainly used to handle non-tool msgs from LLM.
+    """
+
+    FORWARD_USER = "user"  # forward msg to user
+    DONE = "done"  # task done
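Because Routing derives from str, its members compare equal to their plain string values, which is why ChatAgentConfig documents non_tool_routing as `Routing|str`. A quick illustration of the assumed equivalence (not shown in the diff):

    from langroid.mytypes import Routing

    assert Routing.FORWARD_USER == "user"
    assert Routing.DONE == "done"

    # So these two configs are expected to behave the same:
    #   ChatAgentConfig(non_tool_routing=Routing.DONE)
    #   ChatAgentConfig(non_tool_routing="done")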
{langroid-0.37.7.dist-info → langroid-0.39.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langroid
-Version: 0.37.7
+Version: 0.39.0
 Summary: Harness LLMs with Multi-Agent Programming
 Author-email: Prasad Chalasani <pchalasani@gmail.com>
 License: MIT
@@ -106,6 +106,8 @@ Provides-Extra: docx
 Requires-Dist: python-docx<2.0.0,>=1.1.0; extra == 'docx'
 Provides-Extra: fastembed
 Requires-Dist: fastembed<0.4.0,>=0.3.1; extra == 'fastembed'
+Provides-Extra: google-generativeai
+Requires-Dist: google-generativeai>=0.8.4; extra == 'google-generativeai'
 Provides-Extra: hf-embeddings
 Requires-Dist: sentence-transformers<3.0.0,>=2.2.2; extra == 'hf-embeddings'
 Requires-Dist: torch<3.0.0,>=2.0.0; extra == 'hf-embeddings'
{langroid-0.37.7.dist-info → langroid-0.39.0.dist-info}/RECORD
CHANGED
@@ -1,11 +1,11 @@
 langroid/__init__.py,sha256=z_fCOLQJPOw3LLRPBlFB5-2HyCjpPgQa4m4iY5Fvb8Y,1800
 langroid/exceptions.py,sha256=OPjece_8cwg94DLPcOGA1ddzy5bGh65pxzcHMnssTz8,2995
-langroid/mytypes.py,sha256=
+langroid/mytypes.py,sha256=NVLwkiP404ekwnRTfn-6B2iWqS69b3fZDNOo9VB_7Vc,2848
 langroid/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langroid/agent/__init__.py,sha256=ll0Cubd2DZ-fsCMl7e10hf9ZjFGKzphfBco396IKITY,786
-langroid/agent/base.py,sha256=
+langroid/agent/base.py,sha256=o406IulFni4y0L8u8g0MWphASX-d57jZgXLeS61iMko,78204
 langroid/agent/batch.py,sha256=vi1r5i1-vN80WfqHDSwjEym_KfGsqPGUtwktmiK1nuk,20635
-langroid/agent/chat_agent.py,sha256=
+langroid/agent/chat_agent.py,sha256=9WgxuMibiF0jC6s0x721p6XJ_MH6NXM1hqriZ_iqTLQ,83499
 langroid/agent/chat_document.py,sha256=xzMtrPbaW-Y-BnF7kuhr2dorsD-D5rMWzfOqJ8HAoo8,17885
 langroid/agent/openai_assistant.py,sha256=JkAcs02bIrgPNVvUWVR06VCthc5-ulla2QMBzux_q6o,34340
 langroid/agent/task.py,sha256=XrXUbSoiFasvpIsZPn_cBpdWaTCKljJPRimtLMrSZrs,90347
@@ -55,9 +55,9 @@ langroid/cachedb/__init__.py,sha256=icAT2s7Vhf-ZGUeqpDQGNU6ob6o0aFEyjwcxxUGRFjg,
 langroid/cachedb/base.py,sha256=ztVjB1DtN6pLCujCWnR6xruHxwVj3XkYniRTYAKKqk0,1354
 langroid/cachedb/momento_cachedb.py,sha256=YEOJ62hEcV6iIeMr5aGgRYgWQqFYaej9gEDEcY0sm7M,3172
 langroid/cachedb/redis_cachedb.py,sha256=7kgnbf4b5CKsCrlL97mHWKvdvlLt8zgn7lc528jEpiE,5141
-langroid/embedding_models/__init__.py,sha256=
-langroid/embedding_models/base.py,sha256=
-langroid/embedding_models/models.py,sha256=
+langroid/embedding_models/__init__.py,sha256=KyYxR3jDFUCfYjSuCL86qjAmrq6mXXjOT4lFNOKVj6Y,955
+langroid/embedding_models/base.py,sha256=Ml7oA6PzQm0wZmIYn3fhF7dvZCi-amviWUwOeBegH3A,2562
+langroid/embedding_models/models.py,sha256=fKQBiaaG7uYoeELDbAiNxwLdn-CWN8dyiVEZcdk_bjI,18959
 langroid/embedding_models/remote_embeds.py,sha256=6_kjXByVbqhY9cGwl9R83ZcYC2km-nGieNNAo1McHaY,5151
 langroid/embedding_models/protoc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langroid/embedding_models/protoc/embeddings.proto,sha256=_O-SgFpTaylQeOTgSpxhEJ7CUw7PeCQQJLaPqpPYKJg,321
@@ -123,7 +123,7 @@ langroid/vector_store/meilisearch.py,sha256=6frB7GFWeWmeKzRfLZIvzRjllniZ1cYj3Hmh
 langroid/vector_store/momento.py,sha256=xOaU7Hlyyn_5ihb0ARS5JHtmrKrTCt2IdRA-ioMM5ek,10307
 langroid/vector_store/qdrantdb.py,sha256=v7TAsIoj_vxeKDYS9tpwJLBZA8fuTweTYxHo0X_uawM,17949
 langroid/vector_store/weaviatedb.py,sha256=FOzgvqLqvdN5jJebVtJ-8tu2CeBzBfSP3ih4_ODEOOw,10605
-langroid-0.
-langroid-0.
-langroid-0.
-langroid-0.
+langroid-0.39.0.dist-info/METADATA,sha256=hhbwLxNJulTOPXHqMK0F_arDZ0FASsY_7WBIrCJXE0g,60634
+langroid-0.39.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langroid-0.39.0.dist-info/licenses/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
+langroid-0.39.0.dist-info/RECORD,,
{langroid-0.37.7.dist-info → langroid-0.39.0.dist-info}/WHEEL
File without changes
{langroid-0.37.7.dist-info → langroid-0.39.0.dist-info}/licenses/LICENSE
File without changes