langroid 0.1.258__py3-none-any.whl → 0.1.260__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langroid/agent/callbacks/chainlit.py +45 -7
- langroid/agent/task.py +21 -3
- langroid/language_models/openai_gpt.py +3 -1
- langroid/vector_store/qdrantdb.py +1 -1
- {langroid-0.1.258.dist-info → langroid-0.1.260.dist-info}/METADATA +3 -2
- {langroid-0.1.258.dist-info → langroid-0.1.260.dist-info}/RECORD +9 -9
- pyproject.toml +1 -1
- {langroid-0.1.258.dist-info → langroid-0.1.260.dist-info}/LICENSE +0 -0
- {langroid-0.1.258.dist-info → langroid-0.1.260.dist-info}/WHEEL +0 -0
langroid/agent/callbacks/chainlit.py
CHANGED
@@ -214,6 +214,7 @@ def wrap_text_preserving_structure(text: str, width: int = 90) -> str:
 
 class ChainlitCallbackConfig(BaseSettings):
     user_has_agent_name: bool = True  # show agent name in front of "YOU" ?
+    show_subtask_response: bool = True  # show sub-task response as a step?
 
 
 class ChainlitAgentCallbacks:
@@ -277,12 +278,6 @@ class ChainlitAgentCallbacks:
 
     def start_llm_stream(self) -> Callable[[str], None]:
         """Returns a streaming fn that can be passed to the LLM class"""
-        logger.info(
-            f"""
-            Starting LLM stream for {self.agent.config.name}
-            under parent {self._get_parent_id()}
-            """
-        )
         self.stream = cl.Step(
             id=self.curr_step.id if self.curr_step is not None else None,
             name=self._entity_name("llm"),
@@ -291,6 +286,13 @@ class ChainlitAgentCallbacks:
         )
         self.last_step = self.stream
         self.curr_step = None
+        logger.info(
+            f"""
+            Starting LLM stream for {self.agent.config.name}
+            id = {self.stream.id}
+            under parent {self._get_parent_id()}
+            """
+        )
         run_sync(self.stream.send())  # type: ignore
 
         def stream_token(t: str) -> None:
@@ -323,6 +325,13 @@ class ChainlitAgentCallbacks:
             language="json" if is_tool else None,
         )
         step.output = textwrap.dedent(content) or NO_ANSWER
+        logger.info(
+            f"""
+            Finish STREAM LLM response for {self.agent.config.name}
+            id = {step.id}
+            under parent {self._get_parent_id()}
+            """
+        )
         run_sync(step.update())  # type: ignore
 
     def show_llm_response(
@@ -343,6 +352,13 @@ class ChainlitAgentCallbacks:
         self.last_step = step
         self.curr_step = None
         step.output = textwrap.dedent(content) or NO_ANSWER
+        logger.info(
+            f"""
+            Showing NON-STREAM LLM response for {self.agent.config.name}
+            id = {step.id}
+            under parent {self._get_parent_id()}
+            """
+        )
         run_sync(step.send())  # type: ignore
 
     def show_error_message(self, error: str) -> None:
@@ -374,6 +390,13 @@ class ChainlitAgentCallbacks:
         self.last_step = step
         self.curr_step = None
         step.output = content
+        logger.info(
+            f"""
+            Showing AGENT response for {self.agent.config.name}
+            id = {step.id}
+            under parent {self._get_parent_id()}
+            """
+        )
         run_sync(step.send())  # type: ignore
 
     def show_start_response(self, entity: str) -> None:
@@ -390,6 +413,13 @@ class ChainlitAgentCallbacks:
         step.output = ""
         self.last_step = step
         self.curr_step = step
+        logger.info(
+            f"""
+            Showing START response for {self.agent.config.name} ({entity})
+            id = {step.id}
+            under parent {self._get_parent_id()}
+            """
+        )
         run_sync(step.send())  # type: ignore
 
     def _entity_name(
@@ -459,6 +489,13 @@ class ChainlitAgentCallbacks:
             parent_id=self._get_parent_id(),
        )
         step.output = message
+        logger.info(
+            f"""
+            Showing USER response for {self.agent.config.name}
+            id = {step.id}
+            under parent {self._get_parent_id()}
+            """
+        )
         run_sync(step.send())
 
     def show_first_user_message(self, msg: cl.Message):
@@ -575,7 +612,8 @@ class ChainlitTaskCallbacks(ChainlitAgentCallbacks):
         super().__init__(task.agent, msg, config)
         self._inject_callbacks(task)
         self.task = task
-
+        if config.show_subtask_response:
+            self.task.callbacks.show_subtask_response = self.show_subtask_response
 
     @classmethod
     def _inject_callbacks(
langroid/agent/task.py
CHANGED
@@ -156,6 +156,11 @@ class Task:
             interactive (bool): if true, wait for human input after each non-human
                 response (prevents infinite loop of non-human responses).
                 Default is true. If false, then `default_human_response` is set to ""
+                Note: When interactive = False, the one exception is when the user
+                is explicitly addressed, via "@user" or using RecipientTool, in which
+                case the system will wait for a user response. In other words, use
+                `interactive=False` when you want a "largely non-interactive"
+                run, with the exception of explicit user addressing.
             only_user_quits_root (bool): if true, only user can quit the root task.
                 [This param is ignored & deprecated; Keeping for backward compatibility.
                 Instead of this, setting `interactive` suffices]
@@ -912,7 +917,11 @@ class Task:
             parent (ChatDocument|None): parent message of the current message
         """
         self.n_stalled_steps += 1
-
+        user_dummy_response = self.pending_sender != Entity.USER and self.interactive
+        if (not self.is_pass_thru) and (
+            not self.task_progress or self.allow_null_result or user_dummy_response
+        ):
+
             # There has been no progress at all in this task, so we
             # update the pending_message to a dummy NO_ANSWER msg
             # from the entity 'opposite' to the current pending_sender,
@@ -1375,9 +1384,18 @@ class Task:
         )
 
     def _can_respond(self, e: Responder) -> bool:
-
-        #
+        user_can_respond = self.interactive or (
+            # regardless of self.interactive, if a msg is explicitly addressed to
+            # user, then wait for user response
+            self.pending_message is not None
+            and self.pending_message.metadata.recipient == Entity.USER
+        )
+
+        if self.pending_sender == e or (e == Entity.USER and not user_can_respond):
+            # sender is same as e (an entity cannot respond to its own msg),
+            # or user cannot respond
             return False
+
         if self.pending_message is None:
             return True
         if self._recipient_mismatch(e):
langroid/language_models/openai_gpt.py
CHANGED
@@ -421,7 +421,9 @@ class OpenAIGPT(LanguageModel):
             self.api_base = "http://" + self.api_base
         elif self.config.chat_model.startswith("ollama/"):
             self.config.ollama = True
-
+
+            # use api_base from config if set, else fall back on OLLAMA_BASE_URL
+            self.api_base = self.config.api_base or OLLAMA_BASE_URL
             self.api_key = OLLAMA_API_KEY
             self.config.chat_model = self.config.chat_model.replace("ollama/", "")
         else:
langroid/vector_store/qdrantdb.py
CHANGED
@@ -68,7 +68,7 @@ class QdrantDBConfig(VectorStoreConfig):
     embedding: EmbeddingModelsConfig = OpenAIEmbeddingsConfig()
     distance: str = Distance.COSINE
     use_sparse_embeddings: bool = False
-    sparse_embedding_model: str = ""
+    sparse_embedding_model: str = "naver/splade-v3-distilbert"
     sparse_limit: int = 3
 
 
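The qdrantdb.py change only swaps the default `sparse_embedding_model` from an empty string to `naver/splade-v3-distilbert`, so opting into sparse embeddings no longer requires naming a model. A hedged sketch; the collection name is illustrative.

    # Sketch only: enable sparse (SPLADE) embeddings; the model now defaults
    # to "naver/splade-v3-distilbert" unless overridden.
    from langroid.vector_store.qdrantdb import QdrantDB, QdrantDBConfig

    config = QdrantDBConfig(
        collection_name="docs-sparse",  # illustrative collection name
        use_sparse_embeddings=True,     # default remains False
        sparse_limit=3,
    )
    vecdb = QdrantDB(config)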
{langroid-0.1.258.dist-info → langroid-0.1.260.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langroid
-Version: 0.1.258
+Version: 0.1.260
 Summary: Harness LLMs with Multi-Agent Programming
 License: MIT
 Author: Prasad Chalasani
@@ -110,6 +110,7 @@ Description-Content-Type: text/markdown
 <div align="center">
 
 [](https://pypi.org/project/langroid/)
+[](https://pypi.org/project/langroid/)
 [](https://github.com/langroid/langroid/actions/workflows/pytest.yml)
 [](https://codecov.io/gh/langroid/langroid)
 [](https://github.com/langroid/langroid/actions/workflows/docker-publish.yml)
@@ -436,7 +437,7 @@ section above)
  a task of an agent can delegate to other sub-tasks: from the point of view of a Task,
  sub-tasks are simply additional responders, to be used in a round-robin fashion
  after the agent's own responders.
-- **Modularity,
+- **Modularity, Reusability, Loose coupling:** The `Agent` and `Task` abstractions allow users to design
  Agents with specific skills, wrap them in Tasks, and combine tasks in a flexible way.
 - **LLM Support**: Langroid supports OpenAI LLMs as well as LLMs from hundreds of
  providers (local/open or remote/commercial) via proxy libraries and local model servers
{langroid-0.1.258.dist-info → langroid-0.1.260.dist-info}/RECORD
CHANGED
@@ -3,7 +3,7 @@ langroid/agent/__init__.py,sha256=ll0Cubd2DZ-fsCMl7e10hf9ZjFGKzphfBco396IKITY,78
 langroid/agent/base.py,sha256=aSwWmOBg0d3QQHUSauscMNfnl8Wkv6nrk2nngKa9DjM,37183
 langroid/agent/batch.py,sha256=feRA_yRG768ElOQjrKEefcRv6Aefd_yY7qktuYUQDwc,10040
 langroid/agent/callbacks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langroid/agent/callbacks/chainlit.py,sha256=
+langroid/agent/callbacks/chainlit.py,sha256=6gkk9Qf_i4fOD13w8ZdUfMcgKYPzLMw30hzFUN60AIc,22044
 langroid/agent/chat_agent.py,sha256=hnmeOxdi4i5w8WaL2kPjQOEpenoRW_hG5EfeMWuuVsQ,39478
 langroid/agent/chat_document.py,sha256=uwCq53SHRyxQw6qyhjzPYuJG48VHBgOf2122Ew3fk6c,9316
 langroid/agent/helpers.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -32,7 +32,7 @@ langroid/agent/special/sql/utils/populate_metadata.py,sha256=1J22UsyEPKzwK0XlJZt
 langroid/agent/special/sql/utils/system_message.py,sha256=qKLHkvQWRQodTtPLPxr1GSLUYUFASZU8x-ybV67cB68,1885
 langroid/agent/special/sql/utils/tools.py,sha256=6uB2424SLtmapui9ggcEr0ZTiB6_dL1-JRGgN8RK9Js,1332
 langroid/agent/special/table_chat_agent.py,sha256=d9v2wsblaRx7oMnKhLV7uO_ujvk9gh59pSGvBXyeyNc,9659
-langroid/agent/task.py,sha256=
+langroid/agent/task.py,sha256=Su1TpEmt3aPVTzXnTvJdyjSqZsXmUUcHN12KAs-gMY0,60408
 langroid/agent/tool_message.py,sha256=7t-UGEbykosKHAvaLI0Rm59sgxvN31IO3-P7bg7gLug,9730
 langroid/agent/tools/__init__.py,sha256=8Pc9BlGCB5FQ2IDGKS_WPpHCoWp5jblMU8EHJwwikAY,303
 langroid/agent/tools/duckduckgo_search_tool.py,sha256=NhsCaGZkdv28nja7yveAhSK_w6l_Ftym8agbrdzqgfo,1935
@@ -65,7 +65,7 @@ langroid/language_models/azure_openai.py,sha256=ncRCbKooqLVOY-PWQUIo9C3yTuKEFbAw
 langroid/language_models/base.py,sha256=8FTvWtOmIrz6K78kzyrVqf2uJk03dBc0AUnVY-l9ucg,21031
 langroid/language_models/config.py,sha256=5UF3DzO1a-Dfsc3vghE0XGq7g9t_xDsRCsuRiU4dgBg,366
 langroid/language_models/openai_assistants.py,sha256=9K-DEAL2aSWHeXj2hwCo2RAlK9_1oCPtqX2u1wISCj8,36
-langroid/language_models/openai_gpt.py,sha256=
+langroid/language_models/openai_gpt.py,sha256=OZcFAtVI8JPGOPPRozWI1PfkdE450hS16RChaUc2uFM,50702
 langroid/language_models/prompt_formatter/__init__.py,sha256=2-5cdE24XoFDhifOLl8yiscohil1ogbP1ECkYdBlBsk,372
 langroid/language_models/prompt_formatter/base.py,sha256=eDS1sgRNZVnoajwV_ZIha6cba5Dt8xjgzdRbPITwx3Q,1221
 langroid/language_models/prompt_formatter/hf_formatter.py,sha256=TFL6ppmeQWnzr6CKQzRZFYY810zE1mr8DZnhw6i85ok,5217
@@ -125,9 +125,9 @@ langroid/vector_store/lancedb.py,sha256=nC5pcrFoUOOO941Y7XiPZONUO4LuoZIAR1aR4Pec
 langroid/vector_store/meilisearch.py,sha256=6frB7GFWeWmeKzRfLZIvzRjllniZ1cYj3HmhHQICXLs,11663
 langroid/vector_store/momento.py,sha256=QaPzUnTwlswoawGB-paLtUPyLRvckFXLfLDfvbTzjNQ,10505
 langroid/vector_store/qdrant_cloud.py,sha256=3im4Mip0QXLkR6wiqVsjV1QvhSElfxdFSuDKddBDQ-4,188
-langroid/vector_store/qdrantdb.py,sha256=
-pyproject.toml,sha256=
-langroid-0.1.
-langroid-0.1.
-langroid-0.1.
-langroid-0.1.
+langroid/vector_store/qdrantdb.py,sha256=wYOuu5c2vIKn9ZgvTXcAiZXMpV8AOXEWFAzI8S8UP-0,16828
+pyproject.toml,sha256=6Vqvoq2dWP_JzMKXoKVlTteWCZGmWPW4tFQV0KfRtVQ,7026
+langroid-0.1.260.dist-info/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
+langroid-0.1.260.dist-info/METADATA,sha256=ftMLe9Jwmp1GkP9bsiO4kdxABtcgFuR1hsOdaXrKNkI,52506
+langroid-0.1.260.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+langroid-0.1.260.dist-info/RECORD,,
pyproject.toml
CHANGED
File without changes