langroid 0.1.253__py3-none-any.whl → 0.1.254__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
langroid/agent/special/__init__.py
@@ -16,7 +16,8 @@ from .table_chat_agent import (
      TableChatAgentConfig,
      PandasEvalTool,
  )
- from . import sql
+
+
  from . import relevance_extractor_agent
  from . import doc_chat_agent
  from . import retriever_agent
@@ -39,7 +40,6 @@ __all__ = [
      "TableChatAgent",
      "TableChatAgentConfig",
      "PandasEvalTool",
-     "sql",
      "relevance_extractor_agent",
      "doc_chat_agent",
      "retriever_agent",
@@ -49,3 +49,11 @@ __all__ = [
      "lance_doc_chat_agent",
      "lance_rag",
  ]
+
+ try:
+     from . import sql
+
+     sql
+     __all__.append("sql")
+ except ImportError:
+     pass
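The recurring change in this release: submodules that pull in optional third-party dependencies (`sql` here; `sql_chat_agent`, `momento_cachedb`, and `spider` below) are no longer imported unconditionally. They are imported inside a `try`/`except ImportError` and only then added to `__all__`. A minimal standalone sketch of the pattern, using a hypothetical package layout rather than langroid's actual modules:

```python
# __init__.py of a hypothetical package "mypkg" with an optional "sql" submodule
from . import core  # always-available submodule

__all__ = ["core"]

try:
    # This import fails if the optional dependency (e.g. sqlalchemy) is absent.
    from . import sql

    sql  # reference the name so flake8/mypy don't flag an unused import
    __all__.append("sql")
except ImportError:
    # The package still imports cleanly; "sql" is simply not exported.
    pass
```

The effect is that a bare `pip install langroid` stays slim: features whose dependencies are missing become unavailable instead of breaking the import of the whole package.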
langroid/agent/special/doc_chat_agent.py
@@ -35,6 +35,7 @@ from langroid.embedding_models.models import (
      OpenAIEmbeddingsConfig,
      SentenceTransformerEmbeddingsConfig,
  )
+ from langroid.exceptions import LangroidImportError
  from langroid.language_models.base import StreamingIfAllowed
  from langroid.language_models.openai_gpt import OpenAIChatModel, OpenAIGPTConfig
  from langroid.mytypes import DocMetaData, Document, Entity
@@ -108,6 +109,9 @@ vecdb_config: VectorStoreConfig = QdrantDBConfig(
  )

  try:
+     import lancedb
+
+     lancedb  # appease mypy
      from langroid.vector_store.lancedb import LanceDBConfig

      vecdb_config = LanceDBConfig(
@@ -117,7 +121,7 @@ try:
          embedding=(hf_embed_config if has_sentence_transformers else oai_embed_config),
      )

- except ImportError:
+ except (ImportError, LangroidImportError):
      pass

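Because submodules such as `langroid.vector_store.lancedb` now guard their own imports and raise `LangroidImportError`, code probing for an optional backend has to catch that exception alongside the plain `ImportError`. A hedged sketch of the fallback logic, with stand-in config classes rather than the real `QdrantDBConfig`/`LanceDBConfig` signatures:

```python
from dataclasses import dataclass


class LangroidImportError(Exception):
    """Stand-in for langroid.exceptions.LangroidImportError."""


@dataclass
class VecDBConfig:
    backend: str


# Default to a backend whose dependencies ship with the core install.
vecdb_config = VecDBConfig(backend="qdrant")

try:
    import lancedb  # optional dependency

    lancedb  # appease mypy, as in the diff above
    vecdb_config = VecDBConfig(backend="lancedb")
except (ImportError, LangroidImportError):
    # Either lancedb itself is missing, or a guarded langroid submodule
    # refused to import; keep the default config.
    pass
```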
langroid/agent/special/sql/__init__.py
@@ -1,10 +1,17 @@
- from . import sql_chat_agent, utils
- from .sql_chat_agent import SQLChatAgentConfig, SQLChatAgent
+ from . import utils


  __all__ = [
-     "SQLChatAgentConfig",
-     "SQLChatAgent",
-     "sql_chat_agent",
      "utils",
  ]
+
+ try:
+     from . import sql_chat_agent
+     from .sql_chat_agent import SQLChatAgentConfig, SQLChatAgent
+
+     sql_chat_agent
+     SQLChatAgent
+     SQLChatAgentConfig
+     __all__.extend(["SQLChatAgentConfig", "SQLChatAgent", "sql_chat_agent"])
+ except ImportError:
+     pass
langroid/cachedb/__init__.py
@@ -1,9 +1,17 @@
  from . import base
- from . import momento_cachedb
+
  from . import redis_cachedb

  __all__ = [
      "base",
-     "momento_cachedb",
      "redis_cachedb",
  ]
+
+
+ try:
+     from . import momento_cachedb
+
+     momento_cachedb
+     __all__.append("momento_cachedb")
+ except ImportError:
+     pass
langroid/parsing/__init__.py
@@ -11,7 +11,6 @@ from . import urls
  from . import utils
  from . import search
  from . import web_search
- from . import spider

  from .parser import (
      Splitter,
@@ -36,7 +35,6 @@ __all__ = [
      "utils",
      "search",
      "web_search",
-     "spider",
      "Splitter",
      "PdfParsingConfig",
      "DocxParsingConfig",
@@ -44,3 +42,11 @@ __all__ = [
      "ParsingConfig",
      "Parser",
  ]
+
+ try:
+     from . import spider
+
+     spider
+     __all__.append("spider")
+ except ImportError:
+     pass
langroid/parsing/document_parser.py
@@ -1,26 +1,37 @@
+ from __future__ import annotations
+
  import itertools
  import logging
  import re
  from enum import Enum
  from io import BytesIO
- from typing import Any, Generator, List, Tuple
+ from typing import TYPE_CHECKING, Any, Generator, List, Tuple

  from langroid.exceptions import LangroidImportError

  try:
      import fitz
  except ImportError:
-     raise LangroidImportError("PyMuPDF", "pdf-parsers")
+     if not TYPE_CHECKING:
+         fitz = None

  try:
      import pypdf
  except ImportError:
-     raise LangroidImportError("pypdf", "pdf-parsers")
+     if not TYPE_CHECKING:
+         pypdf = None
+
+ try:
+     import pdfplumber
+ except ImportError:
+     if not TYPE_CHECKING:
+         pdfplumber = None

- import pdfplumber
  import requests
  from bs4 import BeautifulSoup
- from PIL import Image
+
+ if TYPE_CHECKING:
+     from PIL import Image

  from langroid.mytypes import DocMetaData, Document
  from langroid.parsing.parser import Parser, ParsingConfig
@@ -373,19 +384,21 @@ class FitzPDFParser(DocumentParser):
      Parser for processing PDFs using the `fitz` library.
      """

-     def iterate_pages(self) -> Generator[Tuple[int, fitz.Page], None, None]:
+     def iterate_pages(self) -> Generator[Tuple[int, "fitz.Page"], None, None]:
          """
          Yield each page in the PDF using `fitz`.

          Returns:
              Generator[fitz.Page]: Generator yielding each page.
          """
+         if fitz is None:
+             raise LangroidImportError("fitz", "pdf-parsers")
          doc = fitz.open(stream=self.doc_bytes, filetype="pdf")
          for i, page in enumerate(doc):
              yield i, page
          doc.close()

-     def extract_text_from_page(self, page: fitz.Page) -> str:
+     def extract_text_from_page(self, page: "fitz.Page") -> str:
          """
          Extract text from a given `fitz` page.

@@ -410,6 +423,8 @@ class PyPDFParser(DocumentParser):
          Returns:
              Generator[pypdf.pdf.PageObject]: Generator yielding each page.
          """
+         if pypdf is None:
+             raise LangroidImportError("pypdf", "pdf-parsers")
          reader = pypdf.PdfReader(self.doc_bytes)
          for i, page in enumerate(reader.pages):
              yield i, page
@@ -441,6 +456,8 @@ class PDFPlumberParser(DocumentParser):
          Returns:
              Generator[pdfplumber.Page]: Generator yielding each page.
          """
+         if pdfplumber is None:
+             raise LangroidImportError("pdfplumber", "pdf-parsers")
          with pdfplumber.open(self.doc_bytes) as pdf:
              for i, page in enumerate(pdf.pages):
                  yield i, page
@@ -654,7 +671,10 @@ class PythonDocxParser(DocumentParser):
          In a DOCX file, pages are not explicitly defined,
          so we consider each paragraph as a separate 'page' for simplicity.
          """
-         import docx
+         try:
+             import docx
+         except ImportError:
+             raise LangroidImportError("python-docx", "docx")

          doc = docx.Document(self.doc_bytes)
          for i, para in enumerate(doc.paragraphs, start=1):
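`document_parser.py` moves from failing at import time to failing at first use: each optional PDF library is bound to `None` when missing (only at runtime, so type checkers still see the real module for annotations like `fitz.Page`), and the corresponding parser raises a descriptive error when it is actually invoked. A self-contained sketch of the idea, with a hypothetical `page_count` helper and error class standing in for langroid's parser classes and `LangroidImportError`:

```python
from __future__ import annotations

from typing import TYPE_CHECKING


class OptionalDepError(ImportError):
    """Stand-in for LangroidImportError: names the missing package and its extra."""

    def __init__(self, package: str, extra: str) -> None:
        super().__init__(
            f'{package} is missing; install it with: pip install "langroid[{extra}]"'
        )


try:
    import fitz  # PyMuPDF, an optional dependency
except ImportError:
    if not TYPE_CHECKING:
        # At runtime fall back to None; a type checker never takes this branch,
        # so `fitz` keeps its real module type for annotations.
        fitz = None


def page_count(path: str) -> int:
    # Fail only when the optional library is actually needed.
    if fitz is None:
        raise OptionalDepError("PyMuPDF", "pdf-parsers")
    doc = fitz.open(path)
    try:
        return doc.page_count
    finally:
        doc.close()
```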
langroid/parsing/spider.py
@@ -1,13 +1,18 @@
  from typing import List, Set, no_type_check
  from urllib.parse import urlparse

- from pydispatch import dispatcher
- from scrapy import signals
- from scrapy.crawler import CrawlerRunner
- from scrapy.http import Response
- from scrapy.linkextractors import LinkExtractor
- from scrapy.spiders import CrawlSpider, Rule
- from twisted.internet import defer, reactor
+ from langroid.exceptions import LangroidImportError
+
+ try:
+     from pydispatch import dispatcher
+     from scrapy import signals
+     from scrapy.crawler import CrawlerRunner
+     from scrapy.http import Response
+     from scrapy.linkextractors import LinkExtractor
+     from scrapy.spiders import CrawlSpider, Rule
+     from twisted.internet import defer, reactor
+ except ImportError:
+     raise LangroidImportError("scrapy", "scrapy")


  @no_type_check
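`spider.py` takes the other variant of the same idea: the scrapy/twisted import group is required for the whole module to work, so a failed import is re-raised immediately rather than deferred to first use, but as a `LangroidImportError` that names the extra to install instead of a raw `ModuleNotFoundError`. A condensed sketch of that shape, reusing langroid's own exception with the same `(package, extra)` arguments the diff uses:

```python
from langroid.exceptions import LangroidImportError

try:
    from scrapy.crawler import CrawlerRunner
    from twisted.internet import reactor

    CrawlerRunner, reactor  # appease linters
except ImportError:
    # One missing package makes the whole module unusable, so fail loudly at
    # import time with an error that names the extra providing the group.
    raise LangroidImportError("scrapy", "scrapy")
```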
langroid/vector_store/__init__.py
@@ -10,14 +10,8 @@ __all__ = [
      "VectorStore",
      "VectorStoreConfig",
      "qdrantdb",
-     "meilisearch",
-     "lancedb",
      "QdrantDBConfig",
      "QdrantDB",
-     "MeiliSearch",
-     "MeiliSearchConfig",
-     "LanceDB",
-     "LanceDBConfig",
  ]

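The MeiliSearch and LanceDB symbols are no longer re-exported from `langroid.vector_store`, so code that previously did `from langroid.vector_store import LanceDBConfig` presumably needs to import from the backend submodule and guard it, the same way `doc_chat_agent.py` does above. A sketch of that usage (an inference from this diff, not documented behavior):

```python
from langroid.exceptions import LangroidImportError

try:
    from langroid.vector_store.lancedb import LanceDB, LanceDBConfig
except (ImportError, LangroidImportError):
    # The lancedb extra is not installed; disable the feature that needs it.
    LanceDB = LanceDBConfig = None
```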
langroid-0.1.253.dist-info/METADATA → langroid-0.1.254.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: langroid
- Version: 0.1.253
+ Version: 0.1.254
  Summary: Harness LLMs with Multi-Agent Programming
  License: MIT
  Author: Prasad Chalasani
@@ -12,6 +12,10 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Provides-Extra: chainlit
  Provides-Extra: chromadb
+ Provides-Extra: db
+ Provides-Extra: doc-chat
+ Provides-Extra: docx
+ Provides-Extra: embeddings
  Provides-Extra: hf-embeddings
  Provides-Extra: lancedb
  Provides-Extra: litellm
@@ -23,6 +27,7 @@ Provides-Extra: mysql
  Provides-Extra: neo4j
  Provides-Extra: pdf-parsers
  Provides-Extra: postgres
+ Provides-Extra: scrapy
  Provides-Extra: sql
  Provides-Extra: transformers
  Provides-Extra: unstructured
@@ -42,7 +47,7 @@ Requires-Dist: google-generativeai (>=0.5.2,<0.6.0)
  Requires-Dist: groq (>=0.5.0,<0.6.0)
  Requires-Dist: grpcio (>=1.62.1,<2.0.0)
  Requires-Dist: halo (>=0.0.31,<0.0.32)
- Requires-Dist: huggingface-hub (>=0.21.2,<0.22.0) ; extra == "transformers"
+ Requires-Dist: huggingface-hub (>=0.21.2,<0.22.0) ; extra == "embeddings" or extra == "transformers"
  Requires-Dist: jinja2 (>=3.1.2,<4.0.0)
  Requires-Dist: lancedb (>=0.6.2,<0.7.0) ; extra == "lancedb"
  Requires-Dist: litellm (>=1.30.1,<2.0.0) ; extra == "litellm"
@@ -66,20 +71,20 @@ Requires-Dist: nltk (>=3.8.1,<4.0.0)
  Requires-Dist: onnxruntime (>=1.16.1,<2.0.0)
  Requires-Dist: openai (>=1.14.0,<2.0.0)
  Requires-Dist: pandas (>=2.0.3,<3.0.0)
- Requires-Dist: pdf2image (>=1.17.0,<2.0.0) ; extra == "pdf-parsers"
- Requires-Dist: pdfplumber (>=0.10.2,<0.11.0)
+ Requires-Dist: pdf2image (>=1.17.0,<2.0.0) ; extra == "doc-chat" or extra == "pdf-parsers"
+ Requires-Dist: pdfplumber (>=0.10.2,<0.11.0) ; extra == "doc-chat" or extra == "pdf-parsers"
  Requires-Dist: prettytable (>=3.8.0,<4.0.0)
- Requires-Dist: psycopg2 (>=2.9.7,<3.0.0) ; extra == "postgres" or extra == "sql"
+ Requires-Dist: psycopg2 (>=2.9.7,<3.0.0) ; extra == "db" or extra == "postgres" or extra == "sql"
  Requires-Dist: pyarrow (==15.0.0) ; extra == "lancedb"
  Requires-Dist: pydantic (==1.10.13)
  Requires-Dist: pygithub (>=1.58.1,<2.0.0)
  Requires-Dist: pygments (>=2.15.1,<3.0.0)
- Requires-Dist: pymupdf (>=1.23.3,<2.0.0) ; extra == "pdf-parsers"
- Requires-Dist: pymysql (>=1.1.0,<2.0.0) ; extra == "mysql" or extra == "sql"
+ Requires-Dist: pymupdf (>=1.23.3,<2.0.0) ; extra == "doc-chat" or extra == "pdf-parsers"
+ Requires-Dist: pymysql (>=1.1.0,<2.0.0) ; extra == "db" or extra == "mysql" or extra == "sql"
  Requires-Dist: pyparsing (>=3.0.9,<4.0.0)
- Requires-Dist: pypdf (>=3.12.2,<4.0.0) ; extra == "pdf-parsers"
- Requires-Dist: pytesseract (>=0.3.10,<0.4.0) ; extra == "pdf-parsers"
- Requires-Dist: python-docx (>=1.1.0,<2.0.0)
+ Requires-Dist: pypdf (>=3.12.2,<4.0.0) ; extra == "doc-chat" or extra == "pdf-parsers"
+ Requires-Dist: pytesseract (>=0.3.10,<0.4.0) ; extra == "doc-chat" or extra == "pdf-parsers"
+ Requires-Dist: python-docx (>=1.1.0,<2.0.0) ; extra == "doc-chat" or extra == "docx"
  Requires-Dist: python-dotenv (>=1.0.0,<2.0.0)
  Requires-Dist: python-magic (>=0.4.27,<0.5.0)
  Requires-Dist: python-socketio (>=5.11.0,<6.0.0) ; extra == "chainlit"
@@ -89,17 +94,17 @@ Requires-Dist: redis (>=5.0.1,<6.0.0)
  Requires-Dist: requests (>=2.31.0,<3.0.0)
  Requires-Dist: requests-oauthlib (>=1.3.1,<2.0.0)
  Requires-Dist: rich (>=13.3.4,<14.0.0)
- Requires-Dist: scrapy (>=2.11.0,<3.0.0)
- Requires-Dist: sentence-transformers (==2.2.2) ; extra == "hf-embeddings"
- Requires-Dist: sqlalchemy (>=2.0.19,<3.0.0) ; extra == "sql"
+ Requires-Dist: scrapy (>=2.11.0,<3.0.0) ; extra == "scrapy"
+ Requires-Dist: sentence-transformers (==2.2.2) ; extra == "embeddings" or extra == "hf-embeddings"
+ Requires-Dist: sqlalchemy (>=2.0.19,<3.0.0) ; extra == "db" or extra == "sql"
  Requires-Dist: tantivy (>=0.21.0,<0.22.0) ; extra == "lancedb"
  Requires-Dist: thefuzz (>=0.20.0,<0.21.0)
  Requires-Dist: tiktoken (>=0.7.0,<0.8.0)
- Requires-Dist: torch (==2.0.0) ; extra == "hf-embeddings" or extra == "transformers"
+ Requires-Dist: torch (==2.0.0) ; extra == "embeddings" or extra == "hf-embeddings" or extra == "transformers"
  Requires-Dist: trafilatura (>=1.5.0,<2.0.0)
- Requires-Dist: transformers (>=4.40.1,<5.0.0) ; extra == "transformers"
+ Requires-Dist: transformers (>=4.40.1,<5.0.0) ; extra == "embeddings" or extra == "transformers"
  Requires-Dist: typer (>=0.9.0,<0.10.0)
- Requires-Dist: unstructured[docx,pdf,pptx] (>=0.10.16,<0.10.18) ; extra == "unstructured"
+ Requires-Dist: unstructured[docx,pdf,pptx] (>=0.10.16,<0.10.18) ; extra == "doc-chat" or extra == "unstructured"
  Requires-Dist: wget (>=3.2,<4.0)
  Description-Content-Type: text/markdown

@@ -459,7 +464,8 @@ such as [LiteLLM](https://docs.litellm.ai/docs/providers) that in effect mimic t

  ### Install `langroid`
  Langroid requires Python 3.11+. We recommend using a virtual environment.
- Use `pip` to install `langroid` (from PyPi) to your virtual environment:
+ Use `pip` to install a bare-bones slim version of `langroid` (from PyPi) to your virtual
+ environment:
  ```bash
  pip install langroid
  ```
@@ -467,17 +473,21 @@ The core Langroid package lets you use OpenAI Embeddings models via their API.
  If you instead want to use the `sentence-transformers` embedding models from HuggingFace,
  install Langroid like this:
  ```bash
- pip install langroid[hf-embeddings]
- ```
- If using `zsh` (or similar shells), you may need to escape the square brackets, e.g.:
- ```
- pip install langroid\[hf-embeddings\]
- ```
- or use quotes:
- ```
  pip install "langroid[hf-embeddings]"
  ```
-
+ For many practical scenarios, you may need additional optional dependencies:
+ - To use various document-parsers, install langroid with the `doc-chat` extra:
+ ```bash
+ pip install "langroid[doc-chat]"
+ ```
+ - For "chat with databases", use the `db` extra:
+ ```bash
+ pip install "langroid[db]"
+ ```
+ - You can specify multiple extras by separating them with commas, e.g.:
+ ```bash
+ pip install "langroid[doc-chat,db]"
+ ```

  <details>
  <summary><b>Optional Installs for using SQL Chat with a PostgreSQL DB </b></summary>
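The METADATA changes are the other half of the code changes above: new extras (`db`, `doc-chat`, `docx`, `embeddings`, `scrapy`) are declared, and previously unconditional dependencies such as `pdfplumber`, `python-docx`, and `scrapy` now install only when one of those extras is requested. Application code can probe for an extra's packages before enabling a feature; a rough sketch, where the mapping from extras to import names is inferred from the Requires-Dist lines above rather than read from langroid's metadata:

```python
from importlib.util import find_spec

# Inferred mapping from an extra to the top-level modules it should provide.
EXTRA_MODULES = {
    "doc-chat": ["fitz", "pypdf", "pdfplumber", "docx"],
    "db": ["sqlalchemy", "pymysql", "psycopg2"],
    "scrapy": ["scrapy"],
}


def extra_available(extra: str) -> bool:
    """Best-effort check that the packages behind an extra are importable."""
    return all(find_spec(mod) is not None for mod in EXTRA_MODULES.get(extra, []))


if not extra_available("doc-chat"):
    print('PDF/DOCX parsing disabled; run: pip install "langroid[doc-chat]"')
```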
langroid-0.1.253.dist-info/RECORD → langroid-0.1.254.dist-info/RECORD
@@ -9,8 +9,8 @@ langroid/agent/chat_document.py,sha256=uwCq53SHRyxQw6qyhjzPYuJG48VHBgOf2122Ew3fk
  langroid/agent/helpers.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  langroid/agent/junk,sha256=LxfuuW7Cijsg0szAzT81OjWWv1PMNI-6w_-DspVIO2s,339
  langroid/agent/openai_assistant.py,sha256=kIVDI4r-xGvplLU5s0nShPVHs6Jq-wOsfWE0kcMhAdQ,33056
- langroid/agent/special/__init__.py,sha256=NoR_vTfZ2mkhVhKdcwstLMKONpRwL5aLiidD_A2yB78,1208
- langroid/agent/special/doc_chat_agent.py,sha256=v3czg6jNIrfe1v4JXEbT3N2v2M7xaffD9UsKTRajqUI,54305
+ langroid/agent/special/__init__.py,sha256=gik_Xtm_zV7U9s30Mn8UX3Gyuy4jTjQe9zjiE3HWmEo,1273
+ langroid/agent/special/doc_chat_agent.py,sha256=_jXEjxuymNjkDcwT2xEcpf9sWoUW0P-1UWHCuRQFt_w,54428
  langroid/agent/special/lance_doc_chat_agent.py,sha256=USp0U3eTaJzwF_3bdqE7CedSLbaqAi2tm-VzygcyLaA,10175
  langroid/agent/special/lance_rag/__init__.py,sha256=QTbs0IVE2ZgDg8JJy1zN97rUUg4uEPH7SLGctFNumk4,174
  langroid/agent/special/lance_rag/critic_agent.py,sha256=ufTdpHSeHgCzN85Q0sfWOrpBpsCjGVZdAg5yOH1ogU8,7296
@@ -24,7 +24,7 @@ langroid/agent/special/neo4j/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
  langroid/agent/special/neo4j/utils/system_message.py,sha256=vRpz1P-OYLLiC6OGYYoK6x77yxVzDxMTCEJSsYUIuG4,2242
  langroid/agent/special/relevance_extractor_agent.py,sha256=zIx8GUdVo1aGW6ASla0NPQjYYIpmriK_TYMijqAx3F8,4796
  langroid/agent/special/retriever_agent.py,sha256=lvMvf-u9rSosg4YASuFdUbGLgkzLPknXAbJZfZ1LZCc,1868
- langroid/agent/special/sql/__init__.py,sha256=W--80eB-TlwDjjupcsjlqpdk1xuU_w5FaYPB3pb_j9I,194
+ langroid/agent/special/sql/__init__.py,sha256=mWfmm1QpXCezpFOS2eI57M0L_Ok3q5_ukG8tXBnBrEA,319
  langroid/agent/special/sql/sql_chat_agent.py,sha256=Now37yznNeiCE0ysC5W5BPG41xO69kxrHZWzLqs5dhQ,13903
  langroid/agent/special/sql/utils/__init__.py,sha256=JFif6CRTrN-bc91uuAI4K9fe2ndIWSNMVxJ0WA68--M,446
  langroid/agent/special/sql/utils/description_extractors.py,sha256=cX8TIpmTPXZXQTMpIi3OUFwFsPywxFFdurpx717Kq0I,6529
@@ -45,7 +45,7 @@ langroid/agent/tools/retrieval_tool.py,sha256=2q2pfoYbZNfbWQ0McxrtmfF0ekGglIgRl-
  langroid/agent/tools/run_python_code.py,sha256=BvoxYzzHijU-p4703n2iVlt5BCieR1oMSy50w0tQZAg,1787
  langroid/agent/tools/segment_extract_tool.py,sha256=WOwZdTTOqKaJUDIqI0jWDV126VM1UjJzIUandHsnC-g,1320
  langroid/agent_config.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- langroid/cachedb/__init__.py,sha256=ygx42MS7fvh2UwRMjukTk3dWBkzv_rACebTBRYa_MkU,148
+ langroid/cachedb/__init__.py,sha256=icAT2s7Vhf-ZGUeqpDQGNU6ob6o0aFEyjwcxxUGRFjg,225
  langroid/cachedb/base.py,sha256=LKiJyOFQUN1NRzPIynfbYKGFfSanA6auDfBNEedBK7Y,1342
  langroid/cachedb/momento_cachedb.py,sha256=YEOJ62hEcV6iIeMr5aGgRYgWQqFYaej9gEDEcY0sm7M,3172
  langroid/cachedb/redis_cachedb.py,sha256=NukuCWgdp1AWWNgguiZfuypbH9GHwiYe34ZZy866u54,4981
@@ -72,12 +72,12 @@ langroid/language_models/prompt_formatter/hf_formatter.py,sha256=TFL6ppmeQWnzr6C
  langroid/language_models/prompt_formatter/llama2_formatter.py,sha256=YdcO88qyBeuMENVIVvVqSYuEpvYSTndUe_jd6hVTko4,2899
  langroid/language_models/utils.py,sha256=j8xEEm__-2b9eql1oTiWQk5dHW59UwmrRKs5kMHaGGo,4803
  langroid/mytypes.py,sha256=qD3o2v1pccICz-xeei4cwkvJviVC2llJ3eIYgBP9RDE,3045
- langroid/parsing/__init__.py,sha256=2O5HOW8nDE3v-JInc5z2wIbFGejf4h5ZTdPqxsFtaWE,870
+ langroid/parsing/__init__.py,sha256=ZgSAfgTC6VsTLFlRSWT-TwYco7SQeRMeZG-49MnKYGY,936
  langroid/parsing/agent_chats.py,sha256=sbZRV9ujdM5QXvvuHVjIi2ysYSYlap-uqfMMUKulrW0,1068
  langroid/parsing/code-parsing.md,sha256=--cyyNiSZSDlIwcjAV4-shKrSiRe2ytF3AdSoS_hD2g,3294
  langroid/parsing/code_parser.py,sha256=BbDAzp35wkYQ9U1dpf1ARL0lVyi0tfqEc6_eox2C090,3727
  langroid/parsing/config.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- langroid/parsing/document_parser.py,sha256=L1-ltLdHkk2yUvVCAA9g7MH7P28sb4Te8pck40u1efY,23496
+ langroid/parsing/document_parser.py,sha256=bN-D1kqx6qe1Sx-AMR8a8WbPJYPdoxLYPdgGl7dfW3I,24017
  langroid/parsing/image_text.py,sha256=sbLIQ5nHe2UnYUksBaQsmZGaX-X0qgEpPd7CEzi_z5M,910
  langroid/parsing/para_sentence_split.py,sha256=AJBzZojP3zpB-_IMiiHismhqcvkrVBQ3ZINoQyx_bE4,2000
  langroid/parsing/parse_json.py,sha256=tgB_oatcrgt6L9ZplC-xBBXjLzL1gjSQf1L2_W5kwFA,4230
@@ -86,7 +86,7 @@ langroid/parsing/parser.pyi,sha256=4t55zlG_23hUFO7OvOttY1xzbLze2elpGjoVTUK9izM,1
  langroid/parsing/repo_loader.py,sha256=My5UIe-h1xr0I-6Icu0ZVwRHmGRRRW8SrJYMc9J1M9Q,29361
  langroid/parsing/routing.py,sha256=_NFPe7wLjd5B6s47w3M8-5vldL8e2Sz51Gb5bwF5ooY,1072
  langroid/parsing/search.py,sha256=plQtjarB9afGfJLB0CyPXPq3mM4m7kRsfd0_4brziEI,8846
- langroid/parsing/spider.py,sha256=w_mHR1B4KOmxsBLoVI8kMkMTEbwTzeK3ath9fOMJrTk,3043
+ langroid/parsing/spider.py,sha256=Y6y7b86Y2k770LdhxgjVlImBxuuy1V9n8-XQ3QPaG5s,3199
  langroid/parsing/table_loader.py,sha256=qNM4obT_0Y4tjrxNBCNUYjKQ9oETCZ7FbolKBTcz-GM,3410
  langroid/parsing/url_loader.py,sha256=Na2TBlKuQkloZzkE2d7xl6mh9olS3CbpgCsJbJ-xhIA,4472
  langroid/parsing/url_loader_cookies.py,sha256=Lg4sNpRz9MByWq2mde6T0hKv68VZSV3mtMjNEHuFeSU,2327
@@ -118,7 +118,7 @@ langroid/utils/pydantic_utils.py,sha256=yb-ghaQYL7EIYeiZ0tailvZvAuJZNF7UBXkd3z35
  langroid/utils/system.py,sha256=RfAcQODu4tjl-pAO8zZ65yKB9-6WsvzSz2dEPkJdSdw,4909
  langroid/utils/web/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  langroid/utils/web/login.py,sha256=1iz9eUAHa87vpKIkzwkmFa00avwFWivDSAr7QUhK7U0,2528
- langroid/vector_store/__init__.py,sha256=BHrYUKy7LHBJAMPuObcD0vs_TRj-B-Xipw7JLy1J9q4,1086
+ langroid/vector_store/__init__.py,sha256=6xBjb_z4QtUy4vz4RuFbcbSwmHrggHL8-q0DwCf3PMM,972
  langroid/vector_store/base.py,sha256=VZl-pvGs6K-ruTT8SQmDthsCp-VARYaf6OuzKmcXN58,13469
  langroid/vector_store/chromadb.py,sha256=bZ5HjwgKgfJj1PUHsatYsrHv-v0dpOfMR2l0tJ2H0_A,7890
  langroid/vector_store/lancedb.py,sha256=nC5pcrFoUOOO941Y7XiPZONUO4LuoZIAR1aR4PecKto,19014
@@ -126,7 +126,7 @@ langroid/vector_store/meilisearch.py,sha256=6frB7GFWeWmeKzRfLZIvzRjllniZ1cYj3Hmh
  langroid/vector_store/momento.py,sha256=QaPzUnTwlswoawGB-paLtUPyLRvckFXLfLDfvbTzjNQ,10505
  langroid/vector_store/qdrant_cloud.py,sha256=3im4Mip0QXLkR6wiqVsjV1QvhSElfxdFSuDKddBDQ-4,188
  langroid/vector_store/qdrantdb.py,sha256=sk5Qb2ZNbooi0rorsMuqIMokF7WADw6PJ0D6goM2XBw,16802
- langroid-0.1.253.dist-info/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
- langroid-0.1.253.dist-info/METADATA,sha256=7uPgSyezQf5zVej0TNnFlAA2wD0CIwOGpX6_MdV_L5w,50190
- langroid-0.1.253.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
- langroid-0.1.253.dist-info/RECORD,,
+ langroid-0.1.254.dist-info/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
+ langroid-0.1.254.dist-info/METADATA,sha256=moT2a-C4KIhExM6a0sNAh8du03kaFfNJ7IiSwUwdVn0,50962
+ langroid-0.1.254.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+ langroid-0.1.254.dist-info/RECORD,,