langroid-0.58.1-py3-none-any.whl → langroid-0.58.3-py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their public registries; it is provided for informational purposes only.
--- langroid/language_models/client_cache.py
+++ langroid/language_models/client_cache.py
@@ -49,6 +49,8 @@ def get_openai_client(
     organization: Optional[str] = None,
     timeout: Union[float, Timeout] = 120.0,
     default_headers: Optional[Dict[str, str]] = None,
+    http_client: Optional[Any] = None,
+    http_client_config: Optional[Dict[str, Any]] = None,
 ) -> OpenAI:
     """
     Get or create a singleton OpenAI client with the given configuration.
@@ -59,6 +61,8 @@ def get_openai_client(
         organization: Optional organization ID
         timeout: Request timeout
         default_headers: Optional default headers
+        http_client: Optional httpx.Client instance
+        http_client_config: Optional config dict for creating httpx.Client
 
     Returns:
         OpenAI client instance
@@ -66,6 +70,32 @@ def get_openai_client(
     if isinstance(timeout, (int, float)):
         timeout = Timeout(timeout)
 
+    # If http_client is provided directly, don't cache (complex object)
+    if http_client is not None:
+        client = OpenAI(
+            api_key=api_key,
+            base_url=base_url,
+            organization=organization,
+            timeout=timeout,
+            default_headers=default_headers,
+            http_client=http_client,
+        )
+        _all_clients.add(client)
+        return client
+
+    # If http_client_config is provided, create client from config and cache
+    created_http_client = None
+    if http_client_config is not None:
+        try:
+            from httpx import Client
+
+            created_http_client = Client(**http_client_config)
+        except ImportError:
+            raise ValueError(
+                "httpx is required to use http_client_config. "
+                "Install it with: pip install httpx"
+            )
+
     cache_key = _get_cache_key(
         "openai",
         api_key=api_key,
@@ -73,6 +103,7 @@ def get_openai_client(
         organization=organization,
         timeout=timeout,
         default_headers=default_headers,
+        http_client_config=http_client_config,  # Include config in cache key
     )
 
     if cache_key in _client_cache:
@@ -84,6 +115,7 @@ def get_openai_client(
         organization=organization,
         timeout=timeout,
         default_headers=default_headers,
+        http_client=created_http_client,  # Use the client created from config
     )
 
     _client_cache[cache_key] = client
@@ -97,6 +129,8 @@ def get_async_openai_client(
     organization: Optional[str] = None,
     timeout: Union[float, Timeout] = 120.0,
     default_headers: Optional[Dict[str, str]] = None,
+    http_client: Optional[Any] = None,
+    http_client_config: Optional[Dict[str, Any]] = None,
 ) -> AsyncOpenAI:
     """
     Get or create a singleton AsyncOpenAI client with the given configuration.
@@ -107,6 +141,8 @@ def get_async_openai_client(
         organization: Optional organization ID
         timeout: Request timeout
         default_headers: Optional default headers
+        http_client: Optional httpx.AsyncClient instance
+        http_client_config: Optional config dict for creating httpx.AsyncClient
 
     Returns:
         AsyncOpenAI client instance
@@ -114,6 +150,32 @@ def get_async_openai_client(
     if isinstance(timeout, (int, float)):
         timeout = Timeout(timeout)
 
+    # If http_client is provided directly, don't cache (complex object)
+    if http_client is not None:
+        client = AsyncOpenAI(
+            api_key=api_key,
+            base_url=base_url,
+            organization=organization,
+            timeout=timeout,
+            default_headers=default_headers,
+            http_client=http_client,
+        )
+        _all_clients.add(client)
+        return client
+
+    # If http_client_config is provided, create async client from config and cache
+    created_http_client = None
+    if http_client_config is not None:
+        try:
+            from httpx import AsyncClient
+
+            created_http_client = AsyncClient(**http_client_config)
+        except ImportError:
+            raise ValueError(
+                "httpx is required to use http_client_config. "
+                "Install it with: pip install httpx"
+            )
+
     cache_key = _get_cache_key(
         "async_openai",
         api_key=api_key,
@@ -121,6 +183,7 @@ def get_async_openai_client(
         organization=organization,
         timeout=timeout,
         default_headers=default_headers,
+        http_client_config=http_client_config,  # Include config in cache key
     )
 
     if cache_key in _client_cache:
@@ -132,6 +195,7 @@ def get_async_openai_client(
         organization=organization,
         timeout=timeout,
         default_headers=default_headers,
+        http_client=created_http_client,  # Use the client created from config
     )
 
     _client_cache[cache_key] = client
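Taken together, the client_cache.py hunks give the cached client getters two ways to customize the HTTP transport: a plain http_client_config dict, which is included in the cache key so the singleton behavior is preserved, or a pre-built httpx client, which bypasses the cache entirely. The following is a minimal sketch of direct use, assuming the import path shown and with illustrative argument values (the api_key strings are placeholders):

    import httpx

    from langroid.language_models.client_cache import get_openai_client

    # Config dict: forwarded to httpx.Client(**config) and included in the
    # cache key, so repeated calls with the same dict reuse one OpenAI client.
    client = get_openai_client(
        api_key="sk-placeholder",
        http_client_config={"verify": False},
    )

    # Pre-built httpx.Client: used as-is, but the resulting OpenAI client is
    # not cached, since the object cannot be folded into a cache key.
    uncached = get_openai_client(
        api_key="sk-placeholder",
        http_client=httpx.Client(verify=False),
    )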
--- langroid/language_models/openai_gpt.py
+++ langroid/language_models/openai_gpt.py
@@ -287,6 +287,9 @@ class OpenAIGPTConfig(LLMConfig):
     langdb_params: LangDBParams = LangDBParams()
     portkey_params: PortkeyParams = PortkeyParams()
     headers: Dict[str, str] = {}
+    http_client_factory: Optional[Callable[[], Any]] = None  # Factory for httpx.Client
+    http_verify_ssl: bool = True  # Simple flag for SSL verification
+    http_client_config: Optional[Dict[str, Any]] = None  # Config dict for httpx.Client
 
     def __init__(self, **kwargs) -> None:  # type: ignore
         local_model = "api_base" in kwargs and kwargs["api_base"] is not None
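These three fields surface the same options on OpenAIGPTConfig. A minimal sketch of the two cacheable paths, assuming the usual OpenAIGPT construction pattern (the proxy URL, and the exact httpx keywords in the config dict, are illustrative):

    from langroid.language_models.openai_gpt import OpenAIGPT, OpenAIGPTConfig

    # Simplest case: skip SSL verification (logs a warning, client stays cacheable).
    llm = OpenAIGPT(OpenAIGPTConfig(http_verify_ssl=False))

    # More control, still cacheable: kwargs passed to httpx.Client / httpx.AsyncClient.
    llm = OpenAIGPT(
        OpenAIGPTConfig(
            http_client_config={"verify": False, "proxy": "http://proxy.example:8080"},
        )
    )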
@@ -631,6 +634,32 @@ class OpenAIGPT(LanguageModel):
             # Add Portkey-specific headers
             self.config.headers.update(self.config.portkey_params.get_headers())
 
+        # Create http_client if needed - Priority order:
+        # 1. http_client_factory (most flexibility, not cacheable)
+        # 2. http_client_config (cacheable, moderate flexibility)
+        # 3. http_verify_ssl=False (cacheable, simple SSL bypass)
+        http_client = None
+        async_http_client = None
+        http_client_config_used = None
+
+        if self.config.http_client_factory is not None:
+            # Use the factory to create http_client (not cacheable)
+            http_client = self.config.http_client_factory()
+            # Don't set async_http_client from sync client - create separately
+            # This avoids type mismatch issues
+            async_http_client = None
+        elif self.config.http_client_config is not None:
+            # Use config dict (cacheable)
+            http_client_config_used = self.config.http_client_config
+        elif not self.config.http_verify_ssl:
+            # Simple SSL bypass (cacheable)
+            http_client_config_used = {"verify": False}
+            logging.warning(
+                "SSL verification has been disabled. This is insecure and "
+                "should only be used in trusted environments (e.g., "
+                "corporate networks with self-signed certificates)."
+            )
+
         if self.config.use_cached_client:
             self.client = get_openai_client(
                 api_key=self.api_key,
@@ -638,6 +667,8 @@ class OpenAIGPT(LanguageModel):
                 organization=self.config.organization,
                 timeout=Timeout(self.config.timeout),
                 default_headers=self.config.headers,
+                http_client=http_client,
+                http_client_config=http_client_config_used,
             )
             self.async_client = get_async_openai_client(
                 api_key=self.api_key,
@@ -645,23 +676,56 @@ class OpenAIGPT(LanguageModel):
                 organization=self.config.organization,
                 timeout=Timeout(self.config.timeout),
                 default_headers=self.config.headers,
+                http_client=async_http_client,
+                http_client_config=http_client_config_used,
             )
         else:
             # Create new clients without caching
-            self.client = OpenAI(
+            client_kwargs: Dict[str, Any] = dict(
                 api_key=self.api_key,
                 base_url=self.api_base,
                 organization=self.config.organization,
                 timeout=Timeout(self.config.timeout),
                 default_headers=self.config.headers,
             )
-            self.async_client = AsyncOpenAI(
+            if http_client is not None:
+                client_kwargs["http_client"] = http_client
+            elif http_client_config_used is not None:
+                # Create http_client from config for non-cached scenario
+                try:
+                    from httpx import Client
+
+                    client_kwargs["http_client"] = Client(**http_client_config_used)
+                except ImportError:
+                    raise ValueError(
+                        "httpx is required to use http_client_config. "
+                        "Install it with: pip install httpx"
+                    )
+            self.client = OpenAI(**client_kwargs)
+
+            async_client_kwargs: Dict[str, Any] = dict(
                 api_key=self.api_key,
                 base_url=self.api_base,
                 organization=self.config.organization,
                 timeout=Timeout(self.config.timeout),
                 default_headers=self.config.headers,
             )
+            if async_http_client is not None:
+                async_client_kwargs["http_client"] = async_http_client
+            elif http_client_config_used is not None:
+                # Create async http_client from config for non-cached scenario
+                try:
+                    from httpx import AsyncClient
+
+                    async_client_kwargs["http_client"] = AsyncClient(
+                        **http_client_config_used
+                    )
+                except ImportError:
+                    raise ValueError(
+                        "httpx is required to use http_client_config. "
+                        "Install it with: pip install httpx"
+                    )
+            self.async_client = AsyncOpenAI(**async_client_kwargs)
 
         self.cache: CacheDB | None = None
         use_cache = self.config.cache_config is not None
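When neither the config dict nor the simple SSL flag is enough, the priority order above puts http_client_factory first: the factory's client is used as-is (so client caching is skipped), and per the comments in the hunk it only feeds the synchronous client, with the async client created separately. A sketch under those assumptions; the certificate path and factory name are hypothetical:

    import ssl

    import httpx

    from langroid.language_models.openai_gpt import OpenAIGPT, OpenAIGPTConfig

    def make_http_client() -> httpx.Client:
        # Hypothetical corporate-CA setup: trust a custom certificate bundle.
        ctx = ssl.create_default_context(cafile="/etc/ssl/certs/corp-ca.pem")
        return httpx.Client(verify=ctx)

    llm = OpenAIGPT(
        OpenAIGPTConfig(
            http_client_factory=make_http_client,  # takes precedence over the other two
        )
    )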
--- langroid/parsing/url_loader.py
+++ langroid/parsing/url_loader.py
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
         from crawl4ai.extraction_strategy import ExtractionStrategy
         from crawl4ai.markdown_generation_strategy import MarkdownGenerationStrategy
     except ImportError:
-        raise LangroidImportError("crawl4ai", "crawl-4-ai")
+        raise LangroidImportError("crawl4ai", "crawl4ai")
 
 load_dotenv()
 
--- langroid-0.58.1.dist-info/METADATA
+++ langroid-0.58.3.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langroid
-Version: 0.58.1
+Version: 0.58.3
 Summary: Harness LLMs with Multi-Agent Programming
 Author-email: Prasad Chalasani <pchalasani@gmail.com>
 License: MIT
@@ -94,8 +94,8 @@ Requires-Dist: chainlit<3.0.0,>=2.0.1; extra == 'chainlit'
 Requires-Dist: python-socketio<6.0.0,>=5.11.0; extra == 'chainlit'
 Provides-Extra: chromadb
 Requires-Dist: chromadb<=0.4.23,>=0.4.21; extra == 'chromadb'
-Provides-Extra: crawl-4-ai
-Requires-Dist: crawl4ai>=0.6.3; extra == 'crawl-4-ai'
+Provides-Extra: crawl4ai
+Requires-Dist: crawl4ai>=0.6.3; extra == 'crawl4ai'
 Provides-Extra: db
 Requires-Dist: psycopg2-binary>=2.9.10; extra == 'db'
 Requires-Dist: psycopg2<3.0.0,>=2.9.7; extra == 'db'
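This extras rename is what the url_loader.py fix earlier in the diff points at: the second argument to LangroidImportError is presumably the extras name quoted in the install hint, so after this change the suggested command is pip install "langroid[crawl4ai]" rather than the old crawl-4-ai extra, which no longer exists as of 0.58.3.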
--- langroid-0.58.1.dist-info/RECORD
+++ langroid-0.58.3.dist-info/RECORD
@@ -74,11 +74,11 @@ langroid/embedding_models/protoc/embeddings_pb2_grpc.py,sha256=9dYQqkW3JPyBpSEje
 langroid/language_models/__init__.py,sha256=3aD2qC1lz8v12HX4B-dilv27gNxYdGdeu1QvDlkqqHs,1095
 langroid/language_models/azure_openai.py,sha256=SW0Fp_y6HpERr9l6TtF6CYsKgKwjUf_hSL_2mhTV4wI,5034
 langroid/language_models/base.py,sha256=r0MckcZGmuv_opKR2xvjzOz94mmWCzn9LJKgqyBjJ7c,28559
-langroid/language_models/client_cache.py,sha256=YtGcpalYkS_ckMU12J7VmUOGmVv1wzuLUBxgIagcpmA,6896
+langroid/language_models/client_cache.py,sha256=c8tAUTKxbd-CVGRy9WPYhI1pLtarFWWwX6-qm4ZXaqk,9399
 langroid/language_models/config.py,sha256=9Q8wk5a7RQr8LGMT_0WkpjY8S4ywK06SalVRjXlfCiI,378
 langroid/language_models/mock_lm.py,sha256=tA9JpURznsMZ59iRhFYMmaYQzAc0D0BT-PiJIV58sAk,4079
 langroid/language_models/model_info.py,sha256=LzRfZsWmOm7WF6KGJfcN0aVdRqk0URNuDGMMz6cFt50,17121
-langroid/language_models/openai_gpt.py,sha256=fYRZlfpo1CF8ePF1Cuy2CtStxDhpq6AeTxwuqK-z_T8,89329
+langroid/language_models/openai_gpt.py,sha256=xlMqji6yA5RgbDjY8uMXkl5xsko4oXBuJBrb5kD6LVY,92727
 langroid/language_models/provider_params.py,sha256=fX25NAmYUIc1-nliMKpmTGZO6D6RpyTXtSDdZCZdb5w,5464
 langroid/language_models/utils.py,sha256=n55Oe2_V_4VNGhytvPWLYC-0tFS07RTjN83KWl-p_MI,6032
 langroid/language_models/prompt_formatter/__init__.py,sha256=2-5cdE24XoFDhifOLl8yiscohil1ogbP1ECkYdBlBsk,372
@@ -102,7 +102,7 @@ langroid/parsing/routing.py,sha256=-FcnlqldzL4ZoxuDwXjQPNHgBe9F9-F4R6q7b_z9CvI,1
 langroid/parsing/search.py,sha256=0NJ5-Rou_BbrHAD7O9b20bKjZJnbadjObvGm4Zq8Kis,9818
 langroid/parsing/spider.py,sha256=hAVM6wxh1pQ0EN4tI5wMBtAjIk0T-xnpi-ZUzWybhos,3258
 langroid/parsing/table_loader.py,sha256=qNM4obT_0Y4tjrxNBCNUYjKQ9oETCZ7FbolKBTcz-GM,3410
-langroid/parsing/url_loader.py,sha256=ELLSimgdf0-oscmtnDxXJcL-W9jVdLDNfQ90ouCDfHE,25402
+langroid/parsing/url_loader.py,sha256=zSvYV1wY5Clf-uTuoJ9fksR9oVPJVJLQ8tEAxMckM8M,25400
 langroid/parsing/urls.py,sha256=Tjzr64YsCusiYkY0LEGB5-rSuX8T2P_4DVoOFKAeKuI,8081
 langroid/parsing/utils.py,sha256=WwqzOhbQRlorbVvddDIZKv9b1KqZCBDm955lgIHDXRw,12828
 langroid/parsing/web_search.py,sha256=atk8wIpOfiGTvW8yL_26TvjvyY2zD24xIHIi0QjEklI,8599
@@ -139,7 +139,7 @@ langroid/vector_store/pineconedb.py,sha256=otxXZNaBKb9f_H75HTaU3lMHiaR2NUp5MqwLZ
 langroid/vector_store/postgres.py,sha256=wHPtIi2qM4fhO4pMQr95pz1ZCe7dTb2hxl4VYspGZoA,16104
 langroid/vector_store/qdrantdb.py,sha256=ZYrT9mxoUCx_67Qzb5xnkWuFG12rfe30yAg4NgG2ueA,19168
 langroid/vector_store/weaviatedb.py,sha256=Yn8pg139gOy3zkaPfoTbMXEEBCiLiYa1MU5d_3UA1K4,11847
-langroid-0.58.1.dist-info/METADATA,sha256=mkfwCAdL_zIZxy4v12_ikK7AZLMmbHwwEkh_RJNt3QE,66270
-langroid-0.58.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-langroid-0.58.1.dist-info/licenses/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
-langroid-0.58.1.dist-info/RECORD,,
+langroid-0.58.3.dist-info/METADATA,sha256=Crx8SyajMne8W50NYBeoal3XH0GyMUQk363RqBCRir8,66266
+langroid-0.58.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langroid-0.58.3.dist-info/licenses/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
+langroid-0.58.3.dist-info/RECORD,,