langroid 0.52.0__py3-none-any.whl → 0.52.2__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
--- langroid/language_models/model_info.py
+++ langroid/language_models/model_info.py
@@ -31,6 +31,8 @@ class OpenAIChatModel(ModelName):
     O1 = "o1"
     O1_MINI = "o1-mini"
     O3_MINI = "o3-mini"
+    O3 = "o3"
+    O4_MINI = "o4-mini"
     GPT4_1 = "gpt-4.1"
     GPT4_1_MINI = "gpt-4.1-mini"
     GPT4_1_NANO = "gpt-4.1-nano"
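
The two new enum members make "o3" and "o4-mini" selectable by name. A minimal usage sketch follows, assuming the usual langroid pattern of passing the enum member as chat_model to OpenAIGPTConfig (that config class is not part of this diff and is assumed here):

    # Sketch only: OpenAIGPTConfig and its chat_model field are assumed,
    # not shown in this diff.
    from langroid.language_models.openai_gpt import OpenAIGPTConfig
    from langroid.language_models.model_info import OpenAIChatModel

    o3_config = OpenAIGPTConfig(chat_model=OpenAIChatModel.O3)            # "o3"
    o4_mini_config = OpenAIGPTConfig(chat_model=OpenAIChatModel.O4_MINI)  # "o4-mini"
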
@@ -226,6 +228,20 @@ MODEL_INFO: Dict[str, ModelInfo] = {
         has_tools=False,
         description="O1 Reasoning LM",
     ),
+    OpenAIChatModel.O3.value: ModelInfo(
+        name=OpenAIChatModel.O3.value,
+        provider=ModelProvider.OPENAI,
+        context_length=200_000,
+        max_output_tokens=100_000,
+        input_cost_per_million=10.0,
+        output_cost_per_million=40.0,
+        allows_streaming=True,
+        allows_system_message=False,
+        unsupported_params=["temperature"],
+        rename_params={"max_tokens": "max_completion_tokens"},
+        has_tools=False,
+        description="O1 Reasoning LM",
+    ),
     OpenAIChatModel.O1_MINI.value: ModelInfo(
         name=OpenAIChatModel.O1_MINI.value,
         provider=ModelProvider.OPENAI,
@@ -254,6 +270,20 @@ MODEL_INFO: Dict[str, ModelInfo] = {
         has_tools=False,
         description="O3 Mini Reasoning LM",
     ),
+    OpenAIChatModel.O4_MINI.value: ModelInfo(
+        name=OpenAIChatModel.O4_MINI.value,
+        provider=ModelProvider.OPENAI,
+        context_length=200_000,
+        max_output_tokens=100_000,
+        input_cost_per_million=1.10,
+        output_cost_per_million=4.40,
+        allows_streaming=False,
+        allows_system_message=False,
+        unsupported_params=["temperature", "stream"],
+        rename_params={"max_tokens": "max_completion_tokens"},
+        has_tools=False,
+        description="O3 Mini Reasoning LM",
+    ),
     # Anthropic Models
     AnthropicModel.CLAUDE_3_5_SONNET.value: ModelInfo(
         name=AnthropicModel.CLAUDE_3_5_SONNET.value,
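
Each new MODEL_INFO entry is keyed by the enum's string value, so limits, pricing, and parameter quirks can be looked up directly. A small sketch of such a lookup (the import path follows the model_info.py entry in the RECORD listing at the end of this diff):

    from langroid.language_models.model_info import MODEL_INFO, OpenAIChatModel

    info = MODEL_INFO[OpenAIChatModel.O3.value]  # keyed by the string "o3"
    print(info.context_length)          # 200_000-token context window
    print(info.input_cost_per_million)  # 10.0 (USD per million input tokens)
    print(info.rename_params)           # {"max_tokens": "max_completion_tokens"}
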
--- langroid/parsing/file_attachment.py
+++ langroid/parsing/file_attachment.py
@@ -3,6 +3,7 @@ import mimetypes
 import uuid
 from pathlib import Path
 from typing import Any, BinaryIO, Dict, Optional, Union
+from urllib.parse import urlparse
 
 from langroid.pydantic_v1 import BaseModel
 
@@ -13,6 +14,8 @@ class FileAttachment(BaseModel):
     content: bytes
     filename: Optional[str] = None
     mime_type: str = "application/octet-stream"
+    url: str | None = None
+    detail: str | None = None
 
     def __init__(self, **data: Any) -> None:
         """Initialize with sensible defaults for filename if not provided."""
@@ -23,7 +26,11 @@ class FileAttachment(BaseModel):
         super().__init__(**data)
 
     @classmethod
-    def from_path(cls, file_path: Union[str, Path]) -> "FileAttachment":
+    def _from_path(
+        cls,
+        file_path: Union[str, Path],
+        detail: Optional[str] = None,
+    ) -> "FileAttachment":
         """Create a FileAttachment from a file path.
 
         Args:
@@ -40,7 +47,74 @@
         if mime_type is None:
             mime_type = "application/octet-stream"
 
-        return cls(content=content, filename=path.name, mime_type=mime_type)
+        return cls(
+            content=content,
+            filename=path.name,
+            mime_type=mime_type,
+            detail=detail,
+        )
+
+    @classmethod
+    def _from_url(
+        cls,
+        url: str,
+        content: Optional[bytes] = None,
+        filename: Optional[str] = None,
+        mime_type: Optional[str] = None,
+        detail: Optional[str] = None,
+    ) -> "FileAttachment":
+        """Create a FileAttachment from a URL.
+
+        Args:
+            url: URL to the file
+            content: Optional raw bytes content (if already fetched)
+            filename: Optional name to use for the file
+            mime_type: MIME type of the content, guessed from filename or url
+
+        Returns:
+            FileAttachment instance
+        """
+        if filename is None and url:
+            # Extract filename from URL if possible
+
+            parsed_url = urlparse(url)
+            path = parsed_url.path
+            filename = path.split("/")[-1] if path else None
+
+        if mime_type is None and filename:
+            mime_type, _ = mimetypes.guess_type(filename)
+
+        return cls(
+            content=content or b"",  # Empty bytes if no content provided
+            filename=filename,
+            mime_type=mime_type or "application/octet-stream",
+            url=url,
+            detail=detail,
+        )
+
+    @classmethod
+    def from_path(
+        cls,
+        path: Union[str, Path],
+        detail: str | None = None,
+    ) -> "FileAttachment":
+        """Create a FileAttachment from either a local file path or a URL.
+
+        Args:
+            path_or_url: Path to the file or URL to fetch
+
+        Returns:
+            FileAttachment instance
+        """
+        # Convert to string if Path object
+        path_str = str(path)
+
+        # Check if it's a URL
+        if path_str.startswith(("http://", "https://", "ftp://")):
+            return cls._from_url(url=path_str, detail=detail)
+        else:
+            # Assume it's a local file path
+            return cls._from_path(path_str, detail=detail)
 
     @classmethod
     def from_bytes(
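
After this refactor, FileAttachment.from_path() accepts either a local path or a URL and dispatches to the private _from_path/_from_url helpers, threading the new detail argument through both. A usage sketch (the import path follows the file_attachment.py entry in the RECORD listing at the end of this diff; the file name and URL are placeholders):

    from langroid.parsing.file_attachment import FileAttachment

    # Local file: bytes are read from disk and the MIME type is guessed
    # from the file name.
    local = FileAttachment.from_path("report.pdf")

    # URL: nothing is fetched; the URL itself is stored on the attachment,
    # along with an optional image-detail hint.
    remote = FileAttachment.from_path(
        "https://example.com/images/chart.png",
        detail="low",
    )
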
@@ -137,21 +211,37 @@
         """
         if "gemini" in model.lower():
             return dict(type="image_url", image_url=dict(url=self.to_data_uri()))
-        elif "claude" in model.lower():
+        else:
             # optimistically try this: some API proxies like litellm
             # support this, and others may not.
-            return dict(
-                type="file",
-                file=dict(
-                    file_data=self.to_data_uri(),
-                ),
-            )
-        else:
-            # fallback: assume file upload is similar to OpenAI API
-            return dict(
-                type="file",
-                file=dict(
-                    filename=self.filename,
-                    file_data=self.to_data_uri(),
-                ),
-            )
+            # For OpenAI models, handle images differently than other files
+            # For OpenAI models, handle images differently than other files
+            if self.mime_type and self.mime_type.startswith("image/"):
+                image_url_dict = {}
+
+                # If we have a URL and it's a full http/https URL, use it directly
+                if self.url and (
+                    self.url.startswith("http://") or self.url.startswith("https://")
+                ):
+                    image_url_dict["url"] = self.url
+                # Otherwise use base64 data URI
+                else:
+                    image_url_dict["url"] = self.to_data_uri()
+
+                # Add detail parameter if specified
+                if self.detail:
+                    image_url_dict["detail"] = self.detail
+
+                return dict(
+                    type="image_url",
+                    image_url=image_url_dict,
+                )
+            else:
+                # For non-image files
+                return dict(
+                    type="file",
+                    file=dict(
+                        filename=self.filename,
+                        file_data=self.to_data_uri(),
+                    ),
+                )
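
For OpenAI-style APIs, the rewritten branch now emits an image_url part for image attachments (using the stored URL directly when it is http(s), otherwise a base64 data URI, plus the optional detail hint) and falls back to a file part for everything else. The dict literals below only illustrate the two payload shapes produced by the branches above; the values are placeholders:

    # Image attachment built from a public URL with detail="low"
    image_part = {
        "type": "image_url",
        "image_url": {
            "url": "https://example.com/images/chart.png",
            "detail": "low",
        },
    }

    # Non-image attachment (e.g. a local PDF), always sent inline as a data URI
    file_part = {
        "type": "file",
        "file": {
            "filename": "report.pdf",
            "file_data": "data:application/pdf;base64,...",
        },
    }
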
--- langroid-0.52.0.dist-info/METADATA
+++ langroid-0.52.2.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langroid
-Version: 0.52.0
+Version: 0.52.2
 Summary: Harness LLMs with Multi-Agent Programming
 Author-email: Prasad Chalasani <pchalasani@gmail.com>
 License: MIT
--- langroid-0.52.0.dist-info/RECORD
+++ langroid-0.52.2.dist-info/RECORD
@@ -72,7 +72,7 @@ langroid/language_models/azure_openai.py,sha256=SW0Fp_y6HpERr9l6TtF6CYsKgKwjUf_h
 langroid/language_models/base.py,sha256=pfN3t-BktKmN_4K8pwmpjC9OdcHxsytM5s5TmsJ-nPg,28560
 langroid/language_models/config.py,sha256=9Q8wk5a7RQr8LGMT_0WkpjY8S4ywK06SalVRjXlfCiI,378
 langroid/language_models/mock_lm.py,sha256=5BgHKDVRWFbUwDT_PFgTZXz9-k8wJSA2e3PZmyDgQ1k,4022
-langroid/language_models/model_info.py,sha256=0NE1zWNUHJwcM5jhwNxUqGjbpek-Nq7ljGdWpM8R3RQ,13380
+langroid/language_models/model_info.py,sha256=vOaTi-XFKnz-BvHUvgjnt0XfOtl21Apev3Zy7Rhckbw,14458
 langroid/language_models/openai_gpt.py,sha256=KFbG6q143CYt6SP8rU1UEFX2mffn8yXfVOdEyuA5IaY,85854
 langroid/language_models/utils.py,sha256=hC5p61P_Qlrowkm5wMap1A1b5ZUCwK_XhPIzAQk1T1s,5483
 langroid/language_models/prompt_formatter/__init__.py,sha256=2-5cdE24XoFDhifOLl8yiscohil1ogbP1ECkYdBlBsk,372
@@ -83,7 +83,7 @@ langroid/parsing/__init__.py,sha256=2oUWJJAxIavq9Wtw5RGlkXLq3GF3zgXeVLLW4j7yeb8,
 langroid/parsing/agent_chats.py,sha256=sbZRV9ujdM5QXvvuHVjIi2ysYSYlap-uqfMMUKulrW0,1068
 langroid/parsing/code_parser.py,sha256=5ze0MBytrGGkU69pA_bJDjRm6QZz_QYfPcIwkagUa7U,3796
 langroid/parsing/document_parser.py,sha256=cUcp4JKS_LpsjX7OqnGBhHorDHx7FG5pvKGjRBkQoMw,57685
-langroid/parsing/file_attachment.py,sha256=iIMTmAkfu-TgV9CKnDEB-BiDDN6WOepH51sIz6-PnFw,4826
+langroid/parsing/file_attachment.py,sha256=eGW9IEneN1NG-xb8OvM07blvdGNLKI2XlFM7pEIYrt4,7709
 langroid/parsing/md_parser.py,sha256=JUgsUpCaeAuBndmtDaJR9HMZaje1gmtXtaLXJHst3i8,21340
 langroid/parsing/para_sentence_split.py,sha256=AJBzZojP3zpB-_IMiiHismhqcvkrVBQ3ZINoQyx_bE4,2000
 langroid/parsing/parse_json.py,sha256=aADo38bAHQhC8on4aWZZzVzSDy-dK35vRLZsFI2ewh8,4756
@@ -130,7 +130,7 @@ langroid/vector_store/pineconedb.py,sha256=otxXZNaBKb9f_H75HTaU3lMHiaR2NUp5MqwLZ
 langroid/vector_store/postgres.py,sha256=wHPtIi2qM4fhO4pMQr95pz1ZCe7dTb2hxl4VYspGZoA,16104
 langroid/vector_store/qdrantdb.py,sha256=O6dSBoDZ0jzfeVBd7LLvsXu083xs2fxXtPa9gGX3JX4,18443
 langroid/vector_store/weaviatedb.py,sha256=Yn8pg139gOy3zkaPfoTbMXEEBCiLiYa1MU5d_3UA1K4,11847
-langroid-0.52.0.dist-info/METADATA,sha256=xiHZhRlCgRm6s8wRIZ300xBO5gUW1j2wqWZG9rj1npc,63642
-langroid-0.52.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-langroid-0.52.0.dist-info/licenses/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
-langroid-0.52.0.dist-info/RECORD,,
+langroid-0.52.2.dist-info/METADATA,sha256=Xf03bEX9NqSbKKmbTqzbvNk9uKouRLRniUxlsnDNXfg,63642
+langroid-0.52.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langroid-0.52.2.dist-info/licenses/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
+langroid-0.52.2.dist-info/RECORD,,