amsdal_ml 0.2.1__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- amsdal_ml/__about__.py +1 -1
- amsdal_ml/fileio/base_loader.py +2 -1
- amsdal_ml/fileio/openai_loader.py +8 -1
- amsdal_ml/ml_models/openai_model.py +23 -10
- {amsdal_ml-0.2.1.dist-info → amsdal_ml-0.3.0.dist-info}/METADATA +4 -6
- {amsdal_ml-0.2.1.dist-info → amsdal_ml-0.3.0.dist-info}/RECORD +7 -7
- {amsdal_ml-0.2.1.dist-info → amsdal_ml-0.3.0.dist-info}/WHEEL +0 -0
amsdal_ml/__about__.py
CHANGED

@@ -1 +1 @@
-__version__ = '0.2.1'
+__version__ = '0.3.0'
amsdal_ml/fileio/base_loader.py
CHANGED

@@ -24,6 +24,7 @@ class FileAttachment:
     type: str # one of: PLAIN_TEXT, FILE_ID
     content: Any
     metadata: dict[str, Any] | None = None
+    mime_type: str | None = None

     def __post_init__(self) -> None:
         if self.metadata is None:

@@ -40,7 +41,7 @@ class FileItem:
     def from_path(path: str, *, filedata: FileData | None = None) -> FileItem:
         # Caller is responsible for lifecycle; loaders may close after upload.
         f = open(path, 'rb')
-        return FileItem(file=f, filename=path.
+        return FileItem(file=f, filename=path.rsplit('/', 1)[-1], filedata=filedata)

     @staticmethod
     def from_bytes(data: bytes, *, filename: str | None = None, filedata: FileData | None = None) -> FileItem:
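For orientation, a minimal usage sketch of the extended dataclasses. It assumes FileAttachment and FileItem are importable from amsdal_ml.fileio.base_loader as the diff suggests, and that the type field takes the literal strings 'PLAIN_TEXT' and 'FILE_ID'; all values are illustrative.

    import tempfile

    from amsdal_ml.fileio.base_loader import FileAttachment, FileItem

    # The new optional field defaults to None, so existing call sites keep working.
    text_att = FileAttachment(type='PLAIN_TEXT', content='quarterly summary')

    # File-backed attachments can now carry the detected MIME type next to the metadata.
    image_att = FileAttachment(
        type='FILE_ID',
        content='file-abc123',  # illustrative provider file id
        metadata={'filename': 'chart.png', 'provider': 'openai'},
        mime_type='image/png',
    )

    # from_path now keeps only the last path segment as the filename;
    # the caller still owns the open handle's lifecycle.
    with tempfile.NamedTemporaryFile(suffix='.png') as tmp:
        item = FileItem.from_path(tmp.name)
        assert item.filename == tmp.name.rsplit('/', 1)[-1]
        item.file.close()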
amsdal_ml/fileio/openai_loader.py
CHANGED

@@ -3,6 +3,7 @@ from __future__ import annotations
 import asyncio
 import io
 import logging
+import mimetypes
 from collections.abc import Sequence
 from typing import Any
 from typing import BinaryIO

@@ -41,6 +42,12 @@ class OpenAIFileLoader(BaseFileLoader):

         up = await self.client.files.create(file=(filename or 'upload.bin', buf), purpose=self.purpose)

+        mime_type = None
+        if filedata is not None and filedata.mime:
+            mime_type = filedata.mime
+        else:
+            mime_type = mimetypes.guess_type(filename or "")[0]
+
         meta: dict[str, Any] = {
             'filename': filename,
             'provider': 'openai',

@@ -56,7 +63,7 @@ class OpenAIFileLoader(BaseFileLoader):
         if filedata is not None:
             meta['filedata'] = filedata.model_dump()

-        return FileAttachment(type=FILE_ID, content=up.id, metadata=meta)
+        return FileAttachment(type=FILE_ID, content=up.id, metadata=meta, mime_type=mime_type)

     async def load(self, item: FileItem) -> FileAttachment:
         return await self._upload_one(item.file, filename=item.filename, filedata=item.filedata)
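The new fallback leans on the standard-library mimetypes module: an explicit filedata.mime wins, otherwise the filename extension decides. A quick illustration of what guess_type yields for typical filenames (standard-library behaviour, not package code):

    import mimetypes

    print(mimetypes.guess_type('chart.png')[0])   # 'image/png'
    print(mimetypes.guess_type('report.pdf')[0])  # 'application/pdf'
    print(mimetypes.guess_type('notes')[0])       # None -> mime_type stays None
    print(mimetypes.guess_type('')[0])            # None, the case when no filename was given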
amsdal_ml/ml_models/openai_model.py
CHANGED

@@ -351,6 +351,13 @@ class OpenAIModel(MLModel):

         return None

+    def _map_responses_text_config(self, response_format: dict[str, Any]) -> dict[str, Any]:
+        if response_format.get("type") == "json_schema":
+            format_config: dict[str, Any] = {"type": "json_schema"}
+            format_config.update(response_format["json_schema"])
+            return {"format": format_config}
+        return {"format": dict(response_format)}
+
     def _ensure_async_client(self) -> None:
         if self.client is None:
             try:
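To make the new mapping concrete, here is a standalone copy of the logic above with a worked example; the payload values are illustrative, not taken from the package:

    from typing import Any


    def map_responses_text_config(response_format: dict[str, Any]) -> dict[str, Any]:
        """Mirror of the helper added above, reproduced here only for illustration."""
        if response_format.get("type") == "json_schema":
            format_config: dict[str, Any] = {"type": "json_schema"}
            format_config.update(response_format["json_schema"])
            return {"format": format_config}
        return {"format": dict(response_format)}


    # A nested json_schema payload is flattened into a single "format" object ...
    assert map_responses_text_config(
        {
            "type": "json_schema",
            "json_schema": {"name": "invoice", "schema": {"type": "object"}},
        }
    ) == {"format": {"type": "json_schema", "name": "invoice", "schema": {"type": "object"}}}

    # ... while anything else is passed through under "format" unchanged.
    assert map_responses_text_config({"type": "json_object"}) == {"format": {"type": "json_object"}}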
@@ -403,8 +410,7 @@

         return atts

-
-    def _has_file_ids(atts: list[FileAttachment]) -> bool:
+    def _has_file_ids(self, atts: list[FileAttachment]) -> bool:
         return any(a.type == FILE_ID for a in atts)

     def _build_input_content(
@@ -416,7 +422,11 @@
                 if a.type == PLAIN_TEXT:
                     parts.append({"type": "input_text", "text": str(a.content)})
                 elif a.type == FILE_ID:
-
+                    mime = (a.mime_type or (a.metadata or {}).get("mime_type") or "").lower()
+                    if mime.startswith("image/"):
+                        parts.append({"type": "input_image", "file_id": str(a.content)})
+                    else:
+                        parts.append({"type": "input_file", "file_id": str(a.content)})
             return [{"role": "user", "content": parts}]

         messages = cast(list[StructuredMessage], [dict(msg) for msg in input])
@@ -425,7 +435,11 @@
             if a.type == PLAIN_TEXT:
                 parts.append({"type": "input_text", "text": str(a.content)})
             elif a.type == FILE_ID:
-
+                mime = (a.mime_type or (a.metadata or {}).get("mime_type") or "").lower()
+                if mime.startswith("image/"):
+                    parts.append({"type": "input_image", "file_id": str(a.content)})
+                else:
+                    parts.append({"type": "input_file", "file_id": str(a.content)})

         if parts:
             messages.append({"role": "user", "content": parts})
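A sketch of how a mixed attachment list now fans out into content parts. It assumes the FileAttachment import and literal type strings from the earlier sketch; the ids are made up, and the commented output mirrors the branch added in the two hunks above:

    from amsdal_ml.fileio.base_loader import FileAttachment

    atts = [
        FileAttachment(type='PLAIN_TEXT', content='Summarize the attached files.'),
        FileAttachment(type='FILE_ID', content='file-img-1', mime_type='image/png'),
        FileAttachment(type='FILE_ID', content='file-doc-1', mime_type='application/pdf'),
    ]

    # With the mime-aware branch, the user message content becomes roughly:
    # [
    #     {"type": "input_text",  "text": "Summarize the attached files."},
    #     {"type": "input_image", "file_id": "file-img-1"},  # image/* -> input_image
    #     {"type": "input_file",  "file_id": "file-doc-1"},  # everything else -> input_file
    # ]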
@@ -439,8 +453,7 @@
         return f"{prompt}\n\n[ATTACHMENTS]\n" + "\n\n".join(extras)

     # ---------- error mapping ----------
-
-    def _map_openai_error(err: Exception) -> ModelError:
+    def _map_openai_error(self, err: Exception) -> ModelError:
         if isinstance(err, openai.RateLimitError):
             return ModelRateLimitError(str(err))
         if isinstance(err, openai.APIConnectionError):
@@ -532,7 +545,7 @@
             "temperature": self.temperature,
         }
         if response_format:
-            kwargs["
+            kwargs["text"] = self._map_responses_text_config(response_format)

         try:
             resp: Any = client.responses.create(**kwargs)
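In practice the structured-output configuration now travels under the text keyword of the Responses call rather than as a separate kwarg. A hedged sketch of what the resulting kwargs could look like; the model name and schema are illustrative, and the exact parameter shape should be verified against the pinned openai==1.100.2 client:

    kwargs = {
        "model": "gpt-4o-mini",  # illustrative
        "input": [{"role": "user", "content": [{"type": "input_text", "text": "Extract the total."}]}],
        "temperature": 0.0,
        # produced by _map_responses_text_config for a json_schema response_format:
        "text": {
            "format": {
                "type": "json_schema",
                "name": "invoice",
                "schema": {"type": "object", "properties": {"total": {"type": "number"}}},
            }
        },
    }
    # resp = client.responses.create(**kwargs)  # as in the sync path shown above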
@@ -552,7 +565,7 @@
             "stream": True,
         }
         if response_format:
-            kwargs["
+            kwargs["text"] = self._map_responses_text_config(response_format)

         try:
             stream_or_resp = client.responses.create(**kwargs)

@@ -641,7 +654,7 @@
             "temperature": self.temperature,
         }
         if response_format:
-            kwargs["
+            kwargs["text"] = self._map_responses_text_config(response_format)

         try:
             resp: Any = await client.responses.create(**kwargs)

@@ -661,7 +674,7 @@
             "stream": True,
         }
         if response_format:
-            kwargs["
+            kwargs["text"] = self._map_responses_text_config(response_format)

         try:
             stream_or_resp = await client.responses.create(**kwargs)
{amsdal_ml-0.2.1.dist-info → amsdal_ml-0.3.0.dist-info}/METADATA
CHANGED

@@ -1,14 +1,12 @@
 Metadata-Version: 2.4
 Name: amsdal_ml
-Version: 0.2.1
+Version: 0.3.0
 Summary: amsdal_ml plugin for AMSDAL Framework
 Requires-Python: >=3.11
 Requires-Dist: aiohttp==3.12.15
-Requires-Dist: amsdal-cli>=0.
-Requires-Dist: amsdal-
-Requires-Dist: amsdal
-Requires-Dist: amsdal-utils>=0.5.4
-Requires-Dist: amsdal>=0.5.6
+Requires-Dist: amsdal-cli>=0.6.1
+Requires-Dist: amsdal-server>=0.6.2
+Requires-Dist: amsdal>=0.6.0
 Requires-Dist: mcp>=0.1
 Requires-Dist: openai==1.100.2
 Requires-Dist: pydantic-settings~=2.12
{amsdal_ml-0.2.1.dist-info → amsdal_ml-0.3.0.dist-info}/RECORD
CHANGED

@@ -1,5 +1,5 @@
 amsdal_ml/Third-Party Materials - AMSDAL Dependencies - License Notices.md,sha256=ML7PqsHrTMNNZn8E_rA-LzDCAafMSxMcrmSg8YOi-wo,113896
-amsdal_ml/__about__.py,sha256=
+amsdal_ml/__about__.py,sha256=3wVEs2QD_7OcTlD97cZdCeizd2hUbJJ0GeIO8wQIjrk,22
 amsdal_ml/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 amsdal_ml/app.py,sha256=K_lUz37_FUQmPwzDakDhVXA_D2SL9z1JtcuHfuBvlKE,184
 amsdal_ml/ml_config.py,sha256=UONMVG2RbKVbaYF-GdlhRQbm8flp52GxBzfqdzuLZ5w,1788

@@ -13,8 +13,8 @@ amsdal_ml/agents/python_tool.py,sha256=qrWOfc9pz1mAU506A-0bgwhkahaNT_t_E7RmSo2sl
 amsdal_ml/agents/retriever_tool.py,sha256=ckSVPahfYQ12E3F5ouPV085LeQinlyxEQrBpRSfJzbU,2220
 amsdal_ml/agents/tool_adapters.py,sha256=GJoAx6rRnf-db_5ExPi5KpbiROF0TetcOAh8-sT1Nf0,3244
 amsdal_ml/fileio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-amsdal_ml/fileio/base_loader.py,sha256=
-amsdal_ml/fileio/openai_loader.py,sha256=
+amsdal_ml/fileio/base_loader.py,sha256=3uXmhJ9R9FP0OOnzoOF2VvI37FQ55JK0D4f5PusrRWY,1905
+amsdal_ml/fileio/openai_loader.py,sha256=S39Wlpu1xM-WhnN5N-iaFMQ5uuIPlozR1aoGHmGFLt8,2791
 amsdal_ml/mcp_client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 amsdal_ml/mcp_client/base.py,sha256=PifmlAxxlpFkFLLN8VHKV3XbeSXKA4jcE_u4gPacm9U,497
 amsdal_ml/mcp_client/http_client.py,sha256=VFTNZ8b9sboa67_4db0K1R6CtwM6KdVjmH1t_l0FZMk,1765

@@ -50,7 +50,7 @@ amsdal_ml/ml_ingesting/stores/embedding_data.py,sha256=zbYF8l6L4IgNpuLmtNYqxKzvd
 amsdal_ml/ml_ingesting/stores/store.py,sha256=WTX22e_7qCwCPNFmJYNIMwr8Ppkw7NlIri8SEweVoAw,617
 amsdal_ml/ml_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 amsdal_ml/ml_models/models.py,sha256=w_sFzDqV4L1OUaGzmODU1Y0nCXo19SBeqjvD-W0ROfU,5501
-amsdal_ml/ml_models/openai_model.py,sha256=
+amsdal_ml/ml_models/openai_model.py,sha256=UwrEa1nB0v0krNfv9ZlqPGJu7CLFIyc2uU7Wv1HPxLU,25275
 amsdal_ml/ml_models/utils.py,sha256=fOwRC4gfmmtGIFsI0aXjvSPxEApFsxiQ3Hicx71QykU,146
 amsdal_ml/ml_retrievers/__init__.py,sha256=duWEkdfwbJN05yiJOfzKOK_xERLSxXGPUrCexZ6Ubhw,619
 amsdal_ml/ml_retrievers/adapters.py,sha256=-luzriafS_AZAp8yfINfVX6jTXuGlM9slF-f6UztveY,2983

@@ -67,6 +67,6 @@ amsdal_ml/prompts/nl_query_filter.prompt,sha256=Qeh0qZd59ObAYxql3N3vSfDZc6PBnQUH
 amsdal_ml/prompts/react_chat.prompt,sha256=KGJNaaeutqMtYQ7PBnMCuR9RQQBHBa4VzXfQ4c0D3Js,1953
 amsdal_ml/utils/__init__.py,sha256=Q9ToZTkhPC6TiAx2qR_5Ap06cg94tvxorMfCU2v0xUQ,208
 amsdal_ml/utils/query_utils.py,sha256=DlU1FOwabPqg4CW8xEzhel7KjuCwkYPiB41WX8s7mbc,5496
-amsdal_ml-0.
-amsdal_ml-0.
-amsdal_ml-0.
+amsdal_ml-0.3.0.dist-info/METADATA,sha256=F_UiLzJUSluKu9WFH9IeiAmlSJ7g1iDNqXbfUZZk46M,8300
+amsdal_ml-0.3.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+amsdal_ml-0.3.0.dist-info/RECORD,,
{amsdal_ml-0.2.1.dist-info → amsdal_ml-0.3.0.dist-info}/WHEEL
File without changes