openai-sdk-helpers 0.0.2__py3-none-any.whl → 0.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openai_sdk_helpers/prompt/summarizer.jinja +7 -0
- openai_sdk_helpers/prompt/translator.jinja +7 -0
- openai_sdk_helpers/prompt/validator.jinja +7 -0
- openai_sdk_helpers/py.typed +0 -0
- {openai_sdk_helpers-0.0.2.dist-info → openai_sdk_helpers-0.0.3.dist-info}/METADATA +57 -4
- openai_sdk_helpers-0.0.3.dist-info/RECORD +8 -0
- openai_sdk_helpers/__init__.py +0 -34
- openai_sdk_helpers/agent/__init__.py +0 -23
- openai_sdk_helpers/agent/base.py +0 -432
- openai_sdk_helpers/agent/config.py +0 -66
- openai_sdk_helpers/agent/project_manager.py +0 -416
- openai_sdk_helpers/agent/runner.py +0 -117
- openai_sdk_helpers/agent/utils.py +0 -47
- openai_sdk_helpers/agent/vector_search.py +0 -418
- openai_sdk_helpers/agent/web_search.py +0 -404
- openai_sdk_helpers/config.py +0 -141
- openai_sdk_helpers/enums/__init__.py +0 -7
- openai_sdk_helpers/enums/base.py +0 -17
- openai_sdk_helpers/environment.py +0 -27
- openai_sdk_helpers/prompt/__init__.py +0 -77
- openai_sdk_helpers/response/__init__.py +0 -16
- openai_sdk_helpers/response/base.py +0 -477
- openai_sdk_helpers/response/messages.py +0 -211
- openai_sdk_helpers/response/runner.py +0 -42
- openai_sdk_helpers/response/tool_call.py +0 -70
- openai_sdk_helpers/structure/__init__.py +0 -57
- openai_sdk_helpers/structure/base.py +0 -591
- openai_sdk_helpers/structure/plan/__init__.py +0 -13
- openai_sdk_helpers/structure/plan/enum.py +0 -48
- openai_sdk_helpers/structure/plan/plan.py +0 -104
- openai_sdk_helpers/structure/plan/task.py +0 -122
- openai_sdk_helpers/structure/prompt.py +0 -24
- openai_sdk_helpers/structure/responses.py +0 -148
- openai_sdk_helpers/structure/summary.py +0 -65
- openai_sdk_helpers/structure/vector_search.py +0 -82
- openai_sdk_helpers/structure/web_search.py +0 -46
- openai_sdk_helpers/utils/__init__.py +0 -13
- openai_sdk_helpers/utils/core.py +0 -208
- openai_sdk_helpers/vector_storage/__init__.py +0 -15
- openai_sdk_helpers/vector_storage/cleanup.py +0 -91
- openai_sdk_helpers/vector_storage/storage.py +0 -501
- openai_sdk_helpers/vector_storage/types.py +0 -58
- openai_sdk_helpers-0.0.2.dist-info/RECORD +0 -40
- {openai_sdk_helpers-0.0.2.dist-info → openai_sdk_helpers-0.0.3.dist-info}/WHEEL +0 -0
- {openai_sdk_helpers-0.0.2.dist-info → openai_sdk_helpers-0.0.3.dist-info}/licenses/LICENSE +0 -0
openai_sdk_helpers/utils/core.py
DELETED
@@ -1,208 +0,0 @@
-"""Core utility helpers for openai-sdk-helpers."""
-
-from __future__ import annotations
-
-import json
-import logging
-from dataclasses import asdict, is_dataclass
-from datetime import datetime
-from enum import Enum
-from pathlib import Path
-from typing import Any, Dict, Iterable, List, TypeVar
-
-T = TypeVar("T")
-_configured_logging = False
-
-
-def ensure_list(value: Iterable[T] | T | None) -> List[T]:
-    """Normalize a single item or iterable into a list.
-
-    Parameters
-    ----------
-    value : Iterable[T] | T | None
-        Item or iterable to wrap. ``None`` yields an empty list.
-
-    Returns
-    -------
-    list[T]
-        Normalized list representation of ``value``.
-    """
-    if value is None:
-        return []
-    if isinstance(value, list):
-        return value
-    if isinstance(value, tuple):
-        return list(value)
-    return [value]  # type: ignore[list-item]
-
-
-def check_filepath(
-    filepath: Path | None = None, *, fullfilepath: str | None = None
-) -> Path:
-    """Ensure the parent directory for a file path exists.
-
-    Parameters
-    ----------
-    filepath : Path | None, optional
-        Path object to validate. Mutually exclusive with ``fullfilepath``.
-    fullfilepath : str | None, optional
-        String path to validate. Mutually exclusive with ``filepath``.
-
-    Returns
-    -------
-    Path
-        Path object representing the validated file path.
-
-    Raises
-    ------
-    ValueError
-        If neither ``filepath`` nor ``fullfilepath`` is provided.
-    """
-    if filepath is None and fullfilepath is None:
-        raise ValueError("filepath or fullfilepath is required.")
-    if fullfilepath is not None:
-        target = Path(fullfilepath)
-    elif filepath is not None:
-        target = Path(filepath)
-    else:
-        raise ValueError("filepath or fullfilepath is required.")
-    target.parent.mkdir(parents=True, exist_ok=True)
-    return target
-
-
-def _to_jsonable(value: Any) -> Any:
-    """Convert common helper types to JSON-serializable forms.
-
-    Parameters
-    ----------
-    value : Any
-        Value to convert.
-
-    Returns
-    -------
-    Any
-        A JSON-safe representation of ``value``.
-    """
-    if value is None:
-        return None
-    if isinstance(value, Enum):
-        return value.value
-    if isinstance(value, Path):
-        return str(value)
-    if isinstance(value, datetime):
-        return value.isoformat()
-    if is_dataclass(value) and not isinstance(value, type):
-        return {k: _to_jsonable(v) for k, v in asdict(value).items()}
-    if hasattr(value, "model_dump"):
-        model_dump = getattr(value, "model_dump")
-        return model_dump()
-    if isinstance(value, dict):
-        return {str(k): _to_jsonable(v) for k, v in value.items()}
-    if isinstance(value, (list, tuple, set)):
-        return [_to_jsonable(v) for v in value]
-    return value
-
-
-class customJSONEncoder(json.JSONEncoder):
-    """Encode common helper types like enums and paths.
-
-    Methods
-    -------
-    default(o)
-        Return a JSON-serializable representation of ``o``.
-    """
-
-    def default(self, o: Any) -> Any:
-        """Return a JSON-serializable representation of ``o``.
-
-        Parameters
-        ----------
-        o : Any
-            Object to serialize.
-
-        Returns
-        -------
-        Any
-            JSON-safe representation of ``o``.
-        """
-        return _to_jsonable(o)
-
-
-class JSONSerializable:
-    """Mixin for classes that can be serialized to JSON.
-
-    Methods
-    -------
-    to_json()
-        Return a JSON-compatible dict representation of the instance.
-    to_json_file(filepath)
-        Write serialized JSON data to a file path.
-    """
-
-    def to_json(self) -> Dict[str, Any]:
-        """Return a JSON-compatible dict representation.
-
-        Returns
-        -------
-        dict[str, Any]
-            Mapping with only JSON-serializable values.
-        """
-        if is_dataclass(self) and not isinstance(self, type):
-            return {k: _to_jsonable(v) for k, v in asdict(self).items()}
-        if hasattr(self, "model_dump"):
-            model_dump = getattr(self, "model_dump")
-            return _to_jsonable(model_dump())
-        return _to_jsonable(self.__dict__)
-
-    def to_json_file(self, filepath: str | Path) -> str:
-        """Write serialized JSON data to a file path.
-
-        Parameters
-        ----------
-        filepath : str | Path
-            Destination file path. Parent directories are created as needed.
-
-        Returns
-        -------
-        str
-            String representation of the file path written.
-        """
-        target = Path(filepath)
-        check_filepath(fullfilepath=str(target))
-        with open(target, "w", encoding="utf-8") as handle:
-            json.dump(
-                self.to_json(),
-                handle,
-                indent=2,
-                ensure_ascii=False,
-                cls=customJSONEncoder,
-            )
-        return str(target)
-
-
-def log(message: str, level: int = logging.INFO) -> None:
-    """Log a message with a basic configuration.
-
-    Parameters
-    ----------
-    message : str
-        Message to emit.
-    level : int, optional
-        Logging level, by default ``logging.INFO``.
-    """
-    global _configured_logging
-    if not _configured_logging:
-        logging.basicConfig(
-            level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s"
-        )
-        _configured_logging = True
-    logging.log(level, message)
-
-
-__all__ = [
-    "ensure_list",
-    "check_filepath",
-    "JSONSerializable",
-    "customJSONEncoder",
-    "log",
-]
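For reference, a minimal usage sketch of the removed utilities, assuming openai-sdk-helpers==0.0.2 is still installed; RunSummary and the output path are hypothetical, not part of the package:

from dataclasses import dataclass

from openai_sdk_helpers.utils.core import JSONSerializable, ensure_list, log


@dataclass
class RunSummary(JSONSerializable):
    # Hypothetical caller-side dataclass; inherits to_json()/to_json_file() from the mixin.
    name: str
    tags: list[str]


summary = RunSummary(name="demo", tags=ensure_list("smoke-test"))  # tags == ["smoke-test"]
log(f"Wrote {summary.to_json_file('out/run_summary.json')}")  # creates out/ if missing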
openai_sdk_helpers/vector_storage/__init__.py
DELETED
@@ -1,15 +0,0 @@
-"""Vector store helpers."""
-
-from __future__ import annotations
-
-from .cleanup import _delete_all_files, _delete_all_vector_stores
-from .storage import VectorStorage
-from .types import VectorStorageFileInfo, VectorStorageFileStats
-
-__all__ = [
-    "VectorStorage",
-    "VectorStorageFileInfo",
-    "VectorStorageFileStats",
-    "_delete_all_vector_stores",
-    "_delete_all_files",
-]
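Downstream code written against the 0.0.2 surface shown above stops resolving after the upgrade; a sketch of what breaks:

# Re-exported by openai_sdk_helpers.vector_storage in 0.0.2; the subpackage is
# absent in 0.0.3, so this import raises ModuleNotFoundError after upgrading.
from openai_sdk_helpers.vector_storage import (
    VectorStorage,
    VectorStorageFileInfo,
    VectorStorageFileStats,
)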
openai_sdk_helpers/vector_storage/cleanup.py
DELETED
@@ -1,91 +0,0 @@
-"""Cleanup helpers for vector stores."""
-
-from __future__ import annotations
-
-import logging
-
-from openai import OpenAI
-
-from ..utils import log
-
-
-def _delete_all_vector_stores() -> None:
-    """Delete all vector stores and clean up any orphaned files.
-
-    This utility iterates over every vector store owned by the account,
-    deleting each one after removing all of its files. Any standalone files that
-    remain after the stores are deleted are also removed.
-
-    Returns
-    -------
-    None
-    """
-    try:
-        client = OpenAI()
-        stores = client.vector_stores.list().data
-        log(f"Found {len(stores)} vector stores.")
-
-        attached_file_ids = set()
-
-        for store in stores:
-            log(f"Deleting vector store: {store.name} (ID: {store.id})")
-
-            files = client.vector_stores.files.list(vector_store_id=store.id).data
-            for file in files:
-                attached_file_ids.add(file.id)
-                log(f" - Deleting file {file.id}")
-                try:
-                    client.vector_stores.files.delete(
-                        vector_store_id=store.id, file_id=file.id
-                    )
-                except Exception as file_err:
-                    log(
-                        f"Failed to delete file {file.id}: {file_err}",
-                        level=logging.WARNING,
-                    )
-
-            try:
-                client.vector_stores.delete(store.id)
-                log(f"Vector store {store.name} deleted.")
-            except Exception as store_err:
-                log(
-                    f"Failed to delete vector store {store.name}: {store_err}",
-                    level=logging.WARNING,
-                )
-
-        log("Checking for orphaned files in client.files...")
-        all_files = client.files.list().data
-        for file in all_files:
-            if file.id not in attached_file_ids:
-                try:
-                    log(f"Deleting orphaned file {file.id}")
-                    client.files.delete(file_id=file.id)
-                except Exception as exc:
-                    log(
-                        f"Failed to delete orphaned file {file.id}: {exc}",
-                        level=logging.WARNING,
-                    )
-
-    except Exception as exc:
-        log(f"Error during cleanup: {exc}", level=logging.ERROR)
-
-
-def _delete_all_files() -> None:
-    """Delete all files from the OpenAI account.
-
-    This utility iterates over every file owned by the account and deletes them.
-    It does not check for vector stores, so it will delete all files regardless
-    of their association.
-
-    Returns
-    -------
-    None
-    """
-    client = OpenAI()
-    all_files = client.files.list().data
-    for file in all_files:
-        try:
-            log(f"Deleting file {file.id}")
-            client.files.delete(file_id=file.id)
-        except Exception as exc:
-            log(f"Failed to delete file {file.id}: {exc}", level=logging.WARNING)
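A cautionary invocation sketch for the removed cleanup helpers, assuming openai-sdk-helpers==0.0.2 and an OPENAI_API_KEY in the environment; both calls are destructive and account-wide, and the confirmation guard is an illustrative addition, not part of the package:

from openai_sdk_helpers.vector_storage import _delete_all_files, _delete_all_vector_stores

# Both helpers act on the entire OpenAI account, so require explicit confirmation first.
if input("Delete ALL vector stores and files? [y/N] ").strip().lower() == "y":
    _delete_all_vector_stores()  # removes every store, then its orphaned files
    _delete_all_files()  # removes any files still left, unconditionally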