openai-sdk-helpers 0.0.5__py3-none-any.whl → 0.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. openai_sdk_helpers/__init__.py +62 -0
  2. openai_sdk_helpers/agent/__init__.py +31 -0
  3. openai_sdk_helpers/agent/base.py +330 -0
  4. openai_sdk_helpers/agent/config.py +66 -0
  5. openai_sdk_helpers/agent/project_manager.py +511 -0
  6. openai_sdk_helpers/agent/prompt_utils.py +9 -0
  7. openai_sdk_helpers/agent/runner.py +215 -0
  8. openai_sdk_helpers/agent/summarizer.py +85 -0
  9. openai_sdk_helpers/agent/translator.py +139 -0
  10. openai_sdk_helpers/agent/utils.py +47 -0
  11. openai_sdk_helpers/agent/validation.py +97 -0
  12. openai_sdk_helpers/agent/vector_search.py +462 -0
  13. openai_sdk_helpers/agent/web_search.py +404 -0
  14. openai_sdk_helpers/config.py +199 -0
  15. openai_sdk_helpers/enums/__init__.py +7 -0
  16. openai_sdk_helpers/enums/base.py +29 -0
  17. openai_sdk_helpers/environment.py +27 -0
  18. openai_sdk_helpers/prompt/__init__.py +77 -0
  19. openai_sdk_helpers/py.typed +0 -0
  20. openai_sdk_helpers/response/__init__.py +20 -0
  21. openai_sdk_helpers/response/base.py +505 -0
  22. openai_sdk_helpers/response/messages.py +211 -0
  23. openai_sdk_helpers/response/runner.py +104 -0
  24. openai_sdk_helpers/response/tool_call.py +70 -0
  25. openai_sdk_helpers/response/vector_store.py +84 -0
  26. openai_sdk_helpers/structure/__init__.py +43 -0
  27. openai_sdk_helpers/structure/agent_blueprint.py +224 -0
  28. openai_sdk_helpers/structure/base.py +713 -0
  29. openai_sdk_helpers/structure/plan/__init__.py +13 -0
  30. openai_sdk_helpers/structure/plan/enum.py +64 -0
  31. openai_sdk_helpers/structure/plan/plan.py +253 -0
  32. openai_sdk_helpers/structure/plan/task.py +122 -0
  33. openai_sdk_helpers/structure/prompt.py +24 -0
  34. openai_sdk_helpers/structure/responses.py +132 -0
  35. openai_sdk_helpers/structure/summary.py +65 -0
  36. openai_sdk_helpers/structure/validation.py +47 -0
  37. openai_sdk_helpers/structure/vector_search.py +86 -0
  38. openai_sdk_helpers/structure/web_search.py +46 -0
  39. openai_sdk_helpers/utils/__init__.py +25 -0
  40. openai_sdk_helpers/utils/core.py +300 -0
  41. openai_sdk_helpers/vector_storage/__init__.py +15 -0
  42. openai_sdk_helpers/vector_storage/cleanup.py +91 -0
  43. openai_sdk_helpers/vector_storage/storage.py +564 -0
  44. openai_sdk_helpers/vector_storage/types.py +58 -0
  45. {openai_sdk_helpers-0.0.5.dist-info → openai_sdk_helpers-0.0.7.dist-info}/METADATA +6 -3
  46. openai_sdk_helpers-0.0.7.dist-info/RECORD +51 -0
  47. openai_sdk_helpers-0.0.5.dist-info/RECORD +0 -7
  48. {openai_sdk_helpers-0.0.5.dist-info → openai_sdk_helpers-0.0.7.dist-info}/WHEEL +0 -0
  49. {openai_sdk_helpers-0.0.5.dist-info → openai_sdk_helpers-0.0.7.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,47 @@
1
+ """Structures describing guardrail validation results."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import List, Optional
6
+
7
+ from .base import BaseStructure, spec_field
8
+
9
+
10
class ValidationResultStructure(BaseStructure):
    """Capture guardrail validation findings for user and agent messages.

    Fields are declared with ``spec_field`` from ``.base``; their order and
    arguments define the structure's schema.

    Methods
    -------
    print()
        Return a formatted string representation of the stored fields.
        (Not defined in this class — presumably provided by
        ``BaseStructure``; verify against that class.)
    """

    # True when the user-provided input passes the guardrails.
    input_safe: bool = spec_field(
        "input_safe",
        allow_null=False,
        description="Whether the user-provided input is allowed within the guardrails.",
    )
    # True when the agent's output passes the guardrails.
    output_safe: bool = spec_field(
        "output_safe",
        allow_null=False,
        description="Whether the agent output adheres to the safety guardrails.",
    )
    # Detected policy/safety issues; defaults to an empty list.
    violations: List[str] = spec_field(
        "violations",
        allow_null=False,
        default_factory=list,
        description="Detected policy or safety issues that require mitigation.",
    )
    # Remediation steps paired with the detected violations; defaults empty.
    recommended_actions: List[str] = spec_field(
        "recommended_actions",
        allow_null=False,
        default_factory=list,
        description="Steps to remediate or respond to any detected violations.",
    )
    # Optional rewritten/redacted text; nullable (no allow_null=False).
    sanitized_output: Optional[str] = spec_field(
        "sanitized_output",
        description="Optional redacted or rewritten text that fits the guardrails.",
    )


__all__ = ["ValidationResultStructure"]
@@ -0,0 +1,86 @@
1
+ """Shared structured output models for vector search."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import List
6
+
7
+ from .base import BaseStructure, spec_field
8
+
9
+
10
class VectorSearchItemStructure(BaseStructure):
    """A single vector search to perform.

    Attributes
    ----------
    reason : str
        Rationale for performing this search.
    query : str
        Query string for this search.
    """

    reason: str = spec_field("reason")
    query: str = spec_field("query")
15
+
16
+
17
class VectorSearchPlanStructure(BaseStructure):
    """Collection of vector searches required to satisfy the query.

    Attributes
    ----------
    searches : List[VectorSearchItemStructure]
        Individual searches to run.
    """

    searches: List[VectorSearchItemStructure] = spec_field("searches")
21
+
22
+
23
class VectorSearchItemResultStructure(BaseStructure):
    """Result of a single vector search.

    Attributes
    ----------
    texts : List[str]
        Text snippets returned for the search.
    """

    texts: List[str] = spec_field("texts")
27
+
28
+
29
class VectorSearchItemResultsStructure(BaseStructure):
    """Collection of search results returned from multiple queries.

    Failed searches are recorded in ``errors`` to allow callers to inspect
    partial outcomes without losing visibility into issues.

    Attributes
    ----------
    item_results : List[VectorSearchItemResultStructure]
        Successful per-query results; defaults to an empty list.
    errors : List[str]
        Error descriptions for failed searches; defaults to an empty list.

    Methods
    -------
    append(item)
        Add a search result to the collection.
    """

    item_results: List[VectorSearchItemResultStructure] = spec_field(
        "item_results", default_factory=list
    )
    errors: List[str] = spec_field("errors", default_factory=list)

    def append(self, item: VectorSearchItemResultStructure) -> None:
        """Add a search result to the collection.

        Mutates ``item_results`` in place.

        Parameters
        ----------
        item : VectorSearchItemResultStructure
            Result item to append.

        Returns
        -------
        None
        """
        self.item_results.append(item)
59
+
60
+
61
class VectorSearchReportStructure(BaseStructure):
    """Structured output from the vector search writer agent.

    Attributes
    ----------
    short_summary : str
        Brief summary of the findings.
    markdown_report : str
        Full report in Markdown form.
    follow_up_questions : List[str]
        Suggested follow-up questions.
    sources : List[str]
        Sources referenced by the report.
    """

    short_summary: str = spec_field("short_summary")
    markdown_report: str = spec_field("markdown_report")
    follow_up_questions: List[str] = spec_field("follow_up_questions")
    sources: List[str] = spec_field("sources")
68
+
69
+
70
class VectorSearchStructure(BaseStructure):
    """Complete output of a vector search workflow.

    Attributes
    ----------
    query : str
        Original query driving the workflow.
    plan : VectorSearchPlanStructure
        Planned searches.
    results : VectorSearchItemResultsStructure
        Collected search results (and any errors).
    report : VectorSearchReportStructure
        Final written report.
    """

    query: str = spec_field("query")
    plan: VectorSearchPlanStructure = spec_field("plan")
    results: VectorSearchItemResultsStructure = spec_field("results")
    report: VectorSearchReportStructure = spec_field("report")
77
+
78
+
79
# Explicit public API of this module.
__all__ = [
    "VectorSearchReportStructure",
    "VectorSearchItemStructure",
    "VectorSearchItemResultStructure",
    "VectorSearchItemResultsStructure",
    "VectorSearchPlanStructure",
    "VectorSearchStructure",
]
@@ -0,0 +1,46 @@
1
+ """Shared structured output model for web search results."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import List
6
+
7
+ from .base import BaseStructure, spec_field
8
+
9
+
10
class WebSearchReportStructure(BaseStructure):
    """Structured output from the writer agent.

    Attributes
    ----------
    short_summary : str
        Brief summary of the findings.
    markdown_report : str
        Full report in Markdown form.
    follow_up_questions : List[str]
        Suggested follow-up questions.
    sources : List[str]
        Sources referenced by the report.
    """

    short_summary: str = spec_field("short_summary")
    markdown_report: str = spec_field("markdown_report")
    follow_up_questions: List[str] = spec_field("follow_up_questions")
    sources: List[str] = spec_field("sources")
17
+
18
+
19
class WebSearchItemStructure(BaseStructure):
    """A single web search to perform.

    Attributes
    ----------
    reason : str
        Rationale for performing this search.
    query : str
        Query string for this search.
    """

    reason: str = spec_field("reason")
    query: str = spec_field("query")
24
+
25
+
26
class WebSearchItemResultStructure(BaseStructure):
    """Result of a single web search.

    Attributes
    ----------
    text : str
        Text content returned for the search.
    """

    text: str = spec_field("text")
30
+
31
+
32
class WebSearchPlanStructure(BaseStructure):
    """Collection of searches required to satisfy the query.

    Attributes
    ----------
    searches : List[WebSearchItemStructure]
        Individual searches to run.
    """

    searches: List[WebSearchItemStructure] = spec_field("searches")
36
+
37
+
38
class WebSearchStructure(BaseStructure):
    """Complete output of a web search workflow.

    Attributes
    ----------
    query : str
        Original query driving the workflow.
    web_search_plan : WebSearchPlanStructure
        Planned searches.
    web_search_results : List[WebSearchItemResultStructure]
        Results gathered for the planned searches.
    web_search_report : WebSearchReportStructure
        Final written report.
    """

    query: str = spec_field("query")
    web_search_plan: WebSearchPlanStructure = spec_field("web_search_plan")
    web_search_results: List[WebSearchItemResultStructure] = spec_field(
        "web_search_results"
    )
    web_search_report: WebSearchReportStructure = spec_field("web_search_report")


# Explicit public API, for consistency with the sibling structure modules
# (validation.py and vector_search.py), which both define ``__all__``.
__all__ = [
    "WebSearchReportStructure",
    "WebSearchItemStructure",
    "WebSearchItemResultStructure",
    "WebSearchPlanStructure",
    "WebSearchStructure",
]
@@ -0,0 +1,25 @@
1
+ """Shared utility helpers for openai-sdk-helpers."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from .core import (
6
+ JSONSerializable,
7
+ check_filepath,
8
+ coerce_dict,
9
+ coerce_optional_float,
10
+ coerce_optional_int,
11
+ customJSONEncoder,
12
+ ensure_list,
13
+ log,
14
+ )
15
+
16
# Public re-exports; mirrors the names imported from .core above.
__all__ = [
    "ensure_list",
    "check_filepath",
    "coerce_optional_float",
    "coerce_optional_int",
    "coerce_dict",
    "JSONSerializable",
    "customJSONEncoder",
    "log",
]
@@ -0,0 +1,300 @@
1
+ """Core utility helpers for openai-sdk-helpers."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+ import logging
7
+ from dataclasses import asdict, is_dataclass
8
+ from datetime import datetime
9
+ from enum import Enum
10
+ from pathlib import Path
11
+ from typing import Any, Dict, Iterable, List, Mapping, Optional, TypeVar
12
+
13
+
14
def coerce_optional_float(value: Any) -> Optional[float]:
    """Return a float when the provided value can be coerced, otherwise ``None``.

    Parameters
    ----------
    value : Any
        Value to convert into a float. Strings must be parseable as floats.

    Returns
    -------
    float | None
        Converted float value or ``None`` if the input is ``None``.

    Raises
    ------
    ValueError
        If a non-empty string cannot be converted to a float.
    TypeError
        If the value is not a float-compatible type (including empty or
        whitespace-only strings).
    """
    if value is None:
        return None
    if isinstance(value, str):
        # Blank strings fall through to the TypeError below.
        if value.strip():
            try:
                return float(value)
            except ValueError as exc:
                raise ValueError("timeout must be a float-compatible value") from exc
    elif isinstance(value, (float, int)):
        return float(value)
    raise TypeError("timeout must be a float, int, str, or None")
44
+
45
+
46
def coerce_optional_int(value: Any) -> Optional[int]:
    """Return an int when the provided value can be coerced, otherwise ``None``.

    Parameters
    ----------
    value : Any
        Value to convert into an int. Strings must be parseable as integers.

    Returns
    -------
    int | None
        Converted integer value or ``None`` if the input is ``None``.

    Raises
    ------
    ValueError
        If a non-empty string cannot be converted to an integer.
    TypeError
        If the value is not an int-compatible type (bools are rejected, as
        are non-integral floats and blank strings).
    """
    if value is None:
        return None
    if isinstance(value, bool):
        # bool subclasses int but is deliberately not accepted.
        raise TypeError("max_retries must be an int, str, or None")
    if isinstance(value, int):
        return value
    if isinstance(value, float) and value.is_integer():
        return int(value)
    if isinstance(value, str) and value.strip():
        try:
            return int(value)
        except ValueError as exc:
            raise ValueError("max_retries must be an int-compatible value") from exc
    raise TypeError("max_retries must be an int, str, or None")
78
+
79
+
80
def coerce_dict(value: Any) -> Dict[str, Any]:
    """Return a string-keyed dictionary built from ``value`` if possible.

    Parameters
    ----------
    value : Any
        Mapping-like value to convert. ``None`` yields an empty dictionary.

    Returns
    -------
    dict[str, Any]
        Dictionary representation of ``value`` (a shallow copy).

    Raises
    ------
    TypeError
        If the value cannot be treated as a mapping.
    """
    # Any abc.Mapping is accepted; everything else (besides None) is an error.
    if isinstance(value, Mapping):
        return dict(value)
    if value is None:
        return {}
    raise TypeError("extra_client_kwargs must be a mapping or None")
103
+
104
+
105
# Generic element type used by ensure_list.
T = TypeVar("T")
# Module-level flag: set to True once log() has applied logging.basicConfig.
_configured_logging = False
107
+
108
+
109
def ensure_list(value: Iterable[T] | T | None) -> List[T]:
    """Normalize a single item or iterable into a list.

    Parameters
    ----------
    value : Iterable[T] | T | None
        Item or iterable to wrap. ``None`` yields an empty list.

    Returns
    -------
    list[T]
        Normalized list representation of ``value``.

    Notes
    -----
    Only ``list`` and ``tuple`` inputs are expanded; any other iterable
    (set, generator, string, ...) is wrapped as a single element. An
    existing list is returned as-is, not copied.
    """
    if value is None:
        return []
    if isinstance(value, tuple):
        return list(value)
    # Lists pass through unchanged (aliased); everything else is wrapped.
    return value if isinstance(value, list) else [value]  # type: ignore[list-item]
129
+
130
+
131
def check_filepath(
    filepath: Path | None = None, *, fullfilepath: str | None = None
) -> Path:
    """Ensure the parent directory for a file path exists.

    Parameters
    ----------
    filepath : Path | None, optional
        Path object to validate. Mutually exclusive with ``fullfilepath``;
        ``fullfilepath`` takes precedence when both are given.
    fullfilepath : str | None, optional
        String path to validate. Mutually exclusive with ``filepath``.

    Returns
    -------
    Path
        Path object representing the validated file path.

    Raises
    ------
    ValueError
        If neither ``filepath`` nor ``fullfilepath`` is provided.
    """
    # Single if/elif/else replaces the original duplicated guard: the
    # up-front "both None" check made the trailing else branch unreachable.
    if fullfilepath is not None:
        target = Path(fullfilepath)
    elif filepath is not None:
        target = Path(filepath)
    else:
        raise ValueError("filepath or fullfilepath is required.")
    # Create any missing parent directories so a subsequent write succeeds.
    target.parent.mkdir(parents=True, exist_ok=True)
    return target
163
+
164
+
165
+ def _to_jsonable(value: Any) -> Any:
166
+ """Convert common helper types to JSON-serializable forms.
167
+
168
+ Parameters
169
+ ----------
170
+ value : Any
171
+ Value to convert.
172
+
173
+ Returns
174
+ -------
175
+ Any
176
+ A JSON-safe representation of ``value``.
177
+ """
178
+ if value is None:
179
+ return None
180
+ if isinstance(value, Enum):
181
+ return value.value
182
+ if isinstance(value, Path):
183
+ return str(value)
184
+ if isinstance(value, datetime):
185
+ return value.isoformat()
186
+ if is_dataclass(value) and not isinstance(value, type):
187
+ return {k: _to_jsonable(v) for k, v in asdict(value).items()}
188
+ if hasattr(value, "model_dump"):
189
+ model_dump = getattr(value, "model_dump")
190
+ return model_dump()
191
+ if isinstance(value, dict):
192
+ return {str(k): _to_jsonable(v) for k, v in value.items()}
193
+ if isinstance(value, (list, tuple, set)):
194
+ return [_to_jsonable(v) for v in value]
195
+ return value
196
+
197
+
198
class customJSONEncoder(json.JSONEncoder):
    """Encode common helper types like enums and paths.

    Methods
    -------
    default(o)
        Return a JSON-serializable representation of ``o``.
    """

    def default(self, o: Any) -> Any:
        """Return a JSON-serializable representation of ``o``.

        Parameters
        ----------
        o : Any
            Object to serialize.

        Returns
        -------
        Any
            JSON-safe representation of ``o``.

        Raises
        ------
        TypeError
            If ``o`` is not a type ``_to_jsonable`` knows how to convert.
        """
        converted = _to_jsonable(o)
        if converted is o:
            # _to_jsonable passed the object through unchanged, meaning it is
            # not a recognized type. Returning it as-is would make the encoder
            # call default() on the same object again, recursing until
            # RecursionError; defer to the base class instead, which raises a
            # descriptive TypeError.
            return super().default(o)
        return converted
221
+
222
+
223
class JSONSerializable:
    """Mixin for classes that can be serialized to JSON.

    Methods
    -------
    to_json()
        Return a JSON-compatible dict representation of the instance.
    to_json_file(filepath)
        Write serialized JSON data to a file path.
    """

    def to_json(self) -> Dict[str, Any]:
        """Return a JSON-compatible dict representation.

        Returns
        -------
        dict[str, Any]
            Mapping with only JSON-serializable values.
        """
        # Dataclass instances are expanded field by field; objects exposing
        # model_dump use their own dump; anything else falls back to __dict__.
        if is_dataclass(self) and not isinstance(self, type):
            return {key: _to_jsonable(val) for key, val in asdict(self).items()}
        if hasattr(self, "model_dump"):
            return _to_jsonable(self.model_dump())
        return _to_jsonable(self.__dict__)

    def to_json_file(self, filepath: str | Path) -> str:
        """Write serialized JSON data to a file path.

        Parameters
        ----------
        filepath : str | Path
            Destination file path. Parent directories are created as needed.

        Returns
        -------
        str
            String representation of the file path written.
        """
        target = Path(filepath)
        # check_filepath creates any missing parent directories.
        check_filepath(fullfilepath=str(target))
        payload = self.to_json()
        with open(target, "w", encoding="utf-8") as handle:
            json.dump(
                payload,
                handle,
                indent=2,
                ensure_ascii=False,
                cls=customJSONEncoder,
            )
        return str(target)
273
+
274
+
275
def log(message: str, level: int = logging.INFO) -> None:
    """Log a message with a basic configuration.

    Parameters
    ----------
    message : str
        Message to emit.
    level : int, optional
        Logging level, by default ``logging.INFO``.

    Notes
    -----
    The first call configures the root logger via ``logging.basicConfig``
    at a fixed ``logging.INFO`` level regardless of ``level``, so messages
    below INFO (e.g. DEBUG) will be filtered by the root logger. If the
    root logger already has handlers, ``basicConfig`` is a no-op (standard
    library behavior).
    """
    global _configured_logging
    if not _configured_logging:
        logging.basicConfig(
            level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s"
        )
        _configured_logging = True
    logging.log(level, message)
292
+
293
+
294
+ __all__ = [
295
+ "ensure_list",
296
+ "check_filepath",
297
+ "JSONSerializable",
298
+ "customJSONEncoder",
299
+ "log",
300
+ ]
@@ -0,0 +1,15 @@
1
+ """Vector store helpers."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from .cleanup import _delete_all_files, _delete_all_vector_stores
6
+ from .storage import VectorStorage
7
+ from .types import VectorStorageFileInfo, VectorStorageFileStats
8
+
9
# Explicit public API. The underscore-prefixed cleanup helpers are exported
# deliberately despite their private-style naming.
__all__ = [
    "VectorStorage",
    "VectorStorageFileInfo",
    "VectorStorageFileStats",
    "_delete_all_vector_stores",
    "_delete_all_files",
]
@@ -0,0 +1,91 @@
1
+ """Cleanup helpers for vector stores."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import logging
6
+
7
+ from openai import OpenAI
8
+
9
+ from ..utils import log
10
+
11
+
12
def _delete_all_vector_stores() -> None:
    """Delete all vector stores and clean up any orphaned files.

    This utility iterates over every vector store owned by the account,
    deleting each one after removing all of its files. Any standalone files that
    remain after the stores are deleted are also removed.

    Returns
    -------
    None
    """
    # Best-effort cleanup: individual failures are logged as warnings and the
    # sweep continues; any unexpected failure aborts with an error log rather
    # than an exception.
    try:
        client = OpenAI()
        stores = client.vector_stores.list().data
        log(f"Found {len(stores)} vector stores.")

        # Track every file id seen attached to a store, so the orphan sweep
        # below only targets files never attached to any store.
        attached_file_ids = set()

        for store in stores:
            log(f"Deleting vector store: {store.name} (ID: {store.id})")

            files = client.vector_stores.files.list(vector_store_id=store.id).data
            for file in files:
                attached_file_ids.add(file.id)
                log(f" - Deleting file {file.id}")
                try:
                    client.vector_stores.files.delete(
                        vector_store_id=store.id, file_id=file.id
                    )
                except Exception as file_err:
                    log(
                        f"Failed to delete file {file.id}: {file_err}",
                        level=logging.WARNING,
                    )

            try:
                client.vector_stores.delete(store.id)
                log(f"Vector store {store.name} deleted.")
            except Exception as store_err:
                log(
                    f"Failed to delete vector store {store.name}: {store_err}",
                    level=logging.WARNING,
                )

        # Second pass: remove account files that were never attached to any
        # of the stores enumerated above.
        log("Checking for orphaned files in client.files...")
        all_files = client.files.list().data
        for file in all_files:
            if file.id not in attached_file_ids:
                try:
                    log(f"Deleting orphaned file {file.id}")
                    client.files.delete(file_id=file.id)
                except Exception as exc:
                    log(
                        f"Failed to delete orphaned file {file.id}: {exc}",
                        level=logging.WARNING,
                    )

    except Exception as exc:
        log(f"Error during cleanup: {exc}", level=logging.ERROR)
71
+
72
+
73
def _delete_all_files() -> None:
    """Delete all files from the OpenAI account.

    This utility iterates over every file owned by the account and deletes them.
    It does not check for vector stores, so it will delete all files regardless
    of their association.

    Returns
    -------
    None
    """
    client = OpenAI()
    # Best-effort sweep: a failure on one file is logged as a warning and the
    # loop continues with the remaining files.
    for file in client.files.list().data:
        try:
            log(f"Deleting file {file.id}")
            client.files.delete(file_id=file.id)
        except Exception as exc:
            log(f"Failed to delete file {file.id}: {exc}", level=logging.WARNING)