openai-sdk-helpers 0.3.0__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. openai_sdk_helpers/__init__.py +6 -6
  2. openai_sdk_helpers/agent/__init__.py +2 -2
  3. openai_sdk_helpers/agent/base.py +231 -110
  4. openai_sdk_helpers/agent/config.py +83 -29
  5. openai_sdk_helpers/agent/coordination.py +64 -28
  6. openai_sdk_helpers/agent/runner.py +16 -15
  7. openai_sdk_helpers/agent/search/base.py +94 -45
  8. openai_sdk_helpers/agent/search/vector.py +86 -58
  9. openai_sdk_helpers/agent/search/web.py +71 -40
  10. openai_sdk_helpers/agent/summarizer.py +32 -7
  11. openai_sdk_helpers/agent/translator.py +57 -24
  12. openai_sdk_helpers/agent/validation.py +34 -4
  13. openai_sdk_helpers/cli.py +42 -0
  14. openai_sdk_helpers/config.py +0 -1
  15. openai_sdk_helpers/environment.py +3 -2
  16. openai_sdk_helpers/files_api.py +35 -3
  17. openai_sdk_helpers/prompt/base.py +6 -0
  18. openai_sdk_helpers/response/__init__.py +3 -3
  19. openai_sdk_helpers/response/base.py +142 -73
  20. openai_sdk_helpers/response/config.py +43 -51
  21. openai_sdk_helpers/response/files.py +5 -5
  22. openai_sdk_helpers/response/messages.py +3 -3
  23. openai_sdk_helpers/response/runner.py +7 -7
  24. openai_sdk_helpers/response/tool_call.py +94 -4
  25. openai_sdk_helpers/response/vector_store.py +3 -3
  26. openai_sdk_helpers/streamlit_app/app.py +16 -16
  27. openai_sdk_helpers/streamlit_app/config.py +38 -37
  28. openai_sdk_helpers/streamlit_app/streamlit_web_search.py +2 -2
  29. openai_sdk_helpers/structure/__init__.py +6 -2
  30. openai_sdk_helpers/structure/agent_blueprint.py +2 -2
  31. openai_sdk_helpers/structure/base.py +8 -99
  32. openai_sdk_helpers/structure/plan/plan.py +2 -2
  33. openai_sdk_helpers/structure/plan/task.py +9 -9
  34. openai_sdk_helpers/structure/prompt.py +2 -2
  35. openai_sdk_helpers/structure/responses.py +15 -15
  36. openai_sdk_helpers/structure/summary.py +3 -3
  37. openai_sdk_helpers/structure/translation.py +32 -0
  38. openai_sdk_helpers/structure/validation.py +2 -2
  39. openai_sdk_helpers/structure/vector_search.py +7 -7
  40. openai_sdk_helpers/structure/web_search.py +6 -6
  41. openai_sdk_helpers/tools.py +41 -15
  42. openai_sdk_helpers/utils/__init__.py +19 -5
  43. openai_sdk_helpers/utils/json/__init__.py +55 -0
  44. openai_sdk_helpers/utils/json/base_model.py +181 -0
  45. openai_sdk_helpers/utils/{json_utils.py → json/data_class.py} +33 -68
  46. openai_sdk_helpers/utils/json/ref.py +113 -0
  47. openai_sdk_helpers/utils/json/utils.py +203 -0
  48. openai_sdk_helpers/utils/output_validation.py +21 -1
  49. openai_sdk_helpers/utils/path_utils.py +34 -1
  50. openai_sdk_helpers/utils/registry.py +17 -6
  51. openai_sdk_helpers/vector_storage/storage.py +10 -0
  52. {openai_sdk_helpers-0.3.0.dist-info → openai_sdk_helpers-0.4.0.dist-info}/METADATA +7 -7
  53. openai_sdk_helpers-0.4.0.dist-info/RECORD +86 -0
  54. openai_sdk_helpers-0.3.0.dist-info/RECORD +0 -81
  55. {openai_sdk_helpers-0.3.0.dist-info → openai_sdk_helpers-0.4.0.dist-info}/WHEEL +0 -0
  56. {openai_sdk_helpers-0.3.0.dist-info → openai_sdk_helpers-0.4.0.dist-info}/entry_points.txt +0 -0
  57. {openai_sdk_helpers-0.3.0.dist-info → openai_sdk_helpers-0.4.0.dist-info}/licenses/LICENSE +0 -0

openai_sdk_helpers/structure/translation.py
@@ -0,0 +1,32 @@
+ """Structured output model for translations."""
+
+ from __future__ import annotations
+
+ from .base import StructureBase, spec_field
+
+
+ class TranslationStructure(StructureBase):
+     """Structured representation of translated text.
+
+     Attributes
+     ----------
+     text : str
+         Translated text output from the agent.
+
+     Methods
+     -------
+     print()
+         Return the formatted model fields.
+
+     Examples
+     --------
+     >>> translation = TranslationStructure(text="Hola mundo")
+     >>> print(translation.text)
+     'Hola mundo'
+     """
+
+     text: str = spec_field(
+         "text",
+         description="Translated text output from the agent.",
+         examples=["Hola mundo", "Bonjour le monde"],
+     )

openai_sdk_helpers/structure/validation.py
@@ -6,10 +6,10 @@ validation checks on user inputs and agent outputs.
  
  from __future__ import annotations
  
- from .base import BaseStructure, spec_field
+ from .base import StructureBase, spec_field
  
  
- class ValidationResultStructure(BaseStructure):
+ class ValidationResultStructure(StructureBase):
      """Capture guardrail validation findings for user and agent messages.
  
      Represents the results of safety and policy validation checks performed

openai_sdk_helpers/structure/vector_search.py
@@ -7,10 +7,10 @@ workflows with error tracking and result aggregation.
  
  from __future__ import annotations
  
- from .base import BaseStructure, spec_field
+ from .base import StructureBase, spec_field
  
  
- class VectorSearchItemStructure(BaseStructure):
+ class VectorSearchItemStructure(StructureBase):
      """A single vector search to perform.
  
      Represents one vector search query with rationale for its inclusion
@@ -35,7 +35,7 @@ class VectorSearchItemStructure(BaseStructure):
      query: str = spec_field("query")
  
  
- class VectorSearchPlanStructure(BaseStructure):
+ class VectorSearchPlanStructure(StructureBase):
      """Collection of vector searches required to satisfy the query.
  
      Represents a plan containing multiple vector searches that together
@@ -56,7 +56,7 @@ class VectorSearchPlanStructure(BaseStructure):
      searches: list[VectorSearchItemStructure] = spec_field("searches")
  
  
- class VectorSearchItemResultStructure(BaseStructure):
+ class VectorSearchItemResultStructure(StructureBase):
      """Result of a single vector search.
  
      Contains the text results retrieved from executing one vector search query.
@@ -74,7 +74,7 @@ class VectorSearchItemResultStructure(BaseStructure):
      texts: list[str] = spec_field("texts")
  
  
- class VectorSearchItemResultsStructure(BaseStructure):
+ class VectorSearchItemResultsStructure(StructureBase):
      """Collection of search results from multiple queries.
  
      Aggregates results from multiple vector searches while tracking any
@@ -119,7 +119,7 @@ class VectorSearchItemResultsStructure(BaseStructure):
          self.item_results.append(item)
  
  
- class VectorSearchReportStructure(BaseStructure):
+ class VectorSearchReportStructure(StructureBase):
      """Structured output from the vector search writer agent.
  
      Contains the final synthesized report from vector search results,
@@ -152,7 +152,7 @@ class VectorSearchReportStructure(BaseStructure):
      sources: list[str] = spec_field("sources")
  
  
- class VectorSearchStructure(BaseStructure):
+ class VectorSearchStructure(StructureBase):
      """Complete output of a vector search workflow.
  
      Represents the full lifecycle of a vector search operation, from the

openai_sdk_helpers/structure/web_search.py
@@ -7,10 +7,10 @@ workflows with comprehensive reporting.
  
  from __future__ import annotations
  
- from .base import BaseStructure, spec_field
+ from .base import StructureBase, spec_field
  
  
- class WebSearchReportStructure(BaseStructure):
+ class WebSearchReportStructure(StructureBase):
      """Structured output from the web search writer agent.
  
      Contains the final synthesized report from web search results,
@@ -43,7 +43,7 @@ class WebSearchReportStructure(BaseStructure):
      sources: list[str] = spec_field("sources")
  
  
- class WebSearchItemStructure(BaseStructure):
+ class WebSearchItemStructure(StructureBase):
      """A single web search to perform.
  
      Represents one web search query with rationale for its inclusion
@@ -68,7 +68,7 @@ class WebSearchItemStructure(BaseStructure):
      query: str = spec_field("query")
  
  
- class WebSearchItemResultStructure(BaseStructure):
+ class WebSearchItemResultStructure(StructureBase):
      """Result of a single web search.
  
      Contains the text content retrieved from executing one web search query.
@@ -86,7 +86,7 @@ class WebSearchItemResultStructure(BaseStructure):
      text: str = spec_field("text")
  
  
- class WebSearchPlanStructure(BaseStructure):
+ class WebSearchPlanStructure(StructureBase):
      """Collection of web searches required to satisfy the query.
  
      Represents a plan containing multiple web searches that together
@@ -107,7 +107,7 @@ class WebSearchPlanStructure(BaseStructure):
      searches: list[WebSearchItemStructure] = spec_field("searches")
  
  
- class WebSearchStructure(BaseStructure):
+ class WebSearchStructure(StructureBase):
      """Complete output of a web search workflow.
  
      Represents the full lifecycle of a web search operation, from the

openai_sdk_helpers/tools.py
@@ -10,19 +10,21 @@ definitions from named metadata structures.
  
  from __future__ import annotations
  
+ import asyncio
  import inspect
+ import threading
  from dataclasses import dataclass
  from typing import Any, Callable, TypeAlias, TypeVar
  
  from pydantic import BaseModel, ValidationError
  
  from openai_sdk_helpers.response.tool_call import parse_tool_arguments
- from openai_sdk_helpers.structure.base import BaseStructure
+ from openai_sdk_helpers.structure.base import StructureBase
  from openai_sdk_helpers.utils import coerce_jsonable, customJSONEncoder
  import json
  
  T = TypeVar("T", bound=BaseModel)
- StructureType: TypeAlias = type[BaseStructure]
+ StructureType: TypeAlias = type[StructureBase]
  
  
  def serialize_tool_result(result: Any) -> str:
@@ -81,7 +83,7 @@ def tool_handler_factory(
      The returned handler:
      1. Parses tool_call.arguments using parse_tool_arguments
      2. Validates arguments with input_model if provided
-     3. Calls func with validated/parsed arguments
+     3. Calls func with validated/parsed arguments (handles both sync and async)
      4. Serializes the result using serialize_tool_result
  
      Parameters
@@ -124,7 +126,13 @@ def tool_handler_factory(
      ...     limit: int = 10
      >>> def search_tool(query: str, limit: int = 10):
      ...     return {"results": [f"Result for {query}"]}
-     >>> handler = tool_handler_factory(search_tool, SearchInput)
+     >>> handler = tool_handler_factory(search_tool, input_model=SearchInput)
+
+     With async function:
+
+     >>> async def async_search_tool(query: str, limit: int = 10):
+     ...     return {"results": [f"Result for {query}"]}
+     >>> handler = tool_handler_factory(async_search_tool)
  
      The handler can then be used with OpenAI tool calls:
@@ -135,6 +143,7 @@ def tool_handler_factory(
      >>> tool_call = ToolCall()
      >>> result = handler(tool_call)  # doctest: +SKIP
      """
+     is_async = inspect.iscoroutinefunction(func)
  
      def handler(tool_call: Any) -> str:
          """Handle tool execution with parsing, validation, and serialization.
@@ -170,15 +179,32 @@ def tool_handler_factory(
          else:
              call_kwargs = parsed_args
  
-         # Execute function (sync only - async functions not supported)
-         if inspect.iscoroutinefunction(func):
-             raise TypeError(
-                 f"Async functions are not supported by tool_handler_factory. "
-                 f"Function '{func.__name__}' is async. "
-                 "Wrap async functions in a synchronous adapter before passing to tool_handler_factory."
-             )
-
-         result = func(**call_kwargs)
+         # Execute function (sync or async with event loop detection)
+         if is_async:
+             # Handle async function with proper event loop detection
+             try:
+                 loop = asyncio.get_running_loop()
+                 # We're inside an event loop, need to run in thread
+                 result_holder: dict[str, Any] = {"value": None, "exception": None}
+
+                 def _thread_func() -> None:
+                     try:
+                         result_holder["value"] = asyncio.run(func(**call_kwargs))
+                     except Exception as exc:
+                         result_holder["exception"] = exc
+
+                 thread = threading.Thread(target=_thread_func)
+                 thread.start()
+                 thread.join()
+
+                 if result_holder["exception"]:
+                     raise result_holder["exception"]
+                 result = result_holder["value"]
+             except RuntimeError:
+                 # No event loop running, can use asyncio.run directly
+                 result = asyncio.run(func(**call_kwargs))
+         else:
+             result = func(**call_kwargs)
  
          # Serialize result
          return serialize_tool_result(result)
@@ -200,14 +226,14 @@ class ToolSpec:
      Attributes
      ----------
      structure : StructureType
-         The BaseStructure class that defines the tool's input parameter schema.
+         The StructureBase class that defines the tool's input parameter schema.
          Used to generate the OpenAI tool definition.
      tool_name : str
          Name identifier for the tool.
      tool_description : str
          Human-readable description of what the tool does.
      output_structure : StructureType or None, default=None
-         Optional BaseStructure class that defines the tool's output schema.
+         Optional StructureBase class that defines the tool's output schema.
          This is for documentation/reference only and is not sent to OpenAI.
          Useful when a tool accepts one type of input but returns a different
          structured output.
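
The async branch added to tool_handler_factory above follows a standard pattern: when the wrapped tool is a coroutine function, the handler calls asyncio.run directly if no event loop is running, and otherwise drives asyncio.run on a short-lived worker thread so the already-running loop is neither blocked nor re-entered. A minimal, self-contained sketch of that pattern (standard library only; illustrative, not the package's handler itself):

import asyncio
import threading
from typing import Any


async def fetch(query: str) -> dict[str, Any]:
    # Stand-in for an async tool implementation.
    await asyncio.sleep(0)
    return {"results": [f"Result for {query}"]}


def run_coroutine_blocking(coro) -> Any:
    # Mirror the diff's strategy: asyncio.run when no loop is active,
    # otherwise run the coroutine to completion on a helper thread.
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        return asyncio.run(coro)  # no loop in this thread

    holder: dict[str, Any] = {}

    def _worker() -> None:
        try:
            holder["value"] = asyncio.run(coro)
        except Exception as exc:
            holder["exception"] = exc

    thread = threading.Thread(target=_worker)
    thread.start()
    thread.join()
    if "exception" in holder:
        raise holder["exception"]
    return holder["value"]


print(run_coroutine_blocking(fetch("hello")))  # {'results': ['Result for hello']}

The thread fallback trades one extra thread per call for correctness: calling asyncio.run inside an already-running loop raises RuntimeError, so handing the coroutine to a fresh thread (with its own event loop) is the safe way to stay synchronous at the call site.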

openai_sdk_helpers/utils/__init__.py
@@ -21,8 +21,8 @@ coercion
      Numeric coercion helpers and list normalization.
  path_utils
      File and path helpers.
- json_utils
-     JSON encoding helpers and mixins.
+ json
+     JSON encoding helpers and mixins for dataclasses and Pydantic models.
  logging_config
      Centralized logger factory and convenience log helper.
  validation
@@ -45,11 +45,18 @@ from .coercion import (
      coerce_optional_int,
      ensure_list,
  )
- from .json_utils import (
-     JSONSerializable,
+ from .json import (
+     BaseModelJSONSerializable,
+     DataclassJSONSerializable,
      coerce_jsonable,
      customJSONEncoder,
+     decode_module_qualname,
+     encode_module_qualname,
+     get_module_qualname,
+     to_jsonable,
  )
+ from .registry import BaseRegistry
+
  from .path_utils import check_filepath, ensure_directory
  from openai_sdk_helpers.logging_config import log
  from .validation import (
@@ -88,9 +95,14 @@ __all__ = [
      "coerce_optional_float",
      "coerce_optional_int",
      "coerce_dict",
+     "to_jsonable",
      "coerce_jsonable",
-     "JSONSerializable",
+     "DataclassJSONSerializable",
+     "BaseModelJSONSerializable",
      "customJSONEncoder",
+     "get_module_qualname",
+     "encode_module_qualname",
+     "decode_module_qualname",
      "log",
      # Validation helpers
      "validate_non_empty_string",
@@ -122,4 +134,6 @@ __all__ = [
      "create_image_data_url",
      "create_file_data_url",
      "is_image_file",
+     # Registry
+     "BaseRegistry",
  ]
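
For downstream imports, the practical effect of this __init__ change is that the old JSONSerializable export is gone, replaced by two named mixins (plus the new qualname helpers and BaseRegistry). A before/after sketch for code that only used the dataclass mixin:

# 0.3.0
from openai_sdk_helpers.utils import JSONSerializable

# 0.4.0: the dataclass mixin is now DataclassJSONSerializable;
# Pydantic models use BaseModelJSONSerializable instead
from openai_sdk_helpers.utils import (
    BaseModelJSONSerializable,
    DataclassJSONSerializable,
)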

openai_sdk_helpers/utils/json/__init__.py
@@ -0,0 +1,55 @@
+ """JSON serialization helpers for dataclasses, Pydantic models, and reference encoding.
+
+ This package provides consistent to_json/from_json flows and a JSONEncoder that
+ handles common types including dataclasses, Pydantic models, and reference encoding.
+
+ Package Layout
+ --------------
+ utils.py
+     to_jsonable(), coerce_jsonable(), customJSONEncoder.
+ data_class.py
+     DataclassJSONSerializable mixin with to_json, to_json_file, from_json, from_json_file.
+ base_model.py
+     BaseModelJSONSerializable for Pydantic, with _serialize_fields/_deserialize_fields hooks.
+ ref.py
+     Reference helpers get_module_qualname, encode_module_qualname, decode_module_qualname.
+
+ Public API
+ ----------
+ to_jsonable(value)
+     Convert common types to JSON-safe forms; recursive for containers/dicts.
+ coerce_jsonable(value)
+     Ensures json.dumps succeeds; falls back to str when necessary. Special-cases ResponseBase.
+ customJSONEncoder
+     json.JSONEncoder subclass delegating to to_jsonable.
+ DataclassJSONSerializable
+     Mixin adding to_json(), to_json_file(path) -> str, from_json(data) -> T, from_json_file(path) -> T.
+ BaseModelJSONSerializable
+     Pydantic BaseModel subclass adding to_json() -> dict, to_json_file(path) -> str,
+     from_json(data) -> T, from_json_file(path) -> T, plus overridable _serialize_fields(data)
+     and _deserialize_fields(data).
+ get_module_qualname(obj) -> (module, qualname)
+     Safe retrieval.
+ encode_module_qualname(obj) -> dict|None
+     {module, qualname} for import reconstruction.
+ decode_module_qualname(ref) -> object|None
+     Import and getattr by encoded reference.
+ """
+
+ from __future__ import annotations
+
+ from .base_model import BaseModelJSONSerializable
+ from .data_class import DataclassJSONSerializable
+ from .ref import decode_module_qualname, encode_module_qualname, get_module_qualname
+ from .utils import coerce_jsonable, customJSONEncoder, to_jsonable
+
+ __all__ = [
+     "to_jsonable",
+     "coerce_jsonable",
+     "customJSONEncoder",
+     "DataclassJSONSerializable",
+     "BaseModelJSONSerializable",
+     "get_module_qualname",
+     "encode_module_qualname",
+     "decode_module_qualname",
+ ]
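
The ref.py helpers listed in the Public API section above encode where an object can be imported from and restore it later. A short round-trip sketch, assuming the documented signatures:

from datetime import datetime

from openai_sdk_helpers.utils.json import (
    decode_module_qualname,
    encode_module_qualname,
    get_module_qualname,
)

module, qualname = get_module_qualname(datetime)  # e.g. ("datetime", "datetime")
ref = encode_module_qualname(datetime)            # e.g. {"module": "datetime", "qualname": "datetime"}
restored = decode_module_qualname(ref)            # imports the module and getattrs the name
assert restored is datetime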

openai_sdk_helpers/utils/json/base_model.py
@@ -0,0 +1,181 @@
+ """Pydantic BaseModel JSON serialization support.
+
+ This module provides BaseModelJSONSerializable for Pydantic models,
+ with to_json, to_json_file, from_json, from_json_file methods and
+ customizable _serialize_fields/_deserialize_fields hooks.
+ """
+
+ from __future__ import annotations
+
+ import json
+ from pathlib import Path
+ from typing import Any, TypeVar
+ from pydantic import BaseModel
+ from ..path_utils import check_filepath
+ from .utils import _to_jsonable, customJSONEncoder
+
+ P = TypeVar("P", bound="BaseModelJSONSerializable")
+
+
+ class BaseModelJSONSerializable(BaseModel):
+     """Pydantic BaseModel subclass with JSON serialization support.
+
+     Adds to_json(), to_json_file(path), from_json(data), from_json_file(path),
+     plus overridable _serialize_fields(data) and _deserialize_fields(data) hooks.
+
+     Methods
+     -------
+     to_json()
+         Return a JSON-compatible dict representation.
+     to_json_file(filepath)
+         Write serialized JSON data to a file path.
+     from_json(data)
+         Create an instance from a JSON-compatible dict (class method).
+     from_json_file(filepath)
+         Load an instance from a JSON file (class method).
+     _serialize_fields(data)
+         Customize serialization (override in subclasses).
+     _deserialize_fields(data)
+         Customize deserialization (override in subclasses).
+
+     Examples
+     --------
+     >>> from pydantic import BaseModel
+     >>> class MyConfig(BaseModelJSONSerializable, BaseModel):
+     ...     name: str
+     ...     value: int
+     >>> cfg = MyConfig(name="test", value=42)
+     >>> cfg.to_json()
+     {'name': 'test', 'value': 42}
+     """
+
+     def to_json(self) -> dict[str, Any]:
+         """Return a JSON-compatible dict representation.
+
+         Returns
+         -------
+         dict[str, Any]
+             Serialized model data.
+         """
+         if hasattr(self, "model_dump"):
+             data = getattr(self, "model_dump")()
+         else:
+             data = self.__dict__.copy()
+         return self._serialize_fields(_to_jsonable(data))
+
+     def to_json_file(self, filepath: str | Path) -> str:
+         """Write serialized JSON data to a file path.
+
+         Parameters
+         ----------
+         filepath : str or Path
+             Path where the JSON file will be written.
+
+         Returns
+         -------
+         str
+             Absolute path to the written file.
+         """
+         target = Path(filepath)
+         check_filepath(fullfilepath=str(target))
+         with open(target, "w", encoding="utf-8") as handle:
+             json.dump(
+                 self.to_json(),
+                 handle,
+                 indent=2,
+                 ensure_ascii=False,
+                 cls=customJSONEncoder,
+             )
+         return str(target)
+
+     def _serialize_fields(self, data: dict[str, Any]) -> dict[str, Any]:
+         """Customize field serialization.
+
+         Override this method in subclasses to add custom serialization logic.
+
+         Parameters
+         ----------
+         data : dict[str, Any]
+             Pre-serialized data dictionary.
+
+         Returns
+         -------
+         dict[str, Any]
+             Modified data dictionary.
+         """
+         return data
+
+     @classmethod
+     def _deserialize_fields(cls, data: dict[str, Any]) -> dict[str, Any]:
+         """Customize field deserialization.
+
+         Override this method in subclasses to add custom deserialization logic.
+
+         Parameters
+         ----------
+         data : dict[str, Any]
+             Raw data dictionary from JSON.
+
+         Returns
+         -------
+         dict[str, Any]
+             Modified data dictionary.
+         """
+         return data
+
+     @classmethod
+     def from_json(cls: type[P], data: dict[str, Any]) -> P:
+         """Create an instance from a JSON-compatible dict.
+
+         Parameters
+         ----------
+         data : dict[str, Any]
+             JSON-compatible dictionary containing the instance data.
+
+         Returns
+         -------
+         P
+             New instance of the class.
+
+         Examples
+         --------
+         >>> json_data = {"name": "test", "value": 42}
+         >>> instance = MyConfig.from_json(json_data)
+         """
+         processed_data = cls._deserialize_fields(data)
+         return cls(**processed_data)  # type: ignore[return-value]
+
+     @classmethod
+     def from_json_file(cls: type[P], filepath: str | Path) -> P:
+         """Load an instance from a JSON file.
+
+         Parameters
+         ----------
+         filepath : str or Path
+             Path to the JSON file to load.
+
+         Returns
+         -------
+         P
+             New instance of the class loaded from the file.
+
+         Raises
+         ------
+         FileNotFoundError
+             If the file does not exist.
+
+         Examples
+         --------
+         >>> instance = MyConfig.from_json_file("config.json")
+         """
+         target = Path(filepath)
+         if not target.exists():
+             raise FileNotFoundError(f"JSON file not found: {target}")
+
+         with open(target, "r", encoding="utf-8") as handle:
+             data = json.load(handle)
+
+         return cls.from_json(data)
+
+
+ __all__ = ["BaseModelJSONSerializable"]
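
Because the full class is shown above, the hook methods can be exercised directly: _serialize_fields runs on the dict produced by to_json, and _deserialize_fields runs on incoming data before the model is constructed. A small sketch with hypothetical field names:

from typing import Any

from openai_sdk_helpers.utils.json import BaseModelJSONSerializable


class JobConfig(BaseModelJSONSerializable):
    name: str
    retries: int = 3

    def _serialize_fields(self, data: dict[str, Any]) -> dict[str, Any]:
        # Stamp extra metadata into the serialized dict.
        data["schema_version"] = 1
        return data

    @classmethod
    def _deserialize_fields(cls, data: dict[str, Any]) -> dict[str, Any]:
        # Drop keys the model does not declare before constructing it.
        data = dict(data)
        data.pop("schema_version", None)
        return data


cfg = JobConfig(name="nightly")
payload = cfg.to_json()        # {'name': 'nightly', 'retries': 3, 'schema_version': 1}
restored = JobConfig.from_json(payload)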

openai_sdk_helpers/utils/{json_utils.py → json/data_class.py}
@@ -1,73 +1,24 @@
- """JSON serialization helpers for helper types."""
+ """Dataclass JSON serialization mixin.
+
+ This module provides the DataclassJSONSerializable mixin for dataclasses,
+ adding to_json, to_json_file, from_json, and from_json_file methods.
+ """
  
  from __future__ import annotations
  
  import json
  from dataclasses import asdict, fields, is_dataclass
- from datetime import datetime
- from enum import Enum
  from pathlib import Path
  from typing import Any, TypeVar, Union, get_args, get_origin, get_type_hints
  
- from .path_utils import check_filepath
-
- T = TypeVar("T", bound="JSONSerializable")
-
-
- def _to_jsonable(value: Any) -> Any:
-     """Convert common helper types to JSON-serializable forms."""
-     from openai_sdk_helpers.structure.base import BaseStructure
-
-     if value is None:
-         return None
-     if isinstance(value, Enum):
-         return value.value
-     if isinstance(value, Path):
-         return str(value)
-     if isinstance(value, datetime):
-         return value.isoformat()
-     if is_dataclass(value) and not isinstance(value, type):
-         return {k: _to_jsonable(v) for k, v in asdict(value).items()}
-     if hasattr(value, "model_dump"):
-         model_dump = getattr(value, "model_dump")
-         return model_dump()
-     if isinstance(value, dict):
-         return {str(k): _to_jsonable(v) for k, v in value.items()}
-     if isinstance(value, (list, tuple, set)):
-         return [_to_jsonable(v) for v in value]
-     if isinstance(value, BaseStructure):
-         return value.model_dump()
-     return value
-
-
- def coerce_jsonable(value: Any) -> Any:
-     """Convert value into a JSON-serializable representation."""
-     from openai_sdk_helpers.response.base import BaseResponse
-
-     if value is None:
-         return None
-     if isinstance(value, BaseResponse):
-         return coerce_jsonable(value.messages.to_json())
-     if is_dataclass(value) and not isinstance(value, type):
-         return {key: coerce_jsonable(item) for key, item in asdict(value).items()}
-     coerced = _to_jsonable(value)
-     try:
-         json.dumps(coerced)
-         return coerced
-     except TypeError:
-         return str(coerced)
-
-
- class customJSONEncoder(json.JSONEncoder):
-     """JSON encoder for common helper types like enums and paths."""
-
-     def default(self, o: Any) -> Any:  # noqa: D401
-         """Return JSON-serializable representation of ``o``."""
-         return _to_jsonable(o)
-
-
- class JSONSerializable:
-     """Mixin for classes that can be serialized to and from JSON.
+ from ..path_utils import check_filepath
+ from .utils import _to_jsonable, customJSONEncoder
+
+ T = TypeVar("T", bound="DataclassJSONSerializable")
+
+
+ class DataclassJSONSerializable:
+     """Mixin for dataclasses that can be serialized to and from JSON.
  
      Methods
      -------
@@ -79,10 +30,28 @@ class JSONSerializable:
          Create an instance from a JSON-compatible dict (class method).
      from_json_file(filepath)
          Load an instance from a JSON file (class method).
+
+     Examples
+     --------
+     >>> from dataclasses import dataclass
+     >>> from pathlib import Path
+     >>> @dataclass
+     ... class MyData(DataclassJSONSerializable):
+     ...     name: str
+     ...     path: Path
+     >>> instance = MyData(name="test", path=Path("/tmp/data"))
+     >>> json_data = instance.to_json()
+     >>> restored = MyData.from_json(json_data)
      """
  
      def to_json(self) -> dict[str, Any]:
-         """Return a JSON-compatible dict representation."""
+         """Return a JSON-compatible dict representation.
+
+         Returns
+         -------
+         dict[str, Any]
+             Serialized data dictionary.
+         """
          if is_dataclass(self) and not isinstance(self, type):
              return {k: _to_jsonable(v) for k, v in asdict(self).items()}
          if hasattr(self, "model_dump"):
@@ -223,8 +192,4 @@ class JSONSerializable:
          return cls.from_json(data)
  
  
- __all__ = [
-     "coerce_jsonable",
-     "JSONSerializable",
-     "customJSONEncoder",
- ]
+ __all__ = ["DataclassJSONSerializable"]
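
The renamed mixin keeps the same file-based round trip as before and mirrors the Pydantic variant above. A usage sketch with hypothetical names; the extracted diff does not show how from_json rebuilds typed fields such as Path, so no claim is made about the restored field types:

from dataclasses import dataclass
from pathlib import Path

from openai_sdk_helpers.utils.json import DataclassJSONSerializable


@dataclass
class RunRecord(DataclassJSONSerializable):
    run_id: str
    output_dir: Path


record = RunRecord(run_id="demo-001", output_dir=Path("/tmp/runs"))
written = record.to_json_file("run_record.json")   # returns the written path as str
loaded = RunRecord.from_json_file(written)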