openai-sdk-helpers 0.0.8__py3-none-any.whl → 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. openai_sdk_helpers/__init__.py +90 -2
  2. openai_sdk_helpers/agent/__init__.py +8 -4
  3. openai_sdk_helpers/agent/base.py +80 -45
  4. openai_sdk_helpers/agent/config.py +6 -4
  5. openai_sdk_helpers/agent/{project_manager.py → coordination.py} +29 -45
  6. openai_sdk_helpers/agent/prompt_utils.py +7 -1
  7. openai_sdk_helpers/agent/runner.py +67 -141
  8. openai_sdk_helpers/agent/search/__init__.py +33 -0
  9. openai_sdk_helpers/agent/search/base.py +297 -0
  10. openai_sdk_helpers/agent/{vector_search.py → search/vector.py} +89 -157
  11. openai_sdk_helpers/agent/{web_search.py → search/web.py} +77 -156
  12. openai_sdk_helpers/agent/summarizer.py +29 -8
  13. openai_sdk_helpers/agent/translator.py +40 -13
  14. openai_sdk_helpers/agent/validation.py +32 -8
  15. openai_sdk_helpers/async_utils.py +132 -0
  16. openai_sdk_helpers/config.py +101 -65
  17. openai_sdk_helpers/context_manager.py +241 -0
  18. openai_sdk_helpers/enums/__init__.py +9 -1
  19. openai_sdk_helpers/enums/base.py +67 -8
  20. openai_sdk_helpers/environment.py +33 -6
  21. openai_sdk_helpers/errors.py +133 -0
  22. openai_sdk_helpers/logging_config.py +105 -0
  23. openai_sdk_helpers/prompt/__init__.py +10 -71
  24. openai_sdk_helpers/prompt/base.py +222 -0
  25. openai_sdk_helpers/response/__init__.py +38 -3
  26. openai_sdk_helpers/response/base.py +363 -210
  27. openai_sdk_helpers/response/config.py +318 -0
  28. openai_sdk_helpers/response/messages.py +56 -40
  29. openai_sdk_helpers/response/runner.py +77 -33
  30. openai_sdk_helpers/response/tool_call.py +62 -27
  31. openai_sdk_helpers/response/vector_store.py +27 -14
  32. openai_sdk_helpers/retry.py +175 -0
  33. openai_sdk_helpers/streamlit_app/__init__.py +19 -2
  34. openai_sdk_helpers/streamlit_app/app.py +114 -39
  35. openai_sdk_helpers/streamlit_app/config.py +502 -0
  36. openai_sdk_helpers/streamlit_app/streamlit_web_search.py +5 -6
  37. openai_sdk_helpers/structure/__init__.py +72 -3
  38. openai_sdk_helpers/structure/agent_blueprint.py +82 -19
  39. openai_sdk_helpers/structure/base.py +208 -93
  40. openai_sdk_helpers/structure/plan/__init__.py +29 -1
  41. openai_sdk_helpers/structure/plan/enum.py +41 -5
  42. openai_sdk_helpers/structure/plan/helpers.py +172 -0
  43. openai_sdk_helpers/structure/plan/plan.py +109 -49
  44. openai_sdk_helpers/structure/plan/task.py +38 -6
  45. openai_sdk_helpers/structure/plan/types.py +15 -0
  46. openai_sdk_helpers/structure/prompt.py +21 -2
  47. openai_sdk_helpers/structure/responses.py +52 -11
  48. openai_sdk_helpers/structure/summary.py +55 -7
  49. openai_sdk_helpers/structure/validation.py +34 -6
  50. openai_sdk_helpers/structure/vector_search.py +132 -18
  51. openai_sdk_helpers/structure/web_search.py +125 -13
  52. openai_sdk_helpers/tools.py +193 -0
  53. openai_sdk_helpers/types.py +57 -0
  54. openai_sdk_helpers/utils/__init__.py +34 -1
  55. openai_sdk_helpers/utils/core.py +296 -34
  56. openai_sdk_helpers/validation.py +302 -0
  57. openai_sdk_helpers/vector_storage/__init__.py +21 -1
  58. openai_sdk_helpers/vector_storage/cleanup.py +25 -13
  59. openai_sdk_helpers/vector_storage/storage.py +123 -64
  60. openai_sdk_helpers/vector_storage/types.py +20 -19
  61. openai_sdk_helpers-0.1.0.dist-info/METADATA +550 -0
  62. openai_sdk_helpers-0.1.0.dist-info/RECORD +69 -0
  63. openai_sdk_helpers/streamlit_app/configuration.py +0 -324
  64. openai_sdk_helpers-0.0.8.dist-info/METADATA +0 -194
  65. openai_sdk_helpers-0.0.8.dist-info/RECORD +0 -55
  66. {openai_sdk_helpers-0.0.8.dist-info → openai_sdk_helpers-0.1.0.dist-info}/WHEEL +0 -0
  67. {openai_sdk_helpers-0.0.8.dist-info → openai_sdk_helpers-0.1.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,318 @@
1
+ """Module defining the ResponseConfiguration dataclass for managing OpenAI SDK responses."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass
6
+ from pathlib import Path
7
+ from typing import Generic, Optional, Sequence, Type, TypeVar
8
+ from openai.types.responses.response_text_config_param import ResponseTextConfigParam
9
+
10
+ from ..config import OpenAISettings
11
+ from ..structure.base import BaseStructure
12
+ from ..response.base import BaseResponse, ToolHandler
13
+
14
+ TIn = TypeVar("TIn", bound="BaseStructure")
15
+ TOut = TypeVar("TOut", bound="BaseStructure")
16
+
17
+
18
class ResponseRegistry:
    """Registry for managing ResponseConfiguration instances.

    Provides centralized storage and retrieval of response configurations,
    enabling reusable response specs across the application. Configurations
    are stored by name and can be retrieved or listed as needed.

    Methods
    -------
    register(config)
        Add a ResponseConfiguration to the registry.
    get(name)
        Retrieve a configuration by name.
    list_names()
        Return all registered configuration names.
    clear()
        Remove all registered configurations.

    Examples
    --------
    >>> registry = ResponseRegistry()
    >>> config = ResponseConfiguration(
    ...     name="test",
    ...     instructions="Test instructions",
    ...     tools=None,
    ...     input_structure=None,
    ...     output_structure=None
    ... )
    >>> registry.register(config)
    >>> retrieved = registry.get("test")
    >>> retrieved.name
    'test'
    """

    def __init__(self) -> None:
        """Initialize an empty registry."""
        # Keyed by configuration name; names must be unique per registry.
        self._by_name: dict[str, ResponseConfiguration] = {}

    def register(self, config: ResponseConfiguration) -> None:
        """Add a ResponseConfiguration to the registry.

        Parameters
        ----------
        config : ResponseConfiguration
            Configuration to register.

        Raises
        ------
        ValueError
            If a configuration with the same name is already registered.

        Examples
        --------
        >>> registry = ResponseRegistry()
        >>> config = ResponseConfiguration(...)
        >>> registry.register(config)
        """
        # Reject duplicates explicitly so a silent overwrite never hides a
        # naming collision between two independently defined configurations.
        if config.name in self._by_name:
            raise ValueError(
                f"Configuration '{config.name}' is already registered. "
                "Use a unique name or clear the registry first."
            )
        self._by_name[config.name] = config

    def get(self, name: str) -> ResponseConfiguration:
        """Retrieve a configuration by name.

        Parameters
        ----------
        name : str
            Configuration name to look up.

        Returns
        -------
        ResponseConfiguration
            The registered configuration.

        Raises
        ------
        KeyError
            If no configuration with the given name exists.

        Examples
        --------
        >>> registry = ResponseRegistry()
        >>> config = registry.get("test")
        """
        # EAFP: a single lookup, re-raised with the available names so the
        # caller can see what they could have asked for.
        try:
            return self._by_name[name]
        except KeyError:
            raise KeyError(
                f"No configuration named '{name}' found. "
                f"Available: {list(self._by_name.keys())}"
            ) from None

    def list_names(self) -> list[str]:
        """Return all registered configuration names.

        Returns
        -------
        list[str]
            Sorted list of configuration names.

        Examples
        --------
        >>> registry = ResponseRegistry()
        >>> registry.list_names()
        []
        """
        return sorted(self._by_name)

    def clear(self) -> None:
        """Remove all registered configurations.

        Examples
        --------
        >>> registry = ResponseRegistry()
        >>> registry.clear()
        """
        self._by_name.clear()
137
+
138
+
139
# Process-wide singleton registry; accessed only through get_default_registry().
_default_registry = ResponseRegistry()


def get_default_registry() -> ResponseRegistry:
    """Return the global default registry instance.

    Returns
    -------
    ResponseRegistry
        Singleton registry for application-wide configuration storage.

    Examples
    --------
    >>> registry = get_default_registry()
    >>> config = ResponseConfiguration(...)
    >>> registry.register(config)
    """
    # Always hand back the same module-level instance so every caller
    # shares one application-wide set of configurations.
    return _default_registry
158
+
159
+
160
@dataclass(frozen=True, slots=True)
class ResponseConfiguration(Generic[TIn, TOut]):
    """
    Represent an immutable configuration describing input and output structures.

    Encapsulate all metadata required to define how a request is interpreted and
    how a response is structured, while enforcing strict type and runtime safety.

    Parameters
    ----------
    name : str
        Unique configuration identifier. Must be a non-empty string.
    instructions : str or Path
        Plain text instructions or a path to a Jinja template file whose
        contents are loaded at runtime.
    tools : Sequence, optional
        Tool definitions associated with the configuration. Any sequence is
        accepted by validation. Default is None.
    input_structure : Type[BaseStructure], optional
        Structure class used to parse or validate input. Must subclass
        BaseStructure. Default is None.
    output_structure : Type[BaseStructure], optional
        Structure class used to format or validate output. Schema is
        automatically generated from this structure. Must subclass
        BaseStructure. Default is None.

    Raises
    ------
    TypeError
        If name is not a non-empty string.
        If instructions is not a string or Path.
        If tools is provided and is not a sequence.
        If input_structure or output_structure is not a class.
        If input_structure or output_structure does not subclass BaseStructure.
    ValueError
        If instructions is a string that is empty or only whitespace.
    FileNotFoundError
        If instructions is a Path that does not point to a readable file.

    Methods
    -------
    __post_init__()
        Validate configuration invariants and enforce BaseStructure subclassing.
    instructions_text
        Return the resolved instruction content as a string.
    gen_response(openai_settings, tool_handlers=None)
        Build a BaseResponse from this configuration.

    Examples
    --------
    >>> config = ResponseConfiguration(
    ...     name="prompt_to_websearch",
    ...     instructions="Convert a prompt into a web search plan.",
    ...     tools=None,
    ...     input_structure=PromptStructure,
    ...     output_structure=WebSearchStructure,
    ... )
    >>> config.name
    'prompt_to_websearch'
    """

    name: str
    instructions: str | Path
    tools: Optional[list]
    input_structure: Optional[Type[TIn]]
    output_structure: Optional[Type[TOut]]

    def __post_init__(self) -> None:
        """
        Validate configuration invariants after initialization.

        Enforce non-empty naming, correct typing of structures, and ensure that
        any declared structure subclasses BaseStructure.

        Raises
        ------
        TypeError
            If name is not a non-empty string.
            If instructions is not a str or Path.
            If tools is provided and is not a sequence.
            If input_structure or output_structure is not a class.
            If input_structure or output_structure does not subclass BaseStructure.
        ValueError
            If instructions is a string that is empty or only whitespace.
        FileNotFoundError
            If instructions is a Path that does not point to a readable file.
        """
        if not self.name or not isinstance(self.name, str):
            raise TypeError("Configuration.name must be a non-empty str")

        instructions_value = self.instructions
        if isinstance(instructions_value, str):
            if not instructions_value.strip():
                raise ValueError("Configuration.instructions must be a non-empty str")
        elif isinstance(instructions_value, Path):
            # Template paths are validated eagerly so a missing file fails at
            # construction time rather than on first use.
            instruction_path = instructions_value.expanduser()
            if not instruction_path.is_file():
                raise FileNotFoundError(
                    f"Instruction template not found: {instruction_path}"
                )
        else:
            raise TypeError("Configuration.instructions must be a str or Path")

        for attr in ("input_structure", "output_structure"):
            cls = getattr(self, attr)
            if cls is None:
                continue
            if not isinstance(cls, type):
                raise TypeError(
                    f"Configuration.{attr} must be a class (Type[BaseStructure]) or None"
                )
            if not issubclass(cls, BaseStructure):
                raise TypeError(f"Configuration.{attr} must subclass BaseStructure")

        # Any Sequence is accepted (not just list) to keep callers flexible.
        if self.tools is not None and not isinstance(self.tools, Sequence):
            raise TypeError("Configuration.tools must be a Sequence or None")

    @property
    def instructions_text(self) -> str:
        """Return the resolved instruction text.

        Returns
        -------
        str
            Plain-text instructions, loading template files when necessary.
        """
        return self._resolve_instructions()

    def _resolve_instructions(self) -> str:
        # Load the template file lazily; plain strings pass through unchanged.
        if isinstance(self.instructions, Path):
            instruction_path = self.instructions.expanduser()
            try:
                return instruction_path.read_text(encoding="utf-8")
            except OSError as exc:
                raise ValueError(
                    f"Unable to read instructions at '{instruction_path}': {exc}"
                ) from exc
        return self.instructions

    def gen_response(
        self,
        openai_settings: OpenAISettings,
        tool_handlers: dict[str, ToolHandler] | None = None,
    ) -> BaseResponse[TOut]:
        """Generate a BaseResponse instance based on the configuration.

        Parameters
        ----------
        openai_settings : OpenAISettings
            Authentication and model settings applied to the generated
            :class:`BaseResponse`.
        tool_handlers : dict[str, ToolHandler], optional
            Mapping of tool names to handler callables. Defaults to an empty
            dictionary when not provided.

        Returns
        -------
        BaseResponse[TOut]
            An instance of BaseResponse configured with ``openai_settings``.
        """
        # A None default avoids the shared-mutable-default pitfall; a fresh
        # dict is created per call instead.
        handlers = {} if tool_handlers is None else tool_handlers
        return BaseResponse[TOut](
            name=self.name,
            instructions=self.instructions_text,
            tools=self.tools,
            output_structure=self.output_structure,
            tool_handlers=handlers,
            openai_settings=openai_settings,
        )
@@ -1,10 +1,15 @@
1
- """Message containers for shared OpenAI responses."""
1
+ """Message containers for OpenAI response conversations.
2
+
3
+ This module provides dataclasses for managing conversation history including
4
+ user inputs, assistant outputs, system messages, and tool calls. Messages are
5
+ stored with timestamps and metadata, and can be serialized to JSON.
6
+ """
2
7
 
3
8
  from __future__ import annotations
4
9
 
5
10
  from dataclasses import dataclass, field
6
11
  from datetime import datetime, timezone
7
- from typing import Dict, List, Union, cast
12
+ from typing import cast
8
13
 
9
14
  from openai.types.responses.response_function_tool_call import ResponseFunctionToolCall
10
15
  from openai.types.responses.response_function_tool_call_param import (
@@ -25,12 +30,26 @@ from .tool_call import ResponseToolCall
25
30
 
26
31
  @dataclass
27
32
  class ResponseMessage(JSONSerializable):
28
- """Single message exchanged with the OpenAI client.
33
+ """Single message exchanged with the OpenAI API.
34
+
35
+ Represents a complete message with role, content, timestamp, and
36
+ optional metadata. Can be serialized to JSON for persistence.
37
+
38
+ Attributes
39
+ ----------
40
+ role : str
41
+ Message role: "user", "assistant", "tool", or "system".
42
+ content : ResponseInputItemParam | ResponseOutputMessage | ResponseFunctionToolCallParam | FunctionCallOutput | ResponseInputMessageContentListParam
43
+ Message content in OpenAI format.
44
+ timestamp : datetime
45
+ UTC timestamp when the message was created.
46
+ metadata : dict[str, str | float | bool]
47
+ Optional metadata for tracking or debugging.
29
48
 
30
49
  Methods
31
50
  -------
32
51
  to_openai_format()
33
- Return the payload in the format expected by the OpenAI client.
52
+ Return the message content in OpenAI API format.
34
53
  """
35
54
 
36
55
  role: str # "user", "assistant", "tool", etc.
@@ -42,7 +61,7 @@ class ResponseMessage(JSONSerializable):
42
61
  | ResponseInputMessageContentListParam
43
62
  )
44
63
  timestamp: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
45
- metadata: Dict[str, Union[str, float, bool]] = field(default_factory=dict)
64
+ metadata: dict[str, str | float | bool] = field(default_factory=dict)
46
65
 
47
66
  def to_openai_format(
48
67
  self,
@@ -65,10 +84,16 @@ class ResponseMessage(JSONSerializable):
65
84
 
66
85
  @dataclass
67
86
  class ResponseMessages(JSONSerializable):
68
- """Represent a collection of messages in a response.
87
+ """Collection of messages in a conversation.
88
+
89
+ Manages the complete history of messages exchanged during an OpenAI
90
+ API interaction. Provides methods for adding different message types
91
+ and converting to formats required by the OpenAI API.
69
92
 
70
- This dataclass encapsulates user inputs and assistant outputs during an
71
- OpenAI API interaction.
93
+ Attributes
94
+ ----------
95
+ messages : list[ResponseMessage]
96
+ Ordered list of all messages in the conversation.
72
97
 
73
98
  Methods
74
99
  -------
@@ -81,10 +106,16 @@ class ResponseMessages(JSONSerializable):
81
106
  add_tool_message(content, output, **metadata)
82
107
  Record a tool call and its output.
83
108
  to_openai_payload()
84
- Convert stored messages to the OpenAI input payload.
109
+ Convert stored messages to OpenAI input payload format.
110
+ get_last_assistant_message()
111
+ Return the most recent assistant message or None.
112
+ get_last_tool_message()
113
+ Return the most recent tool message or None.
114
+ get_last_user_message()
115
+ Return the most recent user message or None.
85
116
  """
86
117
 
87
- messages: List[ResponseMessage] = field(default_factory=list)
118
+ messages: list[ResponseMessage] = field(default_factory=list)
88
119
 
89
120
  def add_system_message(
90
121
  self, content: ResponseInputMessageContentListParam, **metadata
@@ -97,10 +128,6 @@ class ResponseMessages(JSONSerializable):
97
128
  System message content in OpenAI format.
98
129
  **metadata
99
130
  Optional metadata to store with the message.
100
-
101
- Returns
102
- -------
103
- None
104
131
  """
105
132
  response_input = cast(
106
133
  ResponseInputItemParam, {"role": "system", "content": content}
@@ -120,10 +147,6 @@ class ResponseMessages(JSONSerializable):
120
147
  Message payload supplied by the user.
121
148
  **metadata
122
149
  Optional metadata to store with the message.
123
-
124
- Returns
125
- -------
126
- None
127
150
  """
128
151
  self.messages.append(
129
152
  ResponseMessage(role="user", content=input_content, metadata=metadata)
@@ -132,20 +155,16 @@ class ResponseMessages(JSONSerializable):
132
155
  def add_assistant_message(
133
156
  self,
134
157
  content: ResponseOutputMessage,
135
- metadata: Dict[str, Union[str, float, bool]],
158
+ metadata: dict[str, str | float | bool],
136
159
  ) -> None:
137
160
  """Append an assistant message to the conversation.
138
161
 
139
162
  Parameters
140
163
  ----------
141
164
  content : ResponseOutputMessage
142
- Assistant response message.
143
- metadata : dict[str, Union[str, float, bool]]
165
+ Assistant response message from the OpenAI API.
166
+ metadata : dict[str, str | float | bool]
144
167
  Optional metadata to store with the message.
145
-
146
- Returns
147
- -------
148
- None
149
168
  """
150
169
  self.messages.append(
151
170
  ResponseMessage(role="assistant", content=content, metadata=metadata)
@@ -154,20 +173,16 @@ class ResponseMessages(JSONSerializable):
154
173
  def add_tool_message(
155
174
  self, content: ResponseFunctionToolCall, output: str, **metadata
156
175
  ) -> None:
157
- """Record a tool call and its output in the conversation history.
176
+ """Record a tool call and its output in the conversation.
158
177
 
159
178
  Parameters
160
179
  ----------
161
180
  content : ResponseFunctionToolCall
162
- Tool call received from OpenAI.
181
+ Tool call received from the OpenAI API.
163
182
  output : str
164
- JSON string returned by the executed tool.
183
+ JSON string returned by the executed tool handler.
165
184
  **metadata
166
185
  Optional metadata to store with the message.
167
-
168
- Returns
169
- -------
170
- None
171
186
  """
172
187
  tool_call = ResponseToolCall(
173
188
  call_id=content.call_id,
@@ -187,24 +202,25 @@ class ResponseMessages(JSONSerializable):
187
202
 
188
203
  def to_openai_payload(
189
204
  self,
190
- ) -> List[
205
+ ) -> list[
191
206
  ResponseInputItemParam
192
207
  | ResponseOutputMessage
193
208
  | ResponseFunctionToolCallParam
194
209
  | FunctionCallOutput
195
210
  | ResponseInputMessageContentListParam
196
211
  ]:
197
- """Convert stored messages to the input payload expected by OpenAI.
198
-
199
- Notes
200
- -----
201
- Assistant messages are model outputs and are not included in the
202
- next request's input payload.
212
+ """Convert stored messages to OpenAI API input format.
203
213
 
204
214
  Returns
205
215
  -------
206
216
  list
207
- List of message payloads excluding assistant outputs.
217
+ List of message payloads suitable for the OpenAI API.
218
+ Assistant messages are excluded as they are outputs, not inputs.
219
+
220
+ Notes
221
+ -----
222
+ Assistant messages are not included in the returned payload since
223
+ they represent model outputs rather than inputs for the next request.
208
224
  """
209
225
  return [
210
226
  msg.to_openai_format() for msg in self.messages if msg.role != "assistant"
@@ -1,10 +1,14 @@
1
- """Convenience runners for response workflows."""
1
+ """Convenience functions for executing response workflows.
2
+
3
+ This module provides high-level functions that handle the complete lifecycle
4
+ of response workflows including instantiation, execution, and resource cleanup.
5
+ They simplify common usage patterns for both synchronous and asynchronous contexts.
6
+ """
2
7
 
3
8
  from __future__ import annotations
4
9
 
5
10
  import asyncio
6
-
7
- from typing import Any, Optional, Type, TypeVar
11
+ from typing import Any, TypeVar
8
12
 
9
13
  from .base import BaseResponse
10
14
 
@@ -13,26 +17,39 @@ R = TypeVar("R", bound=BaseResponse[Any])
13
17
 
14
18
 
15
19
  def run_sync(
16
- response_cls: Type[R],
20
+ response_cls: type[R],
17
21
  *,
18
22
  content: str,
19
- response_kwargs: Optional[dict[str, Any]] = None,
23
+ response_kwargs: dict[str, Any] | None = None,
20
24
  ) -> Any:
21
- """Run a response workflow synchronously and close resources.
25
+ """Execute a response workflow synchronously with automatic cleanup.
26
+
27
+ Instantiates the response class, executes run_sync with the provided
28
+ content, and ensures cleanup occurs even if an exception is raised.
22
29
 
23
30
  Parameters
24
31
  ----------
25
- response_cls
26
- Response class to instantiate.
27
- content
32
+ response_cls : type[BaseResponse]
33
+ Response class to instantiate for the workflow.
34
+ content : str
28
35
  Prompt text to send to the OpenAI API.
29
- response_kwargs
30
- Keyword arguments forwarded to ``response_cls``. Default ``None``.
36
+ response_kwargs : dict[str, Any] or None, default None
37
+ Optional keyword arguments forwarded to response_cls constructor.
31
38
 
32
39
  Returns
33
40
  -------
34
41
  Any
35
- Parsed response from :meth:`BaseResponse.run_response`.
42
+ Parsed response from BaseResponse.run_sync, typically a structured
43
+ output or None.
44
+
45
+ Examples
46
+ --------
47
+ >>> from openai_sdk_helpers.response import run_sync
48
+ >>> result = run_sync(
49
+ ... MyResponse,
50
+ ... content="Analyze this text",
51
+ ... response_kwargs={"openai_settings": settings}
52
+ ... )
36
53
  """
37
54
  response = response_cls(**(response_kwargs or {}))
38
55
  try:
@@ -42,26 +59,39 @@ def run_sync(
42
59
 
43
60
 
44
61
  async def run_async(
45
- response_cls: Type[R],
62
+ response_cls: type[R],
46
63
  *,
47
64
  content: str,
48
- response_kwargs: Optional[dict[str, Any]] = None,
65
+ response_kwargs: dict[str, Any] | None = None,
49
66
  ) -> Any:
50
- """Run a response workflow asynchronously and close resources.
67
+ """Execute a response workflow asynchronously with automatic cleanup.
68
+
69
+ Instantiates the response class, executes run_async with the provided
70
+ content, and ensures cleanup occurs even if an exception is raised.
51
71
 
52
72
  Parameters
53
73
  ----------
54
- response_cls
55
- Response class to instantiate.
56
- content
74
+ response_cls : type[BaseResponse]
75
+ Response class to instantiate for the workflow.
76
+ content : str
57
77
  Prompt text to send to the OpenAI API.
58
- response_kwargs
59
- Keyword arguments forwarded to ``response_cls``. Default ``None``.
78
+ response_kwargs : dict[str, Any] or None, default None
79
+ Optional keyword arguments forwarded to response_cls constructor.
60
80
 
61
81
  Returns
62
82
  -------
63
83
  Any
64
- Parsed response from :meth:`BaseResponse.run_response_async`.
84
+ Parsed response from BaseResponse.run_async, typically a structured
85
+ output or None.
86
+
87
+ Examples
88
+ --------
89
+ >>> from openai_sdk_helpers.response import run_async
90
+ >>> result = await run_async(
91
+ ... MyResponse,
92
+ ... content="Summarize this document",
93
+ ... response_kwargs={"openai_settings": settings}
94
+ ... )
65
95
  """
66
96
  response = response_cls(**(response_kwargs or {}))
67
97
  try:
@@ -71,30 +101,44 @@ async def run_async(
71
101
 
72
102
 
73
103
  def run_streamed(
74
- response_cls: Type[R],
104
+ response_cls: type[R],
75
105
  *,
76
106
  content: str,
77
- response_kwargs: Optional[dict[str, Any]] = None,
107
+ response_kwargs: dict[str, Any] | None = None,
78
108
  ) -> Any:
79
- """Run a response workflow and return the asynchronous result.
109
+ """Execute a response workflow and return the awaited result.
80
110
 
81
- This mirrors the agent API for discoverability. Streaming responses are not
82
- currently supported by :class:`BaseResponse`, so this returns the same value
83
- as :func:`run_async`.
111
+ Provides API compatibility with agent interfaces. Streaming responses
112
+ are not currently fully supported, so this executes run_async and
113
+ awaits the result.
84
114
 
85
115
  Parameters
86
116
  ----------
87
- response_cls
88
- Response class to instantiate.
89
- content
117
+ response_cls : type[BaseResponse]
118
+ Response class to instantiate for the workflow.
119
+ content : str
90
120
  Prompt text to send to the OpenAI API.
91
- response_kwargs
92
- Keyword arguments forwarded to ``response_cls``. Default ``None``.
121
+ response_kwargs : dict[str, Any] or None, default None
122
+ Optional keyword arguments forwarded to response_cls constructor.
93
123
 
94
124
  Returns
95
125
  -------
96
126
  Any
97
- Parsed response returned from :func:`run_async`.
127
+ Parsed response from run_async, typically a structured output or None.
128
+
129
+ Notes
130
+ -----
131
+ This function exists for API consistency but does not currently provide
132
+ true streaming functionality.
133
+
134
+ Examples
135
+ --------
136
+ >>> from openai_sdk_helpers.response import run_streamed
137
+ >>> result = run_streamed(
138
+ ... MyResponse,
139
+ ... content="Process this text",
140
+ ... response_kwargs={"openai_settings": settings}
141
+ ... )
98
142
  """
99
143
  return asyncio.run(
100
144
  run_async(response_cls, content=content, response_kwargs=response_kwargs)