openai-sdk-helpers 0.0.8__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openai_sdk_helpers/__init__.py +90 -2
- openai_sdk_helpers/agent/__init__.py +8 -4
- openai_sdk_helpers/agent/base.py +80 -45
- openai_sdk_helpers/agent/config.py +6 -4
- openai_sdk_helpers/agent/{project_manager.py → coordination.py} +29 -45
- openai_sdk_helpers/agent/prompt_utils.py +7 -1
- openai_sdk_helpers/agent/runner.py +67 -141
- openai_sdk_helpers/agent/search/__init__.py +33 -0
- openai_sdk_helpers/agent/search/base.py +297 -0
- openai_sdk_helpers/agent/{vector_search.py → search/vector.py} +89 -157
- openai_sdk_helpers/agent/{web_search.py → search/web.py} +77 -156
- openai_sdk_helpers/agent/summarizer.py +29 -8
- openai_sdk_helpers/agent/translator.py +40 -13
- openai_sdk_helpers/agent/validation.py +32 -8
- openai_sdk_helpers/async_utils.py +132 -0
- openai_sdk_helpers/config.py +101 -65
- openai_sdk_helpers/context_manager.py +241 -0
- openai_sdk_helpers/enums/__init__.py +9 -1
- openai_sdk_helpers/enums/base.py +67 -8
- openai_sdk_helpers/environment.py +33 -6
- openai_sdk_helpers/errors.py +133 -0
- openai_sdk_helpers/logging_config.py +105 -0
- openai_sdk_helpers/prompt/__init__.py +10 -71
- openai_sdk_helpers/prompt/base.py +222 -0
- openai_sdk_helpers/response/__init__.py +38 -3
- openai_sdk_helpers/response/base.py +363 -210
- openai_sdk_helpers/response/config.py +318 -0
- openai_sdk_helpers/response/messages.py +56 -40
- openai_sdk_helpers/response/runner.py +77 -33
- openai_sdk_helpers/response/tool_call.py +62 -27
- openai_sdk_helpers/response/vector_store.py +27 -14
- openai_sdk_helpers/retry.py +175 -0
- openai_sdk_helpers/streamlit_app/__init__.py +19 -2
- openai_sdk_helpers/streamlit_app/app.py +114 -39
- openai_sdk_helpers/streamlit_app/config.py +502 -0
- openai_sdk_helpers/streamlit_app/streamlit_web_search.py +5 -6
- openai_sdk_helpers/structure/__init__.py +72 -3
- openai_sdk_helpers/structure/agent_blueprint.py +82 -19
- openai_sdk_helpers/structure/base.py +208 -93
- openai_sdk_helpers/structure/plan/__init__.py +29 -1
- openai_sdk_helpers/structure/plan/enum.py +41 -5
- openai_sdk_helpers/structure/plan/helpers.py +172 -0
- openai_sdk_helpers/structure/plan/plan.py +109 -49
- openai_sdk_helpers/structure/plan/task.py +38 -6
- openai_sdk_helpers/structure/plan/types.py +15 -0
- openai_sdk_helpers/structure/prompt.py +21 -2
- openai_sdk_helpers/structure/responses.py +52 -11
- openai_sdk_helpers/structure/summary.py +55 -7
- openai_sdk_helpers/structure/validation.py +34 -6
- openai_sdk_helpers/structure/vector_search.py +132 -18
- openai_sdk_helpers/structure/web_search.py +125 -13
- openai_sdk_helpers/tools.py +193 -0
- openai_sdk_helpers/types.py +57 -0
- openai_sdk_helpers/utils/__init__.py +34 -1
- openai_sdk_helpers/utils/core.py +296 -34
- openai_sdk_helpers/validation.py +302 -0
- openai_sdk_helpers/vector_storage/__init__.py +21 -1
- openai_sdk_helpers/vector_storage/cleanup.py +25 -13
- openai_sdk_helpers/vector_storage/storage.py +123 -64
- openai_sdk_helpers/vector_storage/types.py +20 -19
- openai_sdk_helpers-0.1.0.dist-info/METADATA +550 -0
- openai_sdk_helpers-0.1.0.dist-info/RECORD +69 -0
- openai_sdk_helpers/streamlit_app/configuration.py +0 -324
- openai_sdk_helpers-0.0.8.dist-info/METADATA +0 -194
- openai_sdk_helpers-0.0.8.dist-info/RECORD +0 -55
- {openai_sdk_helpers-0.0.8.dist-info → openai_sdk_helpers-0.1.0.dist-info}/WHEEL +0 -0
- {openai_sdk_helpers-0.0.8.dist-info → openai_sdk_helpers-0.1.0.dist-info}/licenses/LICENSE +0 -0
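Several agent modules were renamed in 0.1.0 (the arrow entries above). A hedged sketch of the call-site change this implies follows; the imported names are placeholders only, since this diff does not show what the modules export.

# Hypothetical import-path updates implied by the renames above; "WebSearchAgent"
# and "VectorSearchAgent" are placeholder names, not identifiers confirmed by this diff.

# 0.0.8
# from openai_sdk_helpers.agent.web_search import WebSearchAgent
# from openai_sdk_helpers.agent.vector_search import VectorSearchAgent

# 0.1.0
# from openai_sdk_helpers.agent.search.web import WebSearchAgent
# from openai_sdk_helpers.agent.search.vector import VectorSearchAgent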
openai_sdk_helpers/async_utils.py
ADDED

@@ -0,0 +1,132 @@
+"""Async/sync bridge utilities with proper error handling.
+
+Provides thread-safe wrappers for running async code from sync contexts
+with proper exception propagation and timeout handling.
+"""
+
+import asyncio
+import queue
+import threading
+from typing import Any, Coroutine, Generic, TypeVar
+
+from openai_sdk_helpers.errors import AsyncExecutionError
+from openai_sdk_helpers.utils.core import log
+
+T = TypeVar("T")
+
+# Default timeout constants
+DEFAULT_COROUTINE_TIMEOUT = 300.0  # 5 minutes
+THREAD_JOIN_TIMEOUT = 5.0  # 5 seconds
+
+
+def run_coroutine_thread_safe(
+    coro: Coroutine[Any, Any, T],
+    timeout: float = DEFAULT_COROUTINE_TIMEOUT,
+) -> T:
+    """Run a coroutine in a thread-safe manner from a sync context.
+
+    Uses a queue to safely communicate results and exceptions between threads.
+    Ensures exceptions from the async operation are properly propagated.
+
+    Parameters
+    ----------
+    coro : Coroutine
+        The coroutine to execute.
+    timeout : float
+        Maximum time in seconds to wait for the coroutine to complete.
+        Default is 300 (5 minutes).
+
+    Returns
+    -------
+    Any
+        Result from the coroutine.
+
+    Raises
+    ------
+    AsyncExecutionError
+        If the coroutine fails or timeout occurs.
+
+    Examples
+    --------
+    >>> async def fetch_data():
+    ...     return "data"
+    >>> result = run_coroutine_thread_safe(fetch_data())
+    """
+    result_queue: queue.Queue[T | Exception] = queue.Queue()
+
+    def _thread_runner() -> None:
+        """Run coroutine and put result in queue."""
+        try:
+            result = asyncio.run(coro)
+            result_queue.put(result)
+        except Exception as exc:
+            # Queue stores the exception to propagate later
+            result_queue.put(exc)
+
+    thread = threading.Thread(target=_thread_runner, daemon=False)
+    thread.start()
+
+    try:
+        result = result_queue.get(timeout=timeout)
+        if isinstance(result, Exception):
+            # Re-raise the exception from the thread
+            raise result
+        return result
+    except queue.Empty:
+        raise AsyncExecutionError(
+            f"Coroutine execution timed out after {timeout} seconds"
+        ) from None
+    finally:
+        # Ensure thread is cleaned up
+        thread.join(timeout=THREAD_JOIN_TIMEOUT)
+        if thread.is_alive():
+            log(
+                f"Thread {thread.name} did not terminate within {THREAD_JOIN_TIMEOUT} seconds",
+                level=20,  # logging.INFO
+            )
+
+
+def run_coroutine_with_fallback(
+    coro: Coroutine[Any, Any, T],
+) -> T:
+    """Run a coroutine, falling back to thread if event loop is already running.
+
+    Attempts to run the coroutine directly if no event loop is present.
+    If an event loop is already running (nested scenario), creates a new
+    thread to avoid the "RuntimeError: asyncio.run() cannot be called from a
+    running event loop" error.
+
+    Parameters
+    ----------
+    coro : Coroutine
+        The coroutine to execute.
+
+    Returns
+    -------
+    Any
+        Result from the coroutine.
+
+    Raises
+    ------
+    AsyncExecutionError
+        If execution fails or times out.
+
+    Examples
+    --------
+    >>> async def fetch_data():
+    ...     return "data"
+    >>> result = run_coroutine_with_fallback(fetch_data())
+    """
+    try:
+        # Try to get currently running loop
+        loop = asyncio.get_running_loop()
+    except RuntimeError:
+        # No running loop, safe to use asyncio.run()
+        return asyncio.run(coro)
+
+    # Loop is already running, must use thread
+    if loop.is_running():
+        return run_coroutine_thread_safe(coro)
+
+    # This shouldn't happen but handle defensive
+    return loop.run_until_complete(coro)
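A minimal usage sketch for the new bridge helpers above (module path and function names come from this file; the fetch_data coroutine is a stand-in for any awaitable SDK call):

import asyncio

from openai_sdk_helpers.async_utils import (
    run_coroutine_thread_safe,
    run_coroutine_with_fallback,
)


async def fetch_data() -> str:
    # Stand-in for an awaitable helper; replace with a real SDK call.
    await asyncio.sleep(0)
    return "data"


# Plain sync code with no running event loop: this delegates to asyncio.run().
print(run_coroutine_with_fallback(fetch_data()))

# Code that may already sit inside a running loop (notebooks, UI callbacks):
# the thread-safe variant runs the coroutine on a worker thread and enforces a timeout.
print(run_coroutine_thread_safe(fetch_data(), timeout=30.0))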
openai_sdk_helpers/config.py
CHANGED

@@ -1,10 +1,15 @@
-"""Shared configuration for OpenAI SDK usage.
+"""Shared configuration for OpenAI SDK usage.
+
+This module provides the OpenAISettings class for centralized management of
+OpenAI client configuration, reading from environment variables and .env files.
+"""
 
 from __future__ import annotations
 
 import os
+from collections.abc import Mapping
 from pathlib import Path
-from typing import Any
+from typing import Any
 
 from dotenv import dotenv_values
 from openai import OpenAI

@@ -20,6 +25,25 @@ from openai_sdk_helpers.utils import (
 class OpenAISettings(BaseModel):
     """Configuration helpers for constructing OpenAI clients.
 
+    This class centralizes OpenAI SDK configuration by reading from environment
+    variables and optional `.env` files, enabling consistent client setup across
+    your application.
+
+    Examples
+    --------
+    Load settings from environment and create a client:
+
+    >>> from openai_sdk_helpers import OpenAISettings
+    >>> settings = OpenAISettings.from_env()
+    >>> client = settings.create_client()
+
+    Override specific settings:
+
+    >>> settings = OpenAISettings.from_env(
+    ...     default_model="gpt-4o",
+    ...     timeout=60.0
+    ... )
+
     Methods
     -------
     from_env(dotenv_path, **overrides)

@@ -32,116 +56,122 @@ class OpenAISettings(BaseModel):
 
     model_config = ConfigDict(extra="ignore")
 
-    api_key:
+    api_key: str | None = Field(
         default=None,
         description=(
-            "API key used to authenticate requests. Defaults to
+            "API key used to authenticate requests. Defaults to OPENAI_API_KEY"
             " from the environment."
         ),
     )
-    org_id:
+    org_id: str | None = Field(
         default=None,
         description=(
             "Organization identifier applied to outgoing requests. Defaults to"
-            "
+            " OPENAI_ORG_ID."
         ),
     )
-    project_id:
+    project_id: str | None = Field(
         default=None,
         description=(
             "Project identifier used for billing and resource scoping. Defaults to"
-            "
+            " OPENAI_PROJECT_ID."
         ),
     )
-    base_url:
+    base_url: str | None = Field(
         default=None,
         description=(
             "Custom base URL for self-hosted or proxied deployments. Defaults to"
-            "
+            " OPENAI_BASE_URL."
         ),
     )
-    default_model:
+    default_model: str | None = Field(
         default=None,
         description=(
             "Model name used when constructing agents if no model is explicitly"
-            " provided. Defaults to
+            " provided. Defaults to OPENAI_MODEL."
         ),
     )
-    timeout:
+    timeout: float | None = Field(
         default=None,
         description=(
             "Request timeout in seconds applied to all OpenAI client calls."
-            " Defaults to
+            " Defaults to OPENAI_TIMEOUT."
         ),
     )
-    max_retries:
+    max_retries: int | None = Field(
         default=None,
         description=(
             "Maximum number of automatic retries for transient failures."
-            " Defaults to
+            " Defaults to OPENAI_MAX_RETRIES."
         ),
     )
-    extra_client_kwargs:
+    extra_client_kwargs: dict[str, Any] = Field(
         default_factory=dict,
         description=(
-            "Additional keyword arguments forwarded to
-            " this for less common options like
-            "
+            "Additional keyword arguments forwarded to openai.OpenAI. Use"
+            " this for less common options like default_headers or"
+            " http_client."
         ),
     )
 
     @classmethod
     def from_env(
-        cls, dotenv_path:
-    ) ->
+        cls, dotenv_path: Path | None = None, **overrides: Any
+    ) -> OpenAISettings:
         """Load settings from the environment and optional overrides.
 
+        Reads configuration from environment variables and an optional .env
+        file, with explicit overrides taking precedence.
+
         Parameters
         ----------
-        dotenv_path : Path
-            Path to a
-            variables
+        dotenv_path : Path or None, optional
+            Path to a .env file to load before reading environment
+            variables, by default None.
         overrides : Any
             Keyword overrides applied on top of environment values.
 
         Returns
         -------
         OpenAISettings
+            Settings instance populated from environment variables and overrides.
+
+        Raises
+        ------
+        ValueError
+            If OPENAI_API_KEY is not found in environment or dotenv file.
         """
-        env_file_values: Mapping[str,
+        env_file_values: Mapping[str, str | None] = {}
         if dotenv_path is not None:
             env_file_values = dotenv_values(dotenv_path)
-        )
-            "
-            "default_model":
-            or env_file_values.get("OPENAI_MODEL")
-            or os.getenv("OPENAI_MODEL"),
+
+        def first_non_none(*candidates: Any) -> Any:
+            for candidate in candidates:
+                if candidate is not None:
+                    return candidate
+            return None
+
+        def resolve_value(override_key: str, env_var: str) -> Any:
+            if dotenv_path is not None:
+                return first_non_none(
+                    overrides.get(override_key),
+                    env_file_values.get(env_var),
+                    os.getenv(env_var),
+                )
+            return first_non_none(
+                overrides.get(override_key),
+                os.getenv(env_var),
+            )
+
+        timeout_raw = resolve_value("timeout", "OPENAI_TIMEOUT")
+        max_retries_raw = resolve_value("max_retries", "OPENAI_MAX_RETRIES")
+
+        values: dict[str, Any] = {
+            "api_key": resolve_value("api_key", "OPENAI_API_KEY"),
+            "org_id": resolve_value("org_id", "OPENAI_ORG_ID"),
+            "project_id": resolve_value("project_id", "OPENAI_PROJECT_ID"),
+            "base_url": resolve_value("base_url", "OPENAI_BASE_URL"),
+            "default_model": resolve_value("default_model", "OPENAI_MODEL"),
             "timeout": coerce_optional_float(timeout_raw),
             "max_retries": coerce_optional_int(max_retries_raw),
             "extra_client_kwargs": coerce_dict(overrides.get("extra_client_kwargs")),

@@ -161,16 +191,19 @@ class OpenAISettings(BaseModel):
 
         return settings
 
-    def client_kwargs(self) ->
-        """Return keyword arguments for constructing an
+    def client_kwargs(self) -> dict[str, Any]:
+        """Return keyword arguments for constructing an OpenAI client.
+
+        Builds a dictionary containing all configured authentication and
+        routing parameters suitable for OpenAI client initialization.
 
         Returns
         -------
-        dict
+        dict[str, Any]
+            Keyword arguments populated with available authentication and
+            routing values.
         """
-        kwargs:
+        kwargs: dict[str, Any] = dict(self.extra_client_kwargs)
         if self.api_key:
             kwargs["api_key"] = self.api_key
         if self.org_id:

@@ -186,12 +219,15 @@ class OpenAISettings(BaseModel):
         return kwargs
 
     def create_client(self) -> OpenAI:
-        """Instantiate an
+        """Instantiate an OpenAI client using the stored configuration.
+
+        Uses client_kwargs() to build the client with all configured
+        authentication and routing parameters.
 
         Returns
         -------
         OpenAI
+            Client initialized with the configured settings.
         """
         return OpenAI(**self.client_kwargs())
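A short sketch of the precedence from_env() now implements (explicit overrides first, then the optional .env file, then process environment variables); the .env path and model name below are illustrative only:

from pathlib import Path

from openai_sdk_helpers import OpenAISettings

# Values resolve as: keyword override > value from the .env file > os.environ.
settings = OpenAISettings.from_env(
    dotenv_path=Path(".env"),     # optional; omit to read process env vars only
    default_model="gpt-4o-mini",  # illustrative override; takes priority over OPENAI_MODEL
    timeout=30.0,                 # coerced to float, like the OPENAI_TIMEOUT variable
)
client = settings.create_client()  # equivalent to OpenAI(**settings.client_kwargs())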
openai_sdk_helpers/context_manager.py
ADDED

@@ -0,0 +1,241 @@
+"""Context manager utilities for resource cleanup.
+
+Provides base classes and utilities for proper resource management
+with guaranteed cleanup on exit or exception.
+"""
+
+from __future__ import annotations
+
+import asyncio
+from contextlib import asynccontextmanager
+from types import TracebackType
+from typing import Any, AsyncIterator, Generic, Optional, TypeVar
+
+from openai_sdk_helpers.utils.core import log
+
+T = TypeVar("T")
+
+
+class ManagedResource(Generic[T]):
+    """Base class for resources that need cleanup.
+
+    Provides context manager support for guaranteed resource cleanup
+    even when exceptions occur.
+
+    Examples
+    --------
+    >>> class DatabaseConnection(ManagedResource[Connection]):
+    ...     def __init__(self, connection):
+    ...         self.connection = connection
+    ...
+    ...     def close(self) -> None:
+    ...         if self.connection:
+    ...             self.connection.close()
+
+    >>> with DatabaseConnection(connect()) as db:
+    ...     db.query("SELECT ...")
+    """
+
+    def __enter__(self) -> T:
+        """Enter context manager.
+
+        Returns
+        -------
+        T
+            The resource instance (self cast appropriately).
+        """
+        return self  # type: ignore
+
+    def __exit__(
+        self,
+        exc_type: Optional[type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> bool:
+        """Exit context manager with cleanup.
+
+        Parameters
+        ----------
+        exc_type : type[BaseException] | None
+            Type of exception if one was raised, None otherwise.
+        exc_val : BaseException | None
+            Exception instance if one was raised, None otherwise.
+        exc_tb : TracebackType | None
+            Traceback if exception was raised, None otherwise.
+
+        Returns
+        -------
+        bool
+            False to re-raise exceptions, True to suppress them.
+        """
+        try:
+            self.close()
+        except Exception as exc:
+            log(f"Error during cleanup: {exc}", level=30)  # logging.WARNING
+            # Don't suppress cleanup errors
+            if exc_type is None:
+                raise
+
+        return False  # Re-raise exceptions
+
+    def close(self) -> None:
+        """Close and cleanup the resource.
+
+        Should be overridden by subclasses to perform actual cleanup.
+        Should not raise exceptions, but may log them.
+
+        Raises
+        ------
+        Exception
+            May raise if cleanup fails catastrophically.
+        """
+        pass
+
+
+class AsyncManagedResource(Generic[T]):
+    """Base class for async resources that need cleanup.
+
+    Provides async context manager support for guaranteed resource cleanup
+    even when exceptions occur.
+
+    Examples
+    --------
+    >>> class AsyncDatabaseConnection(AsyncManagedResource[AsyncConnection]):
+    ...     def __init__(self, connection):
+    ...         self.connection = connection
+    ...
+    ...     async def close(self) -> None:
+    ...         if self.connection:
+    ...             await self.connection.close()
+
+    >>> async with AsyncDatabaseConnection(await connect()) as db:
+    ...     await db.query("SELECT ...")
+    """
+
+    async def __aenter__(self) -> T:
+        """Enter async context manager.
+
+        Returns
+        -------
+        T
+            The resource instance (self cast appropriately).
+        """
+        return self  # type: ignore
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> bool:
+        """Exit async context manager with cleanup.
+
+        Parameters
+        ----------
+        exc_type : type[BaseException] | None
+            Type of exception if one was raised, None otherwise.
+        exc_val : BaseException | None
+            Exception instance if one was raised, None otherwise.
+        exc_tb : TracebackType | None
+            Traceback if exception was raised, None otherwise.
+
+        Returns
+        -------
+        bool
+            False to re-raise exceptions, True to suppress them.
+        """
+        try:
+            await self.close()
+        except Exception as exc:
+            log(f"Error during async cleanup: {exc}", level=30)  # logging.WARNING
+            # Don't suppress cleanup errors
+            if exc_type is None:
+                raise
+
+        return False  # Re-raise exceptions
+
+    async def close(self) -> None:
+        """Close and cleanup the resource asynchronously.
+
+        Should be overridden by subclasses to perform actual cleanup.
+        Should not raise exceptions, but may log them.
+
+        Raises
+        ------
+        Exception
+            May raise if cleanup fails catastrophically.
+        """
+        pass
+
+
+def ensure_closed(resource: Any) -> None:
+    """Safely close a resource if it has a close method.
+
+    Logs errors but doesn't raise them.
+
+    Parameters
+    ----------
+    resource : Any
+        Object that may have a close() method.
+    """
+    if resource is None:
+        return
+
+    close_method = getattr(resource, "close", None)
+    if callable(close_method):
+        try:
+            close_method()
+        except Exception as exc:
+            log(f"Error closing {type(resource).__name__}: {exc}", level=30)
+
+
+async def ensure_closed_async(resource: Any) -> None:
+    """Safely close a resource asynchronously if it has an async close method.
+
+    Logs errors but doesn't raise them.
+
+    Parameters
+    ----------
+    resource : Any
+        Object that may have an async close() method.
+    """
+    if resource is None:
+        return
+
+    close_method = getattr(resource, "close", None)
+    if callable(close_method):
+        try:
+            if asyncio.iscoroutinefunction(close_method):
+                await close_method()
+            else:
+                close_method()
+        except Exception as exc:
+            log(
+                f"Error closing async {type(resource).__name__}: {exc}",
+                level=30,
+            )
+
+
+@asynccontextmanager
+async def async_context(resource: AsyncManagedResource[T]) -> AsyncIterator[T]:
+    """Context manager for async resources.
+
+    Parameters
+    ----------
+    resource : AsyncManagedResource
+        Async resource to manage.
+
+    Yields
+    ------
+    T
+        The resource instance.
+
+    Examples
+    --------
+    >>> async with async_context(my_resource) as resource:
+    ...     await resource.do_something()
+    """
+    try:
+        yield await resource.__aenter__()
+    finally:
+        await resource.__aexit__(None, None, None)
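A minimal sketch of subclassing the new ManagedResource base class above; TempDirHandle and its tempfile-based cleanup are illustrative, not part of the package:

import shutil
import tempfile

from openai_sdk_helpers.context_manager import ManagedResource, ensure_closed


class TempDirHandle(ManagedResource["TempDirHandle"]):
    """Owns a temporary directory and removes it in close()."""

    def __init__(self) -> None:
        self.path = tempfile.mkdtemp()

    def close(self) -> None:
        shutil.rmtree(self.path, ignore_errors=True)


# __exit__ calls close() even if the body raises, so the directory is always removed.
with TempDirHandle() as handle:
    print("working in", handle.path)

# ensure_closed() tolerates None and objects without a close() method.
ensure_closed(None)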
@@ -1,4 +1,12 @@
|
|
|
1
|
-
"""Enum
|
|
1
|
+
"""Enum utilities for OpenAI SDK helpers.
|
|
2
|
+
|
|
3
|
+
This module provides specialized enum base classes with metadata capabilities.
|
|
4
|
+
|
|
5
|
+
Classes
|
|
6
|
+
-------
|
|
7
|
+
CrosswalkJSONEnum
|
|
8
|
+
String-based enum with crosswalk metadata support.
|
|
9
|
+
"""
|
|
2
10
|
|
|
3
11
|
from __future__ import annotations
|
|
4
12
|
|