openai-sdk-helpers 0.0.6__py3-none-any.whl → 0.0.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openai_sdk_helpers/config.py +48 -2
- openai_sdk_helpers/response/__init__.py +2 -0
- openai_sdk_helpers/response/base.py +7 -3
- openai_sdk_helpers/response/vector_store.py +84 -0
- openai_sdk_helpers/utils/__init__.py +13 -1
- openai_sdk_helpers/utils/core.py +93 -1
- openai_sdk_helpers/vector_storage/storage.py +64 -1
- {openai_sdk_helpers-0.0.6.dist-info → openai_sdk_helpers-0.0.7.dist-info}/METADATA +6 -3
- {openai_sdk_helpers-0.0.6.dist-info → openai_sdk_helpers-0.0.7.dist-info}/RECORD +11 -10
- {openai_sdk_helpers-0.0.6.dist-info → openai_sdk_helpers-0.0.7.dist-info}/WHEEL +0 -0
- {openai_sdk_helpers-0.0.6.dist-info → openai_sdk_helpers-0.0.7.dist-info}/licenses/LICENSE +0 -0
openai_sdk_helpers/config.py
CHANGED

@@ -10,6 +10,12 @@ from dotenv import dotenv_values
 from openai import OpenAI
 from pydantic import BaseModel, ConfigDict, Field
 
+from openai_sdk_helpers.utils import (
+    coerce_dict,
+    coerce_optional_float,
+    coerce_optional_int,
+)
+
 
 class OpenAISettings(BaseModel):
     """Configuration helpers for constructing OpenAI clients.

@@ -61,6 +67,28 @@ class OpenAISettings(BaseModel):
             " provided. Defaults to ``OPENAI_MODEL``."
         ),
     )
+    timeout: Optional[float] = Field(
+        default=None,
+        description=(
+            "Request timeout in seconds applied to all OpenAI client calls."
+            " Defaults to ``OPENAI_TIMEOUT``."
+        ),
+    )
+    max_retries: Optional[int] = Field(
+        default=None,
+        description=(
+            "Maximum number of automatic retries for transient failures."
+            " Defaults to ``OPENAI_MAX_RETRIES``."
+        ),
+    )
+    extra_client_kwargs: Dict[str, Any] = Field(
+        default_factory=dict,
+        description=(
+            "Additional keyword arguments forwarded to ``openai.OpenAI``. Use"
+            " this for less common options like ``default_headers`` or"
+            " ``http_client``."
+        ),
+    )
 
     @classmethod
     def from_env(

@@ -87,7 +115,18 @@ class OpenAISettings(BaseModel):
         else:
             env_file_values = dotenv_values()
 
-
+        timeout_raw = (
+            overrides.get("timeout")
+            or env_file_values.get("OPENAI_TIMEOUT")
+            or os.getenv("OPENAI_TIMEOUT")
+        )
+        max_retries_raw = (
+            overrides.get("max_retries")
+            or env_file_values.get("OPENAI_MAX_RETRIES")
+            or os.getenv("OPENAI_MAX_RETRIES")
+        )
+
+        values: Dict[str, Any] = {
             "api_key": overrides.get("api_key")
             or env_file_values.get("OPENAI_API_KEY")
             or os.getenv("OPENAI_API_KEY"),

@@ -103,6 +142,9 @@
             "default_model": overrides.get("default_model")
             or env_file_values.get("OPENAI_MODEL")
             or os.getenv("OPENAI_MODEL"),
+            "timeout": coerce_optional_float(timeout_raw),
+            "max_retries": coerce_optional_int(max_retries_raw),
+            "extra_client_kwargs": coerce_dict(overrides.get("extra_client_kwargs")),
         }
 
         settings = cls(**values)

@@ -128,7 +170,7 @@
         Keyword arguments populated with available authentication and routing
         values.
         """
-        kwargs: Dict[str, Any] =
+        kwargs: Dict[str, Any] = dict(self.extra_client_kwargs)
         if self.api_key:
             kwargs["api_key"] = self.api_key
         if self.org_id:

@@ -137,6 +179,10 @@
             kwargs["project"] = self.project_id
         if self.base_url:
             kwargs["base_url"] = self.base_url
+        if self.timeout is not None:
+            kwargs["timeout"] = self.timeout
+        if self.max_retries is not None:
+            kwargs["max_retries"] = self.max_retries
         return kwargs
 
     def create_client(self) -> OpenAI:
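Taken together, the config changes let timeouts and retry limits flow from the environment into the client. A minimal usage sketch follows; the zero-argument `from_env()` call is an assumption, since the diff does not show the full method signature, and `OPENAI_API_KEY` is expected to be configured separately.

```python
# Hypothetical sketch of the new settings flow added in 0.0.7.
import os

from openai_sdk_helpers.config import OpenAISettings

os.environ.setdefault("OPENAI_TIMEOUT", "30")      # coerced to float -> 30.0
os.environ.setdefault("OPENAI_MAX_RETRIES", "5")   # coerced to int -> 5

settings = OpenAISettings.from_env()
print(settings.timeout, settings.max_retries)      # 30.0 5

# timeout, max_retries, and any extra_client_kwargs are forwarded to openai.OpenAI.
client = settings.create_client()
```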
openai_sdk_helpers/response/__init__.py
CHANGED

@@ -5,6 +5,7 @@ from __future__ import annotations
 from .base import ResponseBase
 from .messages import ResponseMessage, ResponseMessages
 from .runner import run_sync, run_async, run_streamed
+from .vector_store import attach_vector_store
 from .tool_call import ResponseToolCall
 
 __all__ = [

@@ -15,4 +16,5 @@ __all__ = [
     "run_async",
     "run_streamed",
     "ResponseToolCall",
+    "attach_vector_store",
 ]
openai_sdk_helpers/response/base.py
CHANGED

@@ -129,6 +129,8 @@ class ResponseBase(Generic[T]):
         self._tools = tools if tools is not None else []
         self._schema = schema
         self._output_structure = output_structure
+        self._cleanup_user_vector_storage = False
+        self._cleanup_system_vector_storage = False
 
         if client is None:
             if api_key is None:

@@ -162,6 +164,7 @@ class ResponseBase(Generic[T]):
             self._system_vector_storage = self._vector_storage_cls(
                 store_name=storage_name, client=self._client, model=self._model
             )
+            self._cleanup_system_vector_storage = True
             system_vector_storage = cast(Any, self._system_vector_storage)
             for file_path, tool_type in attachments:
                 uploaded_file = system_vector_storage.upload_file(file_path=file_path)

@@ -248,6 +251,7 @@ class ResponseBase(Generic[T]):
                 client=self._client,
                 model=self._model,
            )
+            self._cleanup_user_vector_storage = True
            user_vector_storage = cast(Any, self._user_vector_storage)
            if not any(
                tool.get("type") == "file_search" for tool in self._tools

@@ -483,17 +487,17 @@ class ResponseBase(Generic[T]):
            self.close()
 
    def close(self) -> None:
-        """Delete
+        """Delete managed vector stores and clean up the session."""
        log(f"Closing session {self.uuid} for {self.__class__.__name__}")
 
        try:
-            if self._user_vector_storage:
+            if self._user_vector_storage and self._cleanup_user_vector_storage:
                self._user_vector_storage.delete()
                log("User vector store deleted.")
        except Exception as exc:
            log(f"Error deleting user vector store: {exc}", level=logging.WARNING)
        try:
-            if self._system_vector_storage:
+            if self._system_vector_storage and self._cleanup_system_vector_storage:
                self._system_vector_storage.delete()
                log("System vector store deleted.")
        except Exception as exc:
openai_sdk_helpers/response/vector_store.py
ADDED

@@ -0,0 +1,84 @@
+"""Helpers for attaching vector stores to responses."""
+
+from __future__ import annotations
+
+from typing import Any, Optional, Sequence
+
+from openai import OpenAI
+
+from ..utils import ensure_list
+from .base import ResponseBase
+
+
+def attach_vector_store(
+    response: ResponseBase[Any],
+    vector_stores: str | Sequence[str],
+    api_key: Optional[str] = None,
+) -> list[str]:
+    """Attach vector stores to a response ``file_search`` tool.
+
+    Parameters
+    ----------
+    response
+        Response instance whose tool configuration is updated.
+    vector_stores
+        Single vector store name or a sequence of names to attach.
+    api_key : str, optional
+        API key used when the response does not already have a client. Default
+        ``None``.
+
+    Returns
+    -------
+    list[str]
+        Ordered list of vector store IDs applied to the ``file_search`` tool.
+
+    Raises
+    ------
+    ValueError
+        If a vector store cannot be resolved or no API key is available when
+        required.
+    """
+    requested_stores = ensure_list(vector_stores)
+
+    client = getattr(response, "_client", None)
+    if client is None:
+        if api_key is None:
+            raise ValueError(
+                "OpenAI API key is required to resolve vector store names."
+            )
+        client = OpenAI(api_key=api_key)
+
+    available_stores = client.vector_stores.list().data
+    resolved_ids: list[str] = []
+
+    for store in requested_stores:
+        match = next(
+            (vs.id for vs in available_stores if vs.name == store),
+            None,
+        )
+        if match is None:
+            raise ValueError(f"Vector store '{store}' not found.")
+        if match not in resolved_ids:
+            resolved_ids.append(match)
+
+    file_search_tool = next(
+        (tool for tool in response._tools if tool.get("type") == "file_search"),
+        None,
+    )
+
+    if file_search_tool is None:
+        response._tools.append(
+            {"type": "file_search", "vector_store_ids": resolved_ids}
+        )
+        return resolved_ids
+
+    existing_ids = ensure_list(file_search_tool.get("vector_store_ids", []))
+    combined_ids = existing_ids.copy()
+    for vector_store_id in resolved_ids:
+        if vector_store_id not in combined_ids:
+            combined_ids.append(vector_store_id)
+    file_search_tool["vector_store_ids"] = combined_ids
+    return combined_ids
+
+
+__all__ = ["attach_vector_store"]
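A small self-contained sketch of how `attach_vector_store` behaves. `_StubResponse` is a hypothetical stand-in, since the helper only reads the private `_client` and `_tools` attributes; in real code you would pass an existing `ResponseBase` subclass instance, and the store name and API key below are placeholders.

```python
# Hypothetical demonstration of attach_vector_store; _StubResponse, the store
# name, and the API key are placeholders, not part of the library.
from dataclasses import dataclass, field
from typing import Any, Optional

from openai import OpenAI
from openai_sdk_helpers.response import attach_vector_store


@dataclass
class _StubResponse:
    _client: Optional[OpenAI] = None
    _tools: list[dict[str, Any]] = field(default_factory=list)


response = _StubResponse()
# No client on the response, so an api_key is required for name resolution.
ids = attach_vector_store(response, "knowledge-base", api_key="sk-...")
print(ids)              # e.g. ["vs_abc123"]
print(response._tools)  # [{"type": "file_search", "vector_store_ids": ["vs_abc123"]}]
```

Because `close()` now checks the `_cleanup_*` flags, a store attached this way is left intact when the response is closed; only stores the response created itself are deleted.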
openai_sdk_helpers/utils/__init__.py
CHANGED

@@ -2,11 +2,23 @@
 
 from __future__ import annotations
 
-from .core import
+from .core import (
+    JSONSerializable,
+    check_filepath,
+    coerce_dict,
+    coerce_optional_float,
+    coerce_optional_int,
+    customJSONEncoder,
+    ensure_list,
+    log,
+)
 
 __all__ = [
     "ensure_list",
     "check_filepath",
+    "coerce_optional_float",
+    "coerce_optional_int",
+    "coerce_dict",
     "JSONSerializable",
     "customJSONEncoder",
     "log",
openai_sdk_helpers/utils/core.py
CHANGED

@@ -8,7 +8,99 @@ from dataclasses import asdict, is_dataclass
 from datetime import datetime
 from enum import Enum
 from pathlib import Path
-from typing import Any, Dict, Iterable, List, TypeVar
+from typing import Any, Dict, Iterable, List, Mapping, Optional, TypeVar
+
+
+def coerce_optional_float(value: Any) -> Optional[float]:
+    """Return a float when the provided value can be coerced, otherwise ``None``.
+
+    Parameters
+    ----------
+    value : Any
+        Value to convert into a float. Strings must be parseable as floats.
+
+    Returns
+    -------
+    float | None
+        Converted float value or ``None`` if the input is ``None``.
+
+    Raises
+    ------
+    ValueError
+        If a non-empty string cannot be converted to a float.
+    TypeError
+        If the value is not a float-compatible type.
+    """
+    if value is None:
+        return None
+    if isinstance(value, (float, int)):
+        return float(value)
+    if isinstance(value, str) and value.strip():
+        try:
+            return float(value)
+        except ValueError as exc:
+            raise ValueError("timeout must be a float-compatible value") from exc
+    raise TypeError("timeout must be a float, int, str, or None")
+
+
+def coerce_optional_int(value: Any) -> Optional[int]:
+    """Return an int when the provided value can be coerced, otherwise ``None``.
+
+    Parameters
+    ----------
+    value : Any
+        Value to convert into an int. Strings must be parseable as integers.
+
+    Returns
+    -------
+    int | None
+        Converted integer value or ``None`` if the input is ``None``.
+
+    Raises
+    ------
+    ValueError
+        If a non-empty string cannot be converted to an integer.
+    TypeError
+        If the value is not an int-compatible type.
+    """
+    if value is None:
+        return None
+    if isinstance(value, int) and not isinstance(value, bool):
+        return value
+    if isinstance(value, float) and value.is_integer():
+        return int(value)
+    if isinstance(value, str) and value.strip():
+        try:
+            return int(value)
+        except ValueError as exc:
+            raise ValueError("max_retries must be an int-compatible value") from exc
+    raise TypeError("max_retries must be an int, str, or None")
+
+
+def coerce_dict(value: Any) -> Dict[str, Any]:
+    """Return a string-keyed dictionary built from ``value`` if possible.
+
+    Parameters
+    ----------
+    value : Any
+        Mapping-like value to convert. ``None`` yields an empty dictionary.
+
+    Returns
+    -------
+    dict[str, Any]
+        Dictionary representation of ``value``.
+
+    Raises
+    ------
+    TypeError
+        If the value cannot be treated as a mapping.
+    """
+    if value is None:
+        return {}
+    if isinstance(value, Mapping):
+        return dict(value)
+    raise TypeError("extra_client_kwargs must be a mapping or None")
+
 
 T = TypeVar("T")
 _configured_logging = False
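A quick sanity check of the new coercion helpers, using the re-exports from `openai_sdk_helpers.utils`; the input values are chosen purely for illustration.

```python
# Behaviour of the new coercion helpers, as defined in utils/core.py above.
from openai_sdk_helpers.utils import (
    coerce_dict,
    coerce_optional_float,
    coerce_optional_int,
)

assert coerce_optional_float("30") == 30.0      # env-style strings are parsed
assert coerce_optional_float(None) is None      # missing values pass through
assert coerce_optional_int("5") == 5
assert coerce_optional_int(3.0) == 3            # whole floats are accepted
assert coerce_dict(None) == {}                  # None becomes an empty mapping

try:
    coerce_optional_int("five")                 # non-numeric strings raise
except ValueError as exc:
    print(exc)  # max_retries must be an int-compatible value
```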
openai_sdk_helpers/vector_storage/storage.py
CHANGED

@@ -7,7 +7,7 @@ import logging
 import mimetypes
 import os
 from concurrent.futures import ThreadPoolExecutor, as_completed
-from typing import List, Optional, Union
+from typing import List, Optional, Union, cast
 
 from openai import OpenAI
 from openai.pagination import SyncPage

@@ -437,6 +437,69 @@ class VectorStorage:
                 level=logging.ERROR,
             )
 
+    def download_files(self, output_dir: str) -> VectorStorageFileStats:
+        """Download every file in the vector store to a local directory.
+
+        Parameters
+        ----------
+        output_dir
+            Destination directory where the files will be written. The
+            directory is created when it does not already exist.
+
+        Returns
+        -------
+        VectorStorageFileStats
+            Aggregated statistics describing the download results.
+        """
+        os.makedirs(output_dir, exist_ok=True)
+
+        try:
+            files = self._client.vector_stores.files.list(
+                vector_store_id=self._vector_storage.id
+            )
+            store_files = list(getattr(files, "data", files))
+        except Exception as exc:
+            log(f"Failed to list files for download: {exc}", level=logging.ERROR)
+            return VectorStorageFileStats(
+                total=0,
+                fail=1,
+                errors=[
+                    VectorStorageFileInfo(
+                        name="", id="", status="error", error=str(exc)
+                    )
+                ],
+            )
+
+        stats = VectorStorageFileStats(total=len(store_files))
+
+        for file_ref in store_files:
+            file_id = getattr(file_ref, "id", "")
+            attributes = getattr(file_ref, "attributes", {}) or {}
+            file_name = attributes.get("file_name") or file_id
+            target_path = os.path.join(output_dir, file_name)
+
+            try:
+                content = self._client.files.content(file_id=file_id)
+                if isinstance(content, bytes):
+                    data = content
+                elif hasattr(content, "read"):
+                    data = cast(bytes, content.read())
+                else:
+                    raise TypeError("Unsupported content type for file download")
+                with open(target_path, "wb") as handle:
+                    handle.write(data)
+                stats.success += 1
+            except Exception as exc:
+                log(f"Failed to download {file_id}: {exc}", level=logging.ERROR)
+                stats.fail += 1
+                stats.errors.append(
+                    VectorStorageFileInfo(
+                        name=file_name, id=file_id, status="error", error=str(exc)
+                    )
+                )
+
+        return stats
+
     def search(
         self, query: str, top_k: int = 5
     ) -> Optional[SyncPage[VectorStoreSearchResponse]]:
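A hedged usage sketch for the new bulk download helper. The constructor keywords mirror how `ResponseBase` builds its stores elsewhere in this diff; the store name, model, and output directory are placeholders, and whether `VectorStorage` attaches to an existing store by name is not shown here.

```python
# Hypothetical sketch: download every file from a vector store to ./downloads.
from openai import OpenAI
from openai_sdk_helpers.vector_storage.storage import VectorStorage

client = OpenAI()  # reads OPENAI_API_KEY from the environment
storage = VectorStorage(store_name="project-docs", client=client, model="gpt-4o-mini")

stats = storage.download_files(output_dir="./downloads")
print(f"{stats.success}/{stats.total} downloaded, {stats.fail} failed")
for info in stats.errors:
    print(info.name or info.id, info.error)
```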
{openai_sdk_helpers-0.0.6.dist-info → openai_sdk_helpers-0.0.7.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: openai-sdk-helpers
-Version: 0.0.6
+Version: 0.0.7
 Summary: Composable helpers for OpenAI SDK agents, prompts, and storage
 Author: openai-sdk-helpers maintainers
 License: MIT

@@ -133,8 +133,11 @@ vector_search = VectorSearch(
 ```
 
 The helper reads `OPENAI_API_KEY`, `OPENAI_ORG_ID`, `OPENAI_PROJECT_ID`,
-`OPENAI_BASE_URL`, and `
-custom deployments.
+`OPENAI_BASE_URL`, `OPENAI_MODEL`, `OPENAI_TIMEOUT`, and `OPENAI_MAX_RETRIES` by
+default but supports overrides for custom deployments. Pass uncommon OpenAI
+client keyword arguments (such as `default_headers`, `http_client`, or
+`base_url` proxies) through `extra_client_kwargs` when instantiating
+`OpenAISettings`.
 
 ## Development
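A sketch of that README guidance, constructing the settings object directly; the key, header, and httpx client below are placeholder values.

```python
# Illustrative only: uncommon OpenAI client options routed through
# extra_client_kwargs; values shown here are placeholders.
import httpx

from openai_sdk_helpers.config import OpenAISettings

settings = OpenAISettings(
    api_key="sk-...",
    timeout=30.0,
    max_retries=5,
    extra_client_kwargs={
        "default_headers": {"X-Env": "staging"},
        "http_client": httpx.Client(),  # e.g. preconfigured for a proxy or custom TLS
    },
)
client = settings.create_client()
```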
{openai_sdk_helpers-0.0.6.dist-info → openai_sdk_helpers-0.0.7.dist-info}/RECORD
CHANGED

@@ -1,5 +1,5 @@
 openai_sdk_helpers/__init__.py,sha256=aqesJ3gPruETXtksvQga-iNJYBQZG6TY2YGK7iNOY6g,1324
-openai_sdk_helpers/config.py,sha256=
+openai_sdk_helpers/config.py,sha256=OH--g7Xp4ftucA2C4o6FCX0M9Z15KMRpkY0lpn0mmOI,6455
 openai_sdk_helpers/environment.py,sha256=t_AFP6OXjRBoIQVZdgjqZzcUWB-FDeYn4KzKn5FgrnY,693
 openai_sdk_helpers/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 openai_sdk_helpers/agent/__init__.py,sha256=1sRtu_TbnKR7EstzZjoi2OUOk0xJOTn4xbo5ZeoPIhk,825

@@ -20,11 +20,12 @@ openai_sdk_helpers/prompt/__init__.py,sha256=Pio9s99ej0ior5gKr8m-hSUgqkBOgFdNu1x
 openai_sdk_helpers/prompt/summarizer.jinja,sha256=jliSetWDISbql1EkWi1RB8-L_BXUg8JMkRRsPRHuzbY,309
 openai_sdk_helpers/prompt/translator.jinja,sha256=SZhW8ipEzM-9IA4wyS_r2wIMTAclWrilmk1s46njoL0,291
 openai_sdk_helpers/prompt/validator.jinja,sha256=6t8q_IdxFd3mVBGX6SFKNOert1Wo3YpTOji2SNEbbtE,547
-openai_sdk_helpers/response/__init__.py,sha256=
-openai_sdk_helpers/response/base.py,sha256=
+openai_sdk_helpers/response/__init__.py,sha256=LOCBPGe--iKHHCU1WVcljXV5tLFFRdv1dTJZeip7gc8,505
+openai_sdk_helpers/response/base.py,sha256=vOibHp9xQiC4Nu-kNJ8GutRCwBPAtShgybf9_T_lHWk,19593
 openai_sdk_helpers/response/messages.py,sha256=Ot432B9qbbvinGG0aO6mWYeeEUSVqmLiNlvulKbUlZQ,6487
 openai_sdk_helpers/response/runner.py,sha256=OD-lNgpuhBULKS9-fyYAicKONdx_Tk48qGOzVBmG00s,2536
 openai_sdk_helpers/response/tool_call.py,sha256=kYUfkjmkLbEJun_QByMuAvYjdixwoRAjHNy2Sk_riXA,1937
+openai_sdk_helpers/response/vector_store.py,sha256=wCPPppzCQ-PYSS4T2QHqDUHcPMYNlQFeV-Pu33KBRy8,2418
 openai_sdk_helpers/structure/__init__.py,sha256=GBD48p3M9wnOPmEO9t9t5XuiMQedmDBClKmjDLEjcUo,1129
 openai_sdk_helpers/structure/agent_blueprint.py,sha256=opL-dER3a_f_JWC3Jx9ovRdbC4XZe9X20-o1YUq9sgw,7569
 openai_sdk_helpers/structure/base.py,sha256=FJvB-HedXG02VJEEgymMd2adcJCwugdGHidj4ne7phU,22975

@@ -38,13 +39,13 @@ openai_sdk_helpers/structure/plan/__init__.py,sha256=OVTGp0MAb8RLjE7TZk5zoXjXIih
 openai_sdk_helpers/structure/plan/enum.py,sha256=EYGdUckSUSOXQCTIbD8RhSQNylgVTVvOnb7za6fv6_A,1772
 openai_sdk_helpers/structure/plan/plan.py,sha256=33qBu-yzFEpYHVsvGiGMsdZphYpOD7eT5AjNQzt5cmo,7480
 openai_sdk_helpers/structure/plan/task.py,sha256=Qn-GXV0VuOYA-EHXSC6tQlcXObKMeV3-TQhgeepl2L8,3481
-openai_sdk_helpers/utils/__init__.py,sha256=
-openai_sdk_helpers/utils/core.py,sha256=
+openai_sdk_helpers/utils/__init__.py,sha256=JRmAwQz2qE0bNhshAq5HAlXjYMyE5OqLl-vOKcD6MZY,463
+openai_sdk_helpers/utils/core.py,sha256=mKCkGE1cC06ZcOip70eiA8rJ4jaRmnGm4zJfwDPLkQM,8161
 openai_sdk_helpers/vector_storage/__init__.py,sha256=BjUueFnxmF4T6YOCra2nqa8rEAzsihEYWavkYB7S_lM,384
 openai_sdk_helpers/vector_storage/cleanup.py,sha256=6e_A9MAOKhJl_9EbRgGiB0NrrN79IwN0mMnHrwp4gd8,2964
-openai_sdk_helpers/vector_storage/storage.py,sha256
+openai_sdk_helpers/vector_storage/storage.py,sha256=-Yk9RwzG6ABkhH1p3Y-6tv5sGu2MdxHvwkxEtB9N9_c,19295
 openai_sdk_helpers/vector_storage/types.py,sha256=9u5oBxKTDf_ljvbWhp1dWVW1zrlVwLd4OpikygvlKJI,1298
-openai_sdk_helpers-0.0.
-openai_sdk_helpers-0.0.
-openai_sdk_helpers-0.0.
-openai_sdk_helpers-0.0.
+openai_sdk_helpers-0.0.7.dist-info/METADATA,sha256=zYMBq4ke7ex0StA0Q4ddjD4GAsx9cI8oEWTXpZOV1SY,6557
+openai_sdk_helpers-0.0.7.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+openai_sdk_helpers-0.0.7.dist-info/licenses/LICENSE,sha256=CUhc1NrE50bs45tcXF7OcTQBKEvkUuLqeOHgrWQ5jaA,1067
+openai_sdk_helpers-0.0.7.dist-info/RECORD,,
{openai_sdk_helpers-0.0.6.dist-info → openai_sdk_helpers-0.0.7.dist-info}/WHEEL
File without changes

{openai_sdk_helpers-0.0.6.dist-info → openai_sdk_helpers-0.0.7.dist-info}/licenses/LICENSE
File without changes