bridgic-llms-openai 0.1.0rc1__tar.gz → 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {bridgic_llms_openai-0.1.0rc1 → bridgic_llms_openai-0.1.1}/PKG-INFO +3 -2
- {bridgic_llms_openai-0.1.0rc1 → bridgic_llms_openai-0.1.1}/bridgic/llms/openai/__init__.py +1 -1
- bridgic_llms_openai-0.1.0rc1/bridgic/llms/openai/openai_llm.py → bridgic_llms_openai-0.1.1/bridgic/llms/openai/_openai_llm.py +9 -41
- {bridgic_llms_openai-0.1.0rc1 → bridgic_llms_openai-0.1.1}/pyproject.toml +3 -2
- {bridgic_llms_openai-0.1.0rc1 → bridgic_llms_openai-0.1.1}/.gitignore +0 -0
- {bridgic_llms_openai-0.1.0rc1 → bridgic_llms_openai-0.1.1}/LICENSE +0 -0
- {bridgic_llms_openai-0.1.0rc1 → bridgic_llms_openai-0.1.1}/README.md +0 -0

{bridgic_llms_openai-0.1.0rc1 → bridgic_llms_openai-0.1.1}/PKG-INFO

@@ -1,13 +1,14 @@
 Metadata-Version: 2.4
 Name: bridgic-llms-openai
-Version: 0.1.0rc1
+Version: 0.1.1
 Summary: OpenAI adapters for Bridgic.
 Author-email: Tielei Zhang <zhangtl04@gmail.com>
 License: MIT
 License-File: LICENSE
 Classifier: Programming Language :: Python :: 3
 Requires-Python: >=3.9
-Requires-Dist: bridgic-core>=0.1.
+Requires-Dist: bridgic-core>=0.1.1
+Requires-Dist: bridgic-llms-openai-like>=0.1.1
 Requires-Dist: httpx-aiohttp>=0.1.8
 Requires-Dist: httpx>=0.28.1
 Requires-Dist: openai>=1.60.0

{bridgic_llms_openai-0.1.0rc1 → bridgic_llms_openai-0.1.1}/bridgic/llms/openai/__init__.py

@@ -13,7 +13,7 @@ pip install bridgic-llms-openai
 """
 
 from importlib.metadata import version
-from .openai_llm import OpenAIConfiguration, OpenAILlm
+from ._openai_llm import OpenAIConfiguration, OpenAILlm
 
 __version__ = version("bridgic-llms-openai")
 __all__ = ["OpenAIConfiguration", "OpenAILlm", "__version__"]
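
For reference, the rename of openai_llm.py to _openai_llm.py only touches the private module path; the names re-exported in `__all__` are unchanged. A minimal sketch of consumer code that works on both 0.1.0rc1 and 0.1.1:

    # The public import surface is re-exported from the package root, so user
    # code does not need to reference the renamed _openai_llm module directly.
    from bridgic.llms.openai import OpenAIConfiguration, OpenAILlm, __version__

    print(__version__)  # e.g. "0.1.1"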

bridgic_llms_openai-0.1.0rc1/bridgic/llms/openai/openai_llm.py → bridgic_llms_openai-0.1.1/bridgic/llms/openai/_openai_llm.py

@@ -20,41 +20,13 @@ from bridgic.core.model.types import *
 from bridgic.core.model.protocols import StructuredOutput, ToolSelection, PydanticModel, JsonSchema, Constraint
 from bridgic.core.utils._console import printer
 from bridgic.core.utils._collection import filter_dict, merge_dict, validate_required_params
+from bridgic.llms.openai_like import OpenAILikeConfiguration
 
-class OpenAIConfiguration(
-    """Default configuration for OpenAI chat completions.
-
-    model : str
-        Model ID used to generate the response, like `gpt-4o` or `gpt-4`.
-    temperature : Optional[float]
-        What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
-        make the output more random, while lower values like 0.2 will make it more
-        focused and deterministic.
-    top_p : Optional[float]
-        An alternative to sampling with temperature, called nucleus sampling, where the
-        model considers the results of the tokens with top_p probability mass.
-    presence_penalty : Optional[float]
-        Number between -2.0 and 2.0. Positive values penalize new tokens based on
-        whether they appear in the text so far, increasing the model's likelihood to
-        talk about new topics.
-    frequency_penalty : Optional[float]
-        Number between -2.0 and 2.0. Positive values penalize new tokens based on their
-        existing frequency in the text so far, decreasing the model's likelihood to
-        repeat the same line verbatim.
-    max_tokens : Optional[int]
-        The maximum number of tokens that can be generated in the chat completion.
-        This value is now deprecated in favor of `max_completion_tokens`.
-    stop : Optional[List[str]]
-        Up to 4 sequences where the API will stop generating further tokens.
-        Not supported with latest reasoning models `o3` and `o3-mini`.
+class OpenAIConfiguration(OpenAILikeConfiguration):
     """
-    model: str
-    temperature: Optional[float] = None
-    top_p: Optional[float] = None
-    presence_penalty: Optional[float] = None
-    frequency_penalty: Optional[float] = None
-    max_tokens: Optional[int] = None
-    stop: Optional[List[str]] = None
+    Configuration for OpenAI chat completions.
+    """
+    pass
 
 class OpenAILlm(BaseLlm, StructuredOutput, ToolSelection):
     """
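
With this change the sampling fields previously declared on OpenAIConfiguration (model, temperature, top_p, presence_penalty, frequency_penalty, max_tokens, stop) are inherited from OpenAILikeConfiguration in the new bridgic-llms-openai-like dependency, which is why the class body reduces to a docstring and `pass`. A hedged sketch of building a configuration, assuming the inherited fields keep the names documented in the removed docstring:

    # Assumption: OpenAILikeConfiguration exposes the same field names that the
    # old OpenAIConfiguration documented; only the class hierarchy changed.
    from bridgic.llms.openai import OpenAIConfiguration

    config = OpenAIConfiguration(
        model="gpt-4o",         # model ID, e.g. gpt-4o or gpt-4
        temperature=0.2,        # 0-2; lower is more focused and deterministic
        top_p=1.0,              # nucleus-sampling alternative to temperature
        presence_penalty=0.0,   # -2.0 to 2.0
        frequency_penalty=0.0,  # -2.0 to 2.0
        max_tokens=512,         # deprecated upstream in favor of max_completion_tokens
        stop=["\n\n"],          # up to 4 stop sequences
    )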

@@ -67,6 +39,8 @@ class OpenAILlm(BaseLlm, StructuredOutput, ToolSelection):
         The API key for OpenAI services. Required for authentication.
     api_base : Optional[str]
         The base URL for the OpenAI API. If None, uses the default OpenAI endpoint.
+    configuration : Optional[OpenAIConfiguration]
+        The configuration for the OpenAI API. If None, uses the default configuration.
     timeout : Optional[float]
         Request timeout in seconds. If None, no timeout is applied.
     http_client : Optional[httpx.Client]

@@ -74,13 +48,6 @@ class OpenAILlm(BaseLlm, StructuredOutput, ToolSelection):
     http_async_client : Optional[httpx.AsyncClient]
         Custom asynchronous HTTP client for requests. If None, creates a default client.
 
-    Attributes
-    ----------
-    client : openai.OpenAI
-        The synchronous OpenAI client instance.
-    async_client : openai.AsyncOpenAI
-        The asynchronous OpenAI client instance.
-
     Examples
     --------
     Basic usage for chat completion:
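
Taken together, these docstring hunks describe the constructor surface of OpenAILlm in 0.1.1: a new `configuration` parameter is documented, and the internal `client`/`async_client` attributes are no longer part of the public docs. A hedged construction sketch based only on the parameters listed above (keyword usage and the placeholder API key are assumptions):

    import httpx
    from bridgic.llms.openai import OpenAIConfiguration, OpenAILlm

    llm = OpenAILlm(
        api_key="sk-...",                      # required for authentication
        api_base=None,                         # None -> default OpenAI endpoint
        configuration=OpenAIConfiguration(model="gpt-4o"),
        timeout=30.0,                          # seconds; None -> no timeout
        http_client=httpx.Client(),            # optional custom sync HTTP client
    )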

@@ -894,7 +861,8 @@ class OpenAILlm(BaseLlm, StructuredOutput, ToolSelection):
             "type": "json_schema",
             "json_schema": {
                 "schema": self._add_schema_properties(constraint.schema_dict),
-
+                # default name for schema
+                "name": "schema",
                 "strict": True,
             },
         }
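
The last hunk means the strict JSON-schema response format built for structured output now always carries a `name` entry, which OpenAI's Chat Completions structured-output endpoint expects. Illustrative payload shape only; the schema dict below is a made-up example, and `_add_schema_properties`/`constraint.schema_dict` are bridgic internals:

    # Resulting response_format shape after this change (example schema).
    response_format = {
        "type": "json_schema",
        "json_schema": {
            "schema": {"type": "object", "properties": {"answer": {"type": "string"}}},
            # default name for schema
            "name": "schema",
            "strict": True,
        },
    }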

{bridgic_llms_openai-0.1.0rc1 → bridgic_llms_openai-0.1.1}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "bridgic-llms-openai"
-version = "0.1.0rc1"
+version = "0.1.1"
 license = {text = "MIT"}
 classifiers = [
     # "Private :: Do Not Upload",

@@ -13,7 +13,8 @@ authors = [
     { name = "Tielei Zhang", email = "zhangtl04@gmail.com" },
 ]
 dependencies = [
-    "bridgic-core>=0.1.
+    "bridgic-core>=0.1.1",
+    "bridgic-llms-openai-like>=0.1.1",
     "openai>=1.60.0",
     "httpx>=0.28.1",
     "httpx-aiohttp>=0.1.8",