agentrun-inner-test 0.0.46__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentrun/__init__.py +325 -0
- agentrun/agent_runtime/__client_async_template.py +466 -0
- agentrun/agent_runtime/__endpoint_async_template.py +345 -0
- agentrun/agent_runtime/__init__.py +53 -0
- agentrun/agent_runtime/__runtime_async_template.py +477 -0
- agentrun/agent_runtime/api/__data_async_template.py +58 -0
- agentrun/agent_runtime/api/__init__.py +6 -0
- agentrun/agent_runtime/api/control.py +1362 -0
- agentrun/agent_runtime/api/data.py +98 -0
- agentrun/agent_runtime/client.py +868 -0
- agentrun/agent_runtime/endpoint.py +649 -0
- agentrun/agent_runtime/model.py +362 -0
- agentrun/agent_runtime/runtime.py +904 -0
- agentrun/credential/__client_async_template.py +177 -0
- agentrun/credential/__credential_async_template.py +216 -0
- agentrun/credential/__init__.py +28 -0
- agentrun/credential/api/__init__.py +5 -0
- agentrun/credential/api/control.py +606 -0
- agentrun/credential/client.py +319 -0
- agentrun/credential/credential.py +381 -0
- agentrun/credential/model.py +248 -0
- agentrun/integration/__init__.py +21 -0
- agentrun/integration/agentscope/__init__.py +12 -0
- agentrun/integration/agentscope/adapter.py +17 -0
- agentrun/integration/agentscope/builtin.py +65 -0
- agentrun/integration/agentscope/message_adapter.py +185 -0
- agentrun/integration/agentscope/model_adapter.py +60 -0
- agentrun/integration/agentscope/tool_adapter.py +59 -0
- agentrun/integration/builtin/__init__.py +16 -0
- agentrun/integration/builtin/model.py +93 -0
- agentrun/integration/builtin/sandbox.py +1234 -0
- agentrun/integration/builtin/toolset.py +47 -0
- agentrun/integration/crewai/__init__.py +12 -0
- agentrun/integration/crewai/adapter.py +9 -0
- agentrun/integration/crewai/builtin.py +65 -0
- agentrun/integration/crewai/model_adapter.py +31 -0
- agentrun/integration/crewai/tool_adapter.py +26 -0
- agentrun/integration/google_adk/__init__.py +12 -0
- agentrun/integration/google_adk/adapter.py +15 -0
- agentrun/integration/google_adk/builtin.py +65 -0
- agentrun/integration/google_adk/message_adapter.py +144 -0
- agentrun/integration/google_adk/model_adapter.py +46 -0
- agentrun/integration/google_adk/tool_adapter.py +235 -0
- agentrun/integration/langchain/__init__.py +30 -0
- agentrun/integration/langchain/adapter.py +15 -0
- agentrun/integration/langchain/builtin.py +71 -0
- agentrun/integration/langchain/message_adapter.py +141 -0
- agentrun/integration/langchain/model_adapter.py +37 -0
- agentrun/integration/langchain/tool_adapter.py +50 -0
- agentrun/integration/langgraph/__init__.py +35 -0
- agentrun/integration/langgraph/adapter.py +20 -0
- agentrun/integration/langgraph/agent_converter.py +1073 -0
- agentrun/integration/langgraph/builtin.py +65 -0
- agentrun/integration/pydantic_ai/__init__.py +12 -0
- agentrun/integration/pydantic_ai/adapter.py +13 -0
- agentrun/integration/pydantic_ai/builtin.py +65 -0
- agentrun/integration/pydantic_ai/model_adapter.py +44 -0
- agentrun/integration/pydantic_ai/tool_adapter.py +19 -0
- agentrun/integration/utils/__init__.py +112 -0
- agentrun/integration/utils/adapter.py +560 -0
- agentrun/integration/utils/canonical.py +164 -0
- agentrun/integration/utils/converter.py +134 -0
- agentrun/integration/utils/model.py +110 -0
- agentrun/integration/utils/tool.py +1759 -0
- agentrun/model/__client_async_template.py +357 -0
- agentrun/model/__init__.py +57 -0
- agentrun/model/__model_proxy_async_template.py +270 -0
- agentrun/model/__model_service_async_template.py +267 -0
- agentrun/model/api/__init__.py +6 -0
- agentrun/model/api/control.py +1173 -0
- agentrun/model/api/data.py +196 -0
- agentrun/model/client.py +674 -0
- agentrun/model/model.py +235 -0
- agentrun/model/model_proxy.py +439 -0
- agentrun/model/model_service.py +438 -0
- agentrun/sandbox/__aio_sandbox_async_template.py +523 -0
- agentrun/sandbox/__browser_sandbox_async_template.py +110 -0
- agentrun/sandbox/__client_async_template.py +491 -0
- agentrun/sandbox/__code_interpreter_sandbox_async_template.py +463 -0
- agentrun/sandbox/__init__.py +69 -0
- agentrun/sandbox/__sandbox_async_template.py +463 -0
- agentrun/sandbox/__template_async_template.py +152 -0
- agentrun/sandbox/aio_sandbox.py +905 -0
- agentrun/sandbox/api/__aio_data_async_template.py +335 -0
- agentrun/sandbox/api/__browser_data_async_template.py +140 -0
- agentrun/sandbox/api/__code_interpreter_data_async_template.py +206 -0
- agentrun/sandbox/api/__init__.py +19 -0
- agentrun/sandbox/api/__sandbox_data_async_template.py +107 -0
- agentrun/sandbox/api/aio_data.py +551 -0
- agentrun/sandbox/api/browser_data.py +172 -0
- agentrun/sandbox/api/code_interpreter_data.py +396 -0
- agentrun/sandbox/api/control.py +1051 -0
- agentrun/sandbox/api/playwright_async.py +492 -0
- agentrun/sandbox/api/playwright_sync.py +492 -0
- agentrun/sandbox/api/sandbox_data.py +154 -0
- agentrun/sandbox/browser_sandbox.py +185 -0
- agentrun/sandbox/client.py +925 -0
- agentrun/sandbox/code_interpreter_sandbox.py +823 -0
- agentrun/sandbox/model.py +397 -0
- agentrun/sandbox/sandbox.py +848 -0
- agentrun/sandbox/template.py +217 -0
- agentrun/server/__init__.py +191 -0
- agentrun/server/agui_normalizer.py +180 -0
- agentrun/server/agui_protocol.py +797 -0
- agentrun/server/invoker.py +309 -0
- agentrun/server/model.py +427 -0
- agentrun/server/openai_protocol.py +535 -0
- agentrun/server/protocol.py +140 -0
- agentrun/server/server.py +208 -0
- agentrun/toolset/__client_async_template.py +62 -0
- agentrun/toolset/__init__.py +51 -0
- agentrun/toolset/__toolset_async_template.py +204 -0
- agentrun/toolset/api/__init__.py +17 -0
- agentrun/toolset/api/control.py +262 -0
- agentrun/toolset/api/mcp.py +100 -0
- agentrun/toolset/api/openapi.py +1251 -0
- agentrun/toolset/client.py +102 -0
- agentrun/toolset/model.py +321 -0
- agentrun/toolset/toolset.py +270 -0
- agentrun/utils/__data_api_async_template.py +720 -0
- agentrun/utils/__init__.py +5 -0
- agentrun/utils/__resource_async_template.py +158 -0
- agentrun/utils/config.py +258 -0
- agentrun/utils/control_api.py +78 -0
- agentrun/utils/data_api.py +1120 -0
- agentrun/utils/exception.py +151 -0
- agentrun/utils/helper.py +108 -0
- agentrun/utils/log.py +77 -0
- agentrun/utils/model.py +168 -0
- agentrun/utils/resource.py +291 -0
- agentrun_inner_test-0.0.46.dist-info/METADATA +263 -0
- agentrun_inner_test-0.0.46.dist-info/RECORD +135 -0
- agentrun_inner_test-0.0.46.dist-info/WHEEL +5 -0
- agentrun_inner_test-0.0.46.dist-info/licenses/LICENSE +201 -0
- agentrun_inner_test-0.0.46.dist-info/top_level.txt +1 -0
agentrun/model/model.py
ADDED
@@ -0,0 +1,235 @@
"""Model Service model definitions"""

from enum import Enum
from typing import Any, Dict, List, Optional

from agentrun.utils.model import BaseModel, NetworkConfig, PageableInput, Status


class BackendType(str, Enum):
    """Backend type"""

    PROXY = "proxy"
    """Model governance"""
    SERVICE = "service"
    """Model service"""


class ModelType(str, Enum):
    """Model type"""

    LLM = "llm"
    """Large language model"""
    EMBEDDING = "text-embedding"
    """Embedding model"""
    RERANK = "rerank"
    """Rerank model"""
    SPEECH2TEXT = "speech2text"
    TTS = "tts"
    MODERATION = "moderation"


class Provider(str, Enum):
    OpenAI = "openai"
    Anthropic = "anthropic"
    BaiChuan = "baichuan"
    DeepSeek = "deepseek"
    Gemini = "gemini"
    HunYuan = "hunyuan"
    MiniMax = "minimax"
    MoonShot = "moonshot"
    Spark = "spark"
    StepFun = "stepfun"
    Tongyi = "tongyi"
    VertexAI = "vertex_ai"
    WenXin = "wenxin"
    Yi = "yi"
    ZhiPuAI = "zhipuai"
    Custom = "custom"


class ProxyMode(str, Enum):
    SINGLE = "single"
    MULTI = "multi"


class ProviderSettings(BaseModel):
    """Provider settings"""

    api_key: Optional[str] = None
    """API Key"""
    base_url: Optional[str] = None
    """Base URL"""
    model_names: Optional[List[str]] = None
    """List of model names"""


class ModelFeatures(BaseModel):
    """Model features"""

    agent_thought: Optional[bool] = None
    """Whether agent thought is supported"""
    multi_tool_call: Optional[bool] = None
    """Whether multiple tool calls are supported"""
    stream_tool_call: Optional[bool] = None
    """Whether streaming tool calls are supported"""
    tool_call: Optional[bool] = None
    """Whether tool calling is supported"""
    vision: Optional[bool] = None
    """Whether vision input is supported"""


class ModelProperties(BaseModel):
    """Model properties"""

    context_size: Optional[int] = None
    """Context size"""


class ModelParameterRule(BaseModel):
    """Model parameter rule"""

    default: Optional[Any] = None
    max: Optional[float] = None
    min: Optional[float] = None
    name: Optional[str] = None
    required: Optional[bool] = None
    type: Optional[str] = None


class ModelInfoConfig(BaseModel):
    """Model info configuration"""

    model_name: Optional[str] = None
    """Model name"""
    model_features: Optional[ModelFeatures] = None
    """Model features"""
    model_properties: Optional[ModelProperties] = None
    """Model properties"""
    model_parameter_rules: Optional[List[ModelParameterRule]] = None
    """Model parameter rules"""


class ProxyConfigEndpoint(BaseModel):
    base_url: Optional[str] = None
    model_names: Optional[List[str]] = None
    model_service_name: Optional[str] = None
    weight: Optional[int] = None


class ProxyConfigFallback(BaseModel):
    model_name: Optional[str] = None
    model_service_name: Optional[str] = None


class ProxyConfigTokenRateLimiter(BaseModel):
    tps: Optional[int] = None
    tpm: Optional[int] = None
    tph: Optional[int] = None
    tpd: Optional[int] = None


class ProxyConfigAIGuardrailConfig(BaseModel):
    """AI guardrail configuration"""

    check_request: Optional[bool] = None
    check_response: Optional[bool] = None


class ProxyConfigPolicies(BaseModel):
    cache: Optional[bool] = None
    concurrency_limit: Optional[int] = None
    fallbacks: Optional[List[ProxyConfigFallback]] = None
    num_retries: Optional[int] = None
    request_timeout: Optional[int] = None
    ai_guardrail_config: Optional[ProxyConfigAIGuardrailConfig] = None
    token_rate_limiter: Optional[ProxyConfigTokenRateLimiter] = None


class ProxyConfig(BaseModel):
    endpoints: Optional[List[ProxyConfigEndpoint]] = None
    """List of proxy endpoints"""
    policies: Optional[ProxyConfigPolicies] = None


class CommonModelMutableProps(BaseModel):
    credential_name: Optional[str] = None
    description: Optional[str] = None
    network_configuration: Optional[NetworkConfig] = None


class CommonModelImmutableProps(BaseModel):
    model_type: Optional[ModelType] = None


class CommonModelSystemProps:
    created_at: Optional[str] = None
    last_updated_at: Optional[str] = None
    status: Optional[Status] = None


class ModelServiceMutableProps(CommonModelMutableProps):
    provider_settings: Optional[ProviderSettings] = None


class ModelServiceImmutableProps(CommonModelImmutableProps):
    model_info_configs: Optional[List[ModelInfoConfig]] = None
    model_service_name: Optional[str] = None
    provider: Optional[str] = None


class ModelServicesSystemProps(CommonModelSystemProps):
    model_service_id: Optional[str] = None


class ModelProxyMutableProps(CommonModelMutableProps):
    cpu: Optional[float] = 2
    litellm_version: Optional[str] = None
    memory: Optional[int] = 4096
    model_proxy_name: Optional[str] = None
    proxy_mode: Optional[ProxyMode] = None
    service_region_id: Optional[str] = None
    proxy_config: Optional[ProxyConfig] = None
    execution_role_arn: Optional[str] = None


class ModelProxyImmutableProps(CommonModelImmutableProps):
    pass


class ModelProxySystemProps(CommonModelSystemProps):
    endpoint: Optional[str] = None
    function_name: Optional[str] = None
    model_proxy_id: Optional[str] = None


class ModelServiceCreateInput(
    ModelServiceImmutableProps, ModelServiceMutableProps
):
    """Input parameters for creating a model service"""

    pass


class ModelServiceUpdateInput(ModelServiceMutableProps):
    """Input parameters for updating a model service"""

    pass


class ModelServiceListInput(PageableInput):
    model_type: Optional[ModelType] = None
    provider: Optional[str] = None


class ModelProxyCreateInput(ModelProxyMutableProps, ModelProxyImmutableProps):
    pass


class ModelProxyUpdateInput(ModelProxyMutableProps):
    pass


class ModelProxyListInput(PageableInput):
    proxy_mode: Optional[str] = None
    status: Optional[Status] = None

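As an aid to reading the definitions above, the following is a minimal, illustrative sketch of how the proxy input models could be constructed. It is not part of the package diff; the import path follows the file layout shown in this listing, and every field value is an assumption chosen for illustration.

from agentrun.model.model import (
    ModelProxyCreateInput,
    ModelType,
    ProxyConfig,
    ProxyConfigEndpoint,
    ProxyConfigPolicies,
    ProxyMode,
)

# All values below are hypothetical examples, not defaults from the package.
create_input = ModelProxyCreateInput(
    model_proxy_name="example-proxy",      # field from ModelProxyMutableProps
    model_type=ModelType.LLM,              # field from CommonModelImmutableProps
    proxy_mode=ProxyMode.SINGLE,           # route through a single endpoint
    proxy_config=ProxyConfig(
        endpoints=[
            ProxyConfigEndpoint(
                model_service_name="example-model-service",
                model_names=["example-llm"],
                weight=1,
            )
        ],
        policies=ProxyConfigPolicies(num_retries=2, request_timeout=60),
    ),
)

Because every field is Optional and the base model exposes `model_dump()` (as used later in model_proxy.py), the inputs can presumably also be built from plain dictionaries, assuming a pydantic-compatible BaseModel.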
agentrun/model/model_proxy.py
ADDED

@@ -0,0 +1,439 @@
"""
This file is auto-generated by the code generation script.
Do not modify this file manually.
Use the `make codegen` command to regenerate.

source: agentrun/model/__model_proxy_async_template.py

Model Proxy High-Level API

This module defines the high-level API for model proxy resources.
"""

from typing import List, Optional

import pydash

from agentrun.model.api.data import BaseInfo, ModelDataAPI
from agentrun.utils.config import Config
from agentrun.utils.model import Status
from agentrun.utils.resource import ResourceBase

from .model import (
    BackendType,
    ModelProxyCreateInput,
    ModelProxyImmutableProps,
    ModelProxyListInput,
    ModelProxyMutableProps,
    ModelProxySystemProps,
    ModelProxyUpdateInput,
    PageableInput,
    ProxyMode,
)


class ModelProxy(
    ModelProxyImmutableProps,
    ModelProxyMutableProps,
    ModelProxySystemProps,
    ResourceBase,
):
    """Model proxy"""

    _data_client: Optional[ModelDataAPI] = None

    @classmethod
    def __get_client(cls):
        from .client import ModelClient

        return ModelClient()

    @classmethod
    async def create_async(
        cls, input: ModelProxyCreateInput, config: Optional[Config] = None
    ):
        """Create a model proxy (async).

        Args:
            input: Model proxy input parameters
            config: Configuration

        Returns:
            ModelProxy: The created model proxy object
        """
        return await cls.__get_client().create_async(input, config=config)

    @classmethod
    def create(
        cls, input: ModelProxyCreateInput, config: Optional[Config] = None
    ):
        """Create a model proxy (sync).

        Args:
            input: Model proxy input parameters
            config: Configuration

        Returns:
            ModelProxy: The created model proxy object
        """
        return cls.__get_client().create(input, config=config)

    @classmethod
    async def delete_by_name_async(
        cls, model_proxy_name: str, config: Optional[Config] = None
    ):
        """Delete a model proxy by name (async).

        Args:
            model_proxy_name: Name of the model proxy
            config: Configuration
        """
        return await cls.__get_client().delete_async(
            model_proxy_name, backend_type=BackendType.PROXY, config=config
        )

    @classmethod
    def delete_by_name(
        cls, model_proxy_name: str, config: Optional[Config] = None
    ):
        """Delete a model proxy by name (sync).

        Args:
            model_proxy_name: Name of the model proxy
            config: Configuration
        """
        return cls.__get_client().delete(
            model_proxy_name, backend_type=BackendType.PROXY, config=config
        )

    @classmethod
    async def update_by_name_async(
        cls,
        model_proxy_name: str,
        input: ModelProxyUpdateInput,
        config: Optional[Config] = None,
    ):
        """Update a model proxy by name (async).

        Args:
            model_proxy_name: Name of the model proxy
            input: Model proxy update input parameters
            config: Configuration

        Returns:
            ModelProxy: The updated model proxy object
        """
        return await cls.__get_client().update_async(
            model_proxy_name, input, config=config
        )

    @classmethod
    def update_by_name(
        cls,
        model_proxy_name: str,
        input: ModelProxyUpdateInput,
        config: Optional[Config] = None,
    ):
        """Update a model proxy by name (sync).

        Args:
            model_proxy_name: Name of the model proxy
            input: Model proxy update input parameters
            config: Configuration

        Returns:
            ModelProxy: The updated model proxy object
        """
        return cls.__get_client().update(model_proxy_name, input, config=config)

    @classmethod
    async def get_by_name_async(
        cls, model_proxy_name: str, config: Optional[Config] = None
    ):
        """Get a model proxy by name (async).

        Args:
            model_proxy_name: Name of the model proxy
            config: Configuration

        Returns:
            ModelProxy: The model proxy object
        """
        return await cls.__get_client().get_async(
            model_proxy_name, backend_type=BackendType.PROXY, config=config
        )

    @classmethod
    def get_by_name(
        cls, model_proxy_name: str, config: Optional[Config] = None
    ):
        """Get a model proxy by name (sync).

        Args:
            model_proxy_name: Name of the model proxy
            config: Configuration

        Returns:
            ModelProxy: The model proxy object
        """
        return cls.__get_client().get(
            model_proxy_name, backend_type=BackendType.PROXY, config=config
        )

    @classmethod
    async def _list_page_async(
        cls, page_input: PageableInput, config: Config | None = None, **kwargs
    ):
        return await cls.__get_client().list_async(
            input=ModelProxyListInput(
                **kwargs,
                **page_input.model_dump(),
            ),
            config=config,
        )

    @classmethod
    def _list_page(
        cls, page_input: PageableInput, config: Config | None = None, **kwargs
    ):
        return cls.__get_client().list(
            input=ModelProxyListInput(
                **kwargs,
                **page_input.model_dump(),
            ),
            config=config,
        )

    @classmethod
    async def list_all_async(
        cls,
        *,
        proxy_mode: Optional[str] = None,
        status: Optional[Status] = None,
        config: Optional[Config] = None,
    ) -> List["ModelProxy"]:
        return await cls._list_all_async(
            lambda m: m.model_proxy_id or "",
            config=config,
            proxy_mode=proxy_mode,
            status=status,
        )

    @classmethod
    def list_all(
        cls,
        *,
        proxy_mode: Optional[str] = None,
        status: Optional[Status] = None,
        config: Optional[Config] = None,
    ) -> List["ModelProxy"]:
        return cls._list_all(
            lambda m: m.model_proxy_id or "",
            config=config,
            proxy_mode=proxy_mode,
            status=status,
        )

    async def update_async(
        self, input: ModelProxyUpdateInput, config: Optional[Config] = None
    ):
        """Update this model proxy (async).

        Args:
            input: Model proxy update input parameters
            config: Configuration

        Returns:
            ModelProxy: The updated model proxy object
        """
        if self.model_proxy_name is None:
            raise ValueError(
                "model_proxy_name is required to update a ModelProxy"
            )

        result = await self.update_by_name_async(
            self.model_proxy_name, input, config=config
        )
        self.update_self(result)

        return self

    def update(
        self, input: ModelProxyUpdateInput, config: Optional[Config] = None
    ):
        """Update this model proxy (sync).

        Args:
            input: Model proxy update input parameters
            config: Configuration

        Returns:
            ModelProxy: The updated model proxy object
        """
        if self.model_proxy_name is None:
            raise ValueError(
                "model_proxy_name is required to update a ModelProxy"
            )

        result = self.update_by_name(
            self.model_proxy_name, input, config=config
        )
        self.update_self(result)

        return self

    async def delete_async(self, config: Optional[Config] = None):
        """Delete this model proxy (async).

        Args:
            config: Configuration
        """
        if self.model_proxy_name is None:
            raise ValueError(
                "model_proxy_name is required to delete a ModelProxy"
            )

        return await self.delete_by_name_async(
            self.model_proxy_name, config=config
        )

    def delete(self, config: Optional[Config] = None):
        """Delete this model proxy (sync).

        Args:
            config: Configuration
        """
        if self.model_proxy_name is None:
            raise ValueError(
                "model_proxy_name is required to delete a ModelProxy"
            )

        return self.delete_by_name(self.model_proxy_name, config=config)

    async def get_async(self, config: Optional[Config] = None):
        """Refresh this model proxy's information (async).

        Args:
            config: Configuration

        Returns:
            ModelProxy: The refreshed model proxy object
        """
        if self.model_proxy_name is None:
            raise ValueError(
                "model_proxy_name is required to refresh a ModelProxy"
            )

        result = await self.get_by_name_async(
            self.model_proxy_name, config=config
        )
        self.update_self(result)

        return self

    def get(self, config: Optional[Config] = None):
        """Refresh this model proxy's information (sync).

        Args:
            config: Configuration

        Returns:
            ModelProxy: The refreshed model proxy object
        """
        if self.model_proxy_name is None:
            raise ValueError(
                "model_proxy_name is required to refresh a ModelProxy"
            )

        result = self.get_by_name(self.model_proxy_name, config=config)
        self.update_self(result)

        return self

    async def refresh_async(self, config: Optional[Config] = None):
        """Refresh this model proxy's information (async).

        Args:
            config: Configuration

        Returns:
            ModelProxy: The refreshed model proxy object
        """
        return await self.get_async(config=config)

    def refresh(self, config: Optional[Config] = None):
        """Refresh this model proxy's information (sync).

        Args:
            config: Configuration

        Returns:
            ModelProxy: The refreshed model proxy object
        """
        return self.get(config=config)

    def model_info(self, config: Optional[Config] = None) -> BaseInfo:
        # Lazily create the data-plane client, then point it at the proxy's
        # effective model name: the first configured model name in SINGLE
        # mode, otherwise the proxy name itself.
        cfg = Config.with_configs(self._config, config)

        if self._data_client is None:
            self._data_client = ModelDataAPI(
                self.model_proxy_name or "",
                credential_name=self.credential_name,
                config=cfg,
            )

        self._data_client.update_model_name(
            model_proxy_name=self.model_proxy_name,
            model_name=(
                pydash.get(self, "proxy_config.endpoints[0].model_names[0]")
                if self.proxy_mode == ProxyMode.SINGLE
                else self.model_proxy_name
            )
            or "",
            credential_name=self.credential_name,
            config=cfg,
        )

        return self._data_client.model_info()

    def completions(
        self,
        messages: list,
        model: Optional[str] = None,
        stream: bool = False,
        config: Optional[Config] = None,
        **kwargs,
    ):
        self.model_info(config)
        assert self._data_client

        return self._data_client.completions(
            **kwargs,
            messages=messages,
            model=model,
            stream=stream,
            config=config,
        )

    def responses(
        self,
        messages: list,
        model: Optional[str] = None,
        stream: bool = False,
        config: Optional[Config] = None,
        **kwargs,
    ):
        self.model_info(config)
        assert self._data_client

        return self._data_client.responses(
            **kwargs,
            messages=messages,
            model=model,
            stream=stream,
            config=config,
        )
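To round out the picture of the high-level surface, here is a hedged usage sketch assembled only from the method signatures above. The proxy name, the message payload, and the assumption that `completions` returns a printable response object are illustrative and do not come from the diff.

from agentrun.model.model_proxy import ModelProxy

# Hypothetical proxy name; it must already exist on the platform.
proxy = ModelProxy.get_by_name("example-proxy")

# Chat-style call routed through the proxy; an OpenAI-style `messages` list
# is assumed from the parameter name, not documented in this diff.
reply = proxy.completions(
    messages=[{"role": "user", "content": "Hello"}],
    stream=False,
)
print(reply)

# Each synchronous method has an *_async counterpart, e.g.:
# proxy = await ModelProxy.get_by_name_async("example-proxy")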