agentrun_inner_test-0.0.62-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of agentrun-inner-test might be problematic.
- agentrun/__init__.py +358 -0
- agentrun/agent_runtime/__client_async_template.py +466 -0
- agentrun/agent_runtime/__endpoint_async_template.py +345 -0
- agentrun/agent_runtime/__init__.py +53 -0
- agentrun/agent_runtime/__runtime_async_template.py +477 -0
- agentrun/agent_runtime/api/__data_async_template.py +58 -0
- agentrun/agent_runtime/api/__init__.py +6 -0
- agentrun/agent_runtime/api/control.py +1362 -0
- agentrun/agent_runtime/api/data.py +98 -0
- agentrun/agent_runtime/client.py +868 -0
- agentrun/agent_runtime/endpoint.py +649 -0
- agentrun/agent_runtime/model.py +362 -0
- agentrun/agent_runtime/runtime.py +904 -0
- agentrun/credential/__client_async_template.py +177 -0
- agentrun/credential/__credential_async_template.py +216 -0
- agentrun/credential/__init__.py +28 -0
- agentrun/credential/api/__init__.py +5 -0
- agentrun/credential/api/control.py +606 -0
- agentrun/credential/client.py +319 -0
- agentrun/credential/credential.py +381 -0
- agentrun/credential/model.py +248 -0
- agentrun/integration/__init__.py +21 -0
- agentrun/integration/agentscope/__init__.py +13 -0
- agentrun/integration/agentscope/adapter.py +17 -0
- agentrun/integration/agentscope/builtin.py +88 -0
- agentrun/integration/agentscope/message_adapter.py +185 -0
- agentrun/integration/agentscope/model_adapter.py +60 -0
- agentrun/integration/agentscope/tool_adapter.py +59 -0
- agentrun/integration/builtin/__init__.py +18 -0
- agentrun/integration/builtin/knowledgebase.py +137 -0
- agentrun/integration/builtin/model.py +93 -0
- agentrun/integration/builtin/sandbox.py +1234 -0
- agentrun/integration/builtin/toolset.py +47 -0
- agentrun/integration/crewai/__init__.py +13 -0
- agentrun/integration/crewai/adapter.py +9 -0
- agentrun/integration/crewai/builtin.py +88 -0
- agentrun/integration/crewai/model_adapter.py +31 -0
- agentrun/integration/crewai/tool_adapter.py +26 -0
- agentrun/integration/google_adk/__init__.py +13 -0
- agentrun/integration/google_adk/adapter.py +15 -0
- agentrun/integration/google_adk/builtin.py +88 -0
- agentrun/integration/google_adk/message_adapter.py +144 -0
- agentrun/integration/google_adk/model_adapter.py +46 -0
- agentrun/integration/google_adk/tool_adapter.py +235 -0
- agentrun/integration/langchain/__init__.py +31 -0
- agentrun/integration/langchain/adapter.py +15 -0
- agentrun/integration/langchain/builtin.py +94 -0
- agentrun/integration/langchain/message_adapter.py +141 -0
- agentrun/integration/langchain/model_adapter.py +37 -0
- agentrun/integration/langchain/tool_adapter.py +50 -0
- agentrun/integration/langgraph/__init__.py +36 -0
- agentrun/integration/langgraph/adapter.py +20 -0
- agentrun/integration/langgraph/agent_converter.py +1073 -0
- agentrun/integration/langgraph/builtin.py +88 -0
- agentrun/integration/pydantic_ai/__init__.py +13 -0
- agentrun/integration/pydantic_ai/adapter.py +13 -0
- agentrun/integration/pydantic_ai/builtin.py +88 -0
- agentrun/integration/pydantic_ai/model_adapter.py +44 -0
- agentrun/integration/pydantic_ai/tool_adapter.py +19 -0
- agentrun/integration/utils/__init__.py +112 -0
- agentrun/integration/utils/adapter.py +560 -0
- agentrun/integration/utils/canonical.py +164 -0
- agentrun/integration/utils/converter.py +134 -0
- agentrun/integration/utils/model.py +110 -0
- agentrun/integration/utils/tool.py +1759 -0
- agentrun/knowledgebase/__client_async_template.py +173 -0
- agentrun/knowledgebase/__init__.py +53 -0
- agentrun/knowledgebase/__knowledgebase_async_template.py +438 -0
- agentrun/knowledgebase/api/__data_async_template.py +414 -0
- agentrun/knowledgebase/api/__init__.py +19 -0
- agentrun/knowledgebase/api/control.py +606 -0
- agentrun/knowledgebase/api/data.py +624 -0
- agentrun/knowledgebase/client.py +311 -0
- agentrun/knowledgebase/knowledgebase.py +748 -0
- agentrun/knowledgebase/model.py +270 -0
- agentrun/memory_collection/__client_async_template.py +178 -0
- agentrun/memory_collection/__init__.py +37 -0
- agentrun/memory_collection/__memory_collection_async_template.py +457 -0
- agentrun/memory_collection/api/__init__.py +5 -0
- agentrun/memory_collection/api/control.py +610 -0
- agentrun/memory_collection/client.py +323 -0
- agentrun/memory_collection/memory_collection.py +844 -0
- agentrun/memory_collection/model.py +162 -0
- agentrun/model/__client_async_template.py +357 -0
- agentrun/model/__init__.py +57 -0
- agentrun/model/__model_proxy_async_template.py +270 -0
- agentrun/model/__model_service_async_template.py +267 -0
- agentrun/model/api/__init__.py +6 -0
- agentrun/model/api/control.py +1173 -0
- agentrun/model/api/data.py +196 -0
- agentrun/model/client.py +674 -0
- agentrun/model/model.py +235 -0
- agentrun/model/model_proxy.py +439 -0
- agentrun/model/model_service.py +438 -0
- agentrun/sandbox/__aio_sandbox_async_template.py +523 -0
- agentrun/sandbox/__browser_sandbox_async_template.py +110 -0
- agentrun/sandbox/__client_async_template.py +491 -0
- agentrun/sandbox/__code_interpreter_sandbox_async_template.py +463 -0
- agentrun/sandbox/__init__.py +69 -0
- agentrun/sandbox/__sandbox_async_template.py +463 -0
- agentrun/sandbox/__template_async_template.py +152 -0
- agentrun/sandbox/aio_sandbox.py +912 -0
- agentrun/sandbox/api/__aio_data_async_template.py +335 -0
- agentrun/sandbox/api/__browser_data_async_template.py +140 -0
- agentrun/sandbox/api/__code_interpreter_data_async_template.py +206 -0
- agentrun/sandbox/api/__init__.py +19 -0
- agentrun/sandbox/api/__sandbox_data_async_template.py +107 -0
- agentrun/sandbox/api/aio_data.py +551 -0
- agentrun/sandbox/api/browser_data.py +172 -0
- agentrun/sandbox/api/code_interpreter_data.py +396 -0
- agentrun/sandbox/api/control.py +1051 -0
- agentrun/sandbox/api/playwright_async.py +492 -0
- agentrun/sandbox/api/playwright_sync.py +492 -0
- agentrun/sandbox/api/sandbox_data.py +154 -0
- agentrun/sandbox/browser_sandbox.py +185 -0
- agentrun/sandbox/client.py +925 -0
- agentrun/sandbox/code_interpreter_sandbox.py +823 -0
- agentrun/sandbox/model.py +384 -0
- agentrun/sandbox/sandbox.py +848 -0
- agentrun/sandbox/template.py +217 -0
- agentrun/server/__init__.py +191 -0
- agentrun/server/agui_normalizer.py +180 -0
- agentrun/server/agui_protocol.py +797 -0
- agentrun/server/invoker.py +309 -0
- agentrun/server/model.py +427 -0
- agentrun/server/openai_protocol.py +535 -0
- agentrun/server/protocol.py +140 -0
- agentrun/server/server.py +208 -0
- agentrun/toolset/__client_async_template.py +62 -0
- agentrun/toolset/__init__.py +51 -0
- agentrun/toolset/__toolset_async_template.py +204 -0
- agentrun/toolset/api/__init__.py +17 -0
- agentrun/toolset/api/control.py +262 -0
- agentrun/toolset/api/mcp.py +100 -0
- agentrun/toolset/api/openapi.py +1251 -0
- agentrun/toolset/client.py +102 -0
- agentrun/toolset/model.py +321 -0
- agentrun/toolset/toolset.py +271 -0
- agentrun/utils/__data_api_async_template.py +721 -0
- agentrun/utils/__init__.py +5 -0
- agentrun/utils/__resource_async_template.py +158 -0
- agentrun/utils/config.py +270 -0
- agentrun/utils/control_api.py +105 -0
- agentrun/utils/data_api.py +1121 -0
- agentrun/utils/exception.py +151 -0
- agentrun/utils/helper.py +108 -0
- agentrun/utils/log.py +77 -0
- agentrun/utils/model.py +168 -0
- agentrun/utils/resource.py +291 -0
- agentrun_inner_test-0.0.62.dist-info/METADATA +265 -0
- agentrun_inner_test-0.0.62.dist-info/RECORD +154 -0
- agentrun_inner_test-0.0.62.dist-info/WHEEL +5 -0
- agentrun_inner_test-0.0.62.dist-info/licenses/LICENSE +201 -0
- agentrun_inner_test-0.0.62.dist-info/top_level.txt +1 -0
agentrun/model/api/data.py
@@ -0,0 +1,196 @@
+from typing import Dict, Optional, TYPE_CHECKING, Union
+
+from agentrun.utils.config import Config
+
+if TYPE_CHECKING:
+    from litellm import ResponseInputParam
+
+from agentrun.utils.data_api import DataAPI, ResourceType
+from agentrun.utils.helper import mask_password
+from agentrun.utils.log import logger
+from agentrun.utils.model import BaseModel
+
+
+class BaseInfo(BaseModel):
+    model: Optional[str] = None
+    api_key: Optional[str] = None
+    base_url: Optional[str] = None
+    headers: Optional[Dict[str, str]] = None
+    provider: Optional[str] = None
+
+
+class ModelCompletionAPI:
+
+    def __init__(
+        self,
+        *,
+        api_key: str,
+        base_url: str,
+        model: str,
+        headers: Optional[dict] = None,
+        provider: str = "openai",
+    ) -> None:
+        self.api_key = api_key
+        self.base_url = base_url
+        self.model = model
+        self.provider = provider
+        self.headers = headers or {}
+
+    def completions(
+        self,
+        messages: list = [],
+        model: Optional[str] = None,
+        custom_llm_provider: Optional[str] = None,
+        **kwargs,
+    ):
+        logger.debug(
+            "ModelCompletionAPI completions called %s, api_key: %s,"
+            " messages: %s",
+            self.base_url,
+            mask_password(self.api_key),
+            messages,
+        )
+        kwargs["headers"] = {
+            **self.headers,
+            **kwargs.get("headers", {}),
+        }
+        if kwargs.get("stream_options") is None:
+            kwargs["stream_options"] = {}
+        kwargs["stream_options"]["include_usage"] = True
+
+        from litellm import completion
+
+        return completion(
+            **kwargs,
+            api_key=self.api_key,
+            base_url=self.base_url,
+            model=model or self.model,
+            custom_llm_provider=custom_llm_provider or self.provider,
+            messages=messages,
+        )
+
+    def responses(
+        self,
+        input: Union[str, "ResponseInputParam"],
+        model: Optional[str] = None,
+        custom_llm_provider: Optional[str] = None,
+        **kwargs,
+    ):
+        logger.debug(
+            "ModelCompletionAPI responses called %s, api_key: %s, input: %s",
+            self.base_url,
+            mask_password(self.api_key),
+            input,
+        )
+        kwargs["headers"] = {
+            **self.headers,
+            **kwargs.get("headers", {}),
+        }
+        if kwargs.get("stream_options") is None:
+            kwargs["stream_options"] = {}
+        kwargs["stream_options"]["include_usage"] = True
+        from litellm import responses
+
+        return responses(
+            **kwargs,
+            api_key=self.api_key,
+            base_url=self.base_url,
+            model=model or self.model,
+            custom_llm_provider=custom_llm_provider or self.provider,
+            input=input,
+        )
+
+
+class ModelDataAPI(DataAPI):
+
+    def __init__(
+        self,
+        model_proxy_name: str,
+        model_name: Optional[str] = None,
+        credential_name: Optional[str] = None,
+        provider: Optional[str] = "openai",
+        config: Optional[Config] = None,
+    ) -> None:
+        super().__init__(
+            resource_name=model_proxy_name,
+            resource_type=ResourceType.LiteLLM,
+            config=config,
+        )
+        self.update_model_name(
+            model_proxy_name=model_proxy_name,
+            model_name=model_name,
+            credential_name=credential_name,
+            provider=provider,
+            config=config,
+        )
+
+    def update_model_name(
+        self,
+        model_proxy_name,
+        model_name: Optional[str],
+        credential_name: Optional[str] = None,
+        provider: Optional[str] = "openai",
+        config: Optional[Config] = None,
+    ):
+        self.model_proxy_name = model_proxy_name
+        self.namespace = f"models/{self.model_proxy_name}"
+        self.model_name = model_name
+        self.provider = provider
+        self.access_token = None
+        if not credential_name:
+            self.access_token = ""
+
+        self.config.update(config)
+
+    def model_info(self, config: Optional[Config] = None) -> BaseInfo:
+        cfg = Config.with_configs(self.config, config)
+        _, headers, _ = self.auth(headers=self.config.get_headers(), config=cfg)
+
+        return BaseInfo(
+            api_key="",
+            base_url=self.with_path("/v1").rstrip("/"),
+            model=self.model_name or "",
+            headers=headers,
+            provider=self.provider or "openai",
+        )
+
+    def completions(
+        self,
+        messages: list = [],
+        model: Optional[str] = None,
+        config: Optional[Config] = None,
+        **kwargs,
+    ):
+        info = self.model_info(config=config)
+
+        return ModelCompletionAPI(
+            base_url=info.base_url or "",
+            api_key=info.api_key or "",
+            model=model or info.model or "",
+            headers=info.headers,
+        ).completions(
+            **kwargs,
+            messages=messages,
+            custom_llm_provider="openai",
+        )
+
+    def responses(
+        self,
+        input: Union[str, "ResponseInputParam"],
+        model: Optional[str] = None,
+        config: Optional[Config] = None,
+        **kwargs,
+    ):
+        info = self.model_info(config=config)
+
+        return ModelCompletionAPI(
+            base_url=info.base_url or "",
+            api_key=info.api_key or "",
+            model=model or info.model or "",
+            headers=info.headers,
+        ).responses(
+            **kwargs,
+            custom_llm_provider="openai",
+            model=model,
+            input=input,
+        )
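
For orientation, a minimal usage sketch of the ModelCompletionAPI class added above. It assumes this hunk belongs to agentrun/model/api/data.py (the only file in the listing with 196 added lines); the endpoint, API key, and model name below are placeholders, not values taken from the package.

# Hypothetical usage sketch of ModelCompletionAPI; all values are placeholders.
from agentrun.model.api.data import ModelCompletionAPI

api = ModelCompletionAPI(
    api_key="sk-placeholder",               # placeholder credential
    base_url="https://example.invalid/v1",  # placeholder OpenAI-compatible endpoint
    model="my-model",                       # placeholder model name
)

# completions() merges the default headers, forces usage reporting via
# stream_options, and delegates to litellm.completion with the stored settings.
response = api.completions(messages=[{"role": "user", "content": "Hello"}])
print(response)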