blaxel 0.1.9rc35__py3-none-any.whl → 0.1.9rc37__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- blaxel/agents/__init__.py +53 -16
- blaxel/authentication/__init__.py +3 -4
- blaxel/client/api/compute/__init__.py +0 -0
- blaxel/client/api/compute/create_sandbox.py +166 -0
- blaxel/client/api/compute/delete_sandbox.py +154 -0
- blaxel/client/api/compute/get_sandbox.py +154 -0
- blaxel/client/api/compute/list_sandboxes.py +135 -0
- blaxel/client/api/compute/start_sandbox.py +157 -0
- blaxel/client/api/compute/stop_sandbox.py +157 -0
- blaxel/client/api/compute/update_sandbox.py +179 -0
- blaxel/client/api/default/list_sandbox_hub_definitions.py +123 -0
- blaxel/client/api/functions/list_function_revisions.py +16 -11
- blaxel/client/api/knowledgebases/list_knowledgebase_revisions.py +16 -11
- blaxel/client/api/models/list_model_revisions.py +16 -11
- blaxel/client/api/templates/list_templates.py +16 -11
- blaxel/client/models/__init__.py +32 -2
- blaxel/client/models/agent_spec.py +25 -69
- blaxel/client/models/core_spec.py +1 -45
- blaxel/client/models/function_spec.py +1 -45
- blaxel/client/models/last_n_requests_metric.py +18 -0
- blaxel/client/models/metrics.py +20 -0
- blaxel/client/models/model_spec.py +1 -45
- blaxel/client/models/{agent_chain.py → port.py} +23 -32
- blaxel/client/models/request_total_metric.py +12 -1
- blaxel/client/models/request_total_response_data.py +97 -0
- blaxel/client/models/resource_log.py +9 -0
- blaxel/client/models/resource_metrics.py +144 -0
- blaxel/client/models/resource_metrics_request_total_per_code_previous.py +45 -0
- blaxel/client/models/resource_metrics_rps_per_code_previous.py +45 -0
- blaxel/client/models/runtime.py +83 -7
- blaxel/client/models/runtime_configuration.py +45 -0
- blaxel/client/models/sandbox.py +129 -0
- blaxel/client/models/sandbox_definition.py +181 -0
- blaxel/client/models/sandbox_spec.py +208 -0
- blaxel/client/models/sandboxes.py +129 -0
- blaxel/client/models/serverless_config.py +29 -1
- blaxel/client/models/serverless_config_configuration.py +45 -0
- blaxel/client/models/start_sandbox.py +94 -0
- blaxel/client/models/stop_sandbox.py +94 -0
- blaxel/client/models/trigger.py +98 -0
- blaxel/client/models/trigger_configuration.py +45 -0
- blaxel/client/models/workspace.py +20 -0
- blaxel/client/models/workspace_runtime.py +61 -0
- blaxel/common/autoload.py +0 -4
- blaxel/common/internal.py +75 -0
- blaxel/common/settings.py +6 -1
- blaxel/instrumentation/exporters.py +3 -6
- blaxel/instrumentation/manager.py +5 -3
- blaxel/mcp/client.py +1 -3
- blaxel/mcp/server.py +4 -4
- blaxel/models/__init__.py +2 -1
- blaxel/models/custom/langchain/gemini.py +41 -18
- blaxel/models/custom/llamaindex/cohere.py +25 -16
- blaxel/models/custom/pydantic/gemini.py +0 -1
- blaxel/models/livekit.py +1 -1
- blaxel/tools/__init__.py +63 -22
- blaxel/tools/langchain.py +1 -2
- {blaxel-0.1.9rc35.dist-info → blaxel-0.1.9rc37.dist-info}/METADATA +1 -4
- {blaxel-0.1.9rc35.dist-info → blaxel-0.1.9rc37.dist-info}/RECORD +61 -37
- {blaxel-0.1.9rc35.dist-info → blaxel-0.1.9rc37.dist-info}/WHEEL +0 -0
- {blaxel-0.1.9rc35.dist-info → blaxel-0.1.9rc37.dist-info}/licenses/LICENSE +0 -0
blaxel/common/settings.py
CHANGED
@@ -21,7 +21,7 @@ class Settings:
     @property
     def log_level(self) -> str:
         """Get the log level."""
-        return os.environ.get("LOG_LEVEL", "INFO")
+        return os.environ.get("LOG_LEVEL", "INFO").upper()

     @property
     def base_url(self) -> str:
@@ -69,6 +69,11 @@ class Settings:
         """Is running on bl cloud."""
         return os.environ.get("BL_CLOUD", "") == "true"

+    @property
+    def run_internal_protocol(self) -> str:
+        """Get the run internal protocol."""
+        return os.environ.get("BL_RUN_INTERNAL_PROTOCOL", "https")
+
     @property
     def enable_opentelemetry(self) -> bool:
         """Get the enable opentelemetry."""
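Two behavioral changes land in Settings: log_level is now normalized to upper case, and the new run_internal_protocol property reads BL_RUN_INTERNAL_PROTOCOL with an https default. A minimal sketch of reading both, assuming the module-level settings object that other modules in this diff import via `from ..common.settings import settings`:

import os

# Illustrative only: the environment variables are the ones read in the diff above.
os.environ["LOG_LEVEL"] = "debug"
os.environ["BL_RUN_INTERNAL_PROTOCOL"] = "http"

from blaxel.common.settings import settings

print(settings.log_level)              # "DEBUG" -- normalized to upper case in rc37
print(settings.run_internal_protocol)  # "http"  -- falls back to "https" when unset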
blaxel/instrumentation/exporters.py
CHANGED
@@ -1,11 +1,8 @@
 from typing import Callable, Dict, Sequence

-from opentelemetry.exporter.otlp.proto.http._log_exporter import \
-    OTLPLogExporter
-from opentelemetry.exporter.otlp.proto.http.metric_exporter import \
-    OTLPMetricExporter
-from opentelemetry.exporter.otlp.proto.http.trace_exporter import \
-    OTLPSpanExporter
+from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
+from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
 from opentelemetry.sdk._logs import LogData
 from opentelemetry.sdk.metrics.export import MetricExportResult, MetricsData

blaxel/instrumentation/manager.py
CHANGED
@@ -20,9 +20,11 @@ from opentelemetry.sdk.trace import TracerProvider
 from opentelemetry.sdk.trace.export import BatchSpanProcessor
 from opentelemetry.trace import NoOpTracerProvider

-from blaxel.instrumentation.exporters import (DynamicHeadersLogExporter,
-                                              DynamicHeadersMetricExporter,
-                                              DynamicHeadersSpanExporter)
+from blaxel.instrumentation.exporters import (
+    DynamicHeadersLogExporter,
+    DynamicHeadersMetricExporter,
+    DynamicHeadersSpanExporter,
+)
 from blaxel.instrumentation.span import DefaultAttributesSpanProcessor

 from ..common.settings import Settings
blaxel/mcp/client.py
CHANGED
@@ -1,5 +1,4 @@
 import logging
-import os
 from contextlib import asynccontextmanager
 from typing import Any
 from urllib.parse import urljoin, urlparse
@@ -7,8 +6,7 @@ from urllib.parse import urljoin, urlparse
 import anyio
 import mcp.types as types
 from anyio.abc import TaskStatus
-from anyio.streams.memory import (MemoryObjectReceiveStream,
-                                  MemoryObjectSendStream)
+from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
 from websockets.client import WebSocketClientProtocol
 from websockets.client import connect as ws_connect

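The client change drops an unused `import os` and collapses the anyio memory-stream import to one line. Those stream types are the standard anyio primitives used to pass MCP messages between tasks; a small generic sketch of how they are created and typed (not blaxel-specific):

import anyio
from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream

async def main() -> None:
    send_stream: MemoryObjectSendStream[str]
    receive_stream: MemoryObjectReceiveStream[str]
    send_stream, receive_stream = anyio.create_memory_object_stream(max_buffer_size=10)

    async with send_stream, receive_stream:
        await send_stream.send("hello")
        print(await receive_stream.receive())  # -> "hello"

anyio.run(main)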
blaxel/mcp/server.py
CHANGED
@@ -1,19 +1,18 @@
 import logging
+import traceback
 import uuid
 from contextlib import asynccontextmanager
 from typing import Dict, Literal

 import anyio
 import mcp.types as types
-from anyio.streams.memory import (MemoryObjectReceiveStream,
-                                  MemoryObjectSendStream)
+from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
 from mcp.server.fastmcp import FastMCP as FastMCPBase
 from opentelemetry.trace import Span, StatusCode
 from websockets.server import WebSocketServerProtocol, serve

 from ..common.env import env
 from ..instrumentation.span import SpanManager
-import traceback

 logger = logging.getLogger(__name__)

@@ -66,7 +65,8 @@ class BlaxelMcpServerTransport:
                 "mcp.message.parsed": True,
                 "mcp.method": getattr(msg, "method", None),
                 "mcp.messageId": getattr(msg, "id", None),
-                "mcp.toolName": getattr(getattr(msg, "params", None), "name", None)
+                "mcp.toolName": getattr(getattr(msg, "params", None), "name", None),
+                "span.type": "mcp.message",
             })
             self.spans[client_id+":"+msg.id] = span
             await read_stream_writer.send(msg)
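The server change moves `import traceback` to the top-level imports and adds a `span.type` attribute to each MCP message span, which makes these spans easy to filter in a tracing backend. A sketch of the same tagging with the plain OpenTelemetry API (blaxel's SpanManager wrapper is not shown here, so this does not claim to reproduce it):

from opentelemetry import trace

tracer = trace.get_tracer("blaxel-tracer")

# Attributes mirror the ones set in BlaxelMcpServerTransport above.
with tracer.start_as_current_span(
    "mcp-message",
    attributes={
        "mcp.method": "tools/call",
        "mcp.messageId": "42",
        "mcp.toolName": "search",
        "span.type": "mcp.message",
    },
):
    pass  # handle the MCP message here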
blaxel/models/__init__.py
CHANGED
@@ -4,6 +4,7 @@ from ..client import client
 from ..client.api.models import get_model
 from ..client.models import Model
 from ..common.settings import settings
+
 # This has to be here because livekit plugins must be registered on the main thread
 from .livekit import get_livekit_model

@@ -94,7 +95,7 @@ class BLModel:

         try:
             return await get_model.asyncio(client=client, model_name=self.model_name)
-        except Exception
+        except Exception:
             return None

 def bl_model(model_name, **kwargs):
blaxel/models/custom/langchain/gemini.py
CHANGED
@@ -6,33 +6,56 @@ import logging
 import uuid
 import warnings
 from operator import itemgetter
-from typing import (Any, AsyncIterator, Callable, Dict, Iterator, List,
-                    Mapping, Optional, Sequence, Tuple, Type, Union, cast)
+from typing import (
+    Any,
+    AsyncIterator,
+    Callable,
+    Dict,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)

 import httpx
 import requests
-from langchain_core.callbacks.manager import (AsyncCallbackManagerForLLMRun,
-                                              CallbackManagerForLLMRun)
+from langchain_core.callbacks.manager import AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun
 from langchain_core.language_models import LanguageModelInput
-from langchain_core.language_models.chat_models import (BaseChatModel,
-                                                        LangSmithParams)
-from langchain_core.messages import (AIMessage, AIMessageChunk, BaseMessage,
-                                     FunctionMessage, HumanMessage,
-                                     SystemMessage, ToolMessage)
+from langchain_core.language_models.chat_models import BaseChatModel, LangSmithParams
+from langchain_core.messages import (
+    AIMessage,
+    AIMessageChunk,
+    BaseMessage,
+    FunctionMessage,
+    HumanMessage,
+    SystemMessage,
+    ToolMessage,
+)
 from langchain_core.messages.ai import UsageMetadata
-from langchain_core.messages.tool import (invalid_tool_call, tool_call,
-                                          tool_call_chunk)
+from langchain_core.messages.tool import invalid_tool_call, tool_call, tool_call_chunk
 from langchain_core.output_parsers.openai_tools import (
-    JsonOutputKeyToolsParser, PydanticToolsParser, parse_tool_calls)
-from langchain_core.outputs import (ChatGeneration, ChatGenerationChunk,
-                                    ChatResult)
+    JsonOutputKeyToolsParser,
+    PydanticToolsParser,
+    parse_tool_calls,
+)
+from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.runnables import Runnable, RunnablePassthrough
 from langchain_core.tools import BaseTool
 from langchain_core.utils.function_calling import convert_to_openai_tool
 from PIL import Image
 from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator
-from tenacity import (before_sleep_log, retry, retry_if_exception_type,
-                      stop_after_attempt, wait_exponential)
+from tenacity import (
+    before_sleep_log,
+    retry,
+    retry_if_exception_type,
+    stop_after_attempt,
+    wait_exponential,
+)
 from typing_extensions import Self, is_typeddict

 WARNED_STRUCTURED_OUTPUT_JSON_MODE = False
@@ -189,7 +212,7 @@ def _chat_with_retry(generation_method: Callable, **kwargs: Any) -> Any:
     try:
         # Extract request parameters and other kwargs
         request = kwargs.pop('request', {})
-
+        kwargs.pop('metadata', None)

         # Unpack request parameters into kwargs
         kwargs.update(request)
@@ -238,7 +261,7 @@ async def _achat_with_retry(generation_method: Callable, **kwargs: Any) -> Any:
     try:
         # Extract request parameters and other kwargs
         request = kwargs.pop('request', {})
-
+        kwargs.pop('metadata', None)

         # Unpack request parameters into kwargs
         kwargs.update(request)
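Beyond the import reformatting, both retry helpers in gemini.py now discard a `metadata` kwarg before unpacking the request. The tenacity names imported at the top of the file compose into a retry decorator along these lines; this is a generic sketch of the pattern, not the module's actual `_chat_with_retry` implementation, and the exception type is a placeholder:

import logging

from tenacity import (
    before_sleep_log,
    retry,
    retry_if_exception_type,
    stop_after_attempt,
    wait_exponential,
)

logger = logging.getLogger(__name__)

@retry(
    reraise=True,
    retry=retry_if_exception_type(ConnectionError),  # placeholder exception type
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=1, min=1, max=10),
    before_sleep=before_sleep_log(logger, logging.WARNING),
)
def call_model(**kwargs):
    request = kwargs.pop("request", {})
    kwargs.pop("metadata", None)  # dropped, mirroring the rc37 change above
    kwargs.update(request)
    ...  # issue the HTTP call here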
blaxel/models/custom/llamaindex/cohere.py
CHANGED
@@ -4,27 +4,36 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Union

 import cohere
 from cohere.types import ToolCall
-from llama_index.core.base.llms.types import (ChatMessage, ChatResponse,
-                                              ChatResponseAsyncGen,
-                                              ChatResponseGen,
-                                              CompletionResponse,
-                                              CompletionResponseAsyncGen,
-                                              CompletionResponseGen,
-                                              LLMMetadata, MessageRole)
+from llama_index.core.base.llms.types import (
+    ChatMessage,
+    ChatResponse,
+    ChatResponseAsyncGen,
+    ChatResponseGen,
+    CompletionResponse,
+    CompletionResponseAsyncGen,
+    CompletionResponseGen,
+    LLMMetadata,
+    MessageRole,
+)
 from llama_index.core.bridge.pydantic import Field, PrivateAttr
 from llama_index.core.callbacks import CallbackManager
-from llama_index.core.llms.callbacks import (llm_chat_callback,
-                                             llm_completion_callback)
+from llama_index.core.llms.callbacks import llm_chat_callback, llm_completion_callback
 from llama_index.core.llms.function_calling import FunctionCallingLLM
 from llama_index.core.llms.llm import ToolSelection
 from llama_index.core.tools.types import BaseTool
 from llama_index.core.types import BaseOutputParser, PydanticProgramMode
 from llama_index.llms.cohere.utils import (
-    CHAT_MODELS, _get_message_cohere_format, _message_to_cohere_tool_results,
-    _messages_to_cohere_tool_results_curr_chat_turn, acompletion_with_retry,
-    cohere_modelname_to_contextsize, completion_with_retry,
-    format_to_cohere_tools, is_cohere_function_calling_model,
-    remove_documents_from_messages)
+    CHAT_MODELS,
+    _get_message_cohere_format,
+    _message_to_cohere_tool_results,
+    _messages_to_cohere_tool_results_curr_chat_turn,
+    acompletion_with_retry,
+    cohere_modelname_to_contextsize,
+    completion_with_retry,
+    format_to_cohere_tools,
+    is_cohere_function_calling_model,
+    remove_documents_from_messages,
+)


 class Cohere(FunctionCallingLLM):
@@ -200,9 +209,9 @@ class Cohere(FunctionCallingLLM):
         The request for the Cohere chat API.
         """
         additional_kwargs = messages[-1].additional_kwargs
-
+        documents = additional_kwargs.pop("documents", [])
         # cohere SDK will fail loudly if both connectors and documents are provided
-        if
+        if documents and len(documents) > 0:
             raise ValueError(
                 "Received documents both as a keyword argument and as an prompt additional keyword argument. Please choose only one option."
             )
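The functional change in cohere.py is that documents are now popped out of the last message's additional_kwargs before the conflict check, so they cannot be forwarded twice. A tiny, self-contained illustration of the pop-and-guard pattern (values invented for the example):

additional_kwargs = {"documents": [{"text": "some context"}], "temperature": 0.3}

# Pull documents out so they are not passed through twice.
documents = additional_kwargs.pop("documents", [])
if documents and len(documents) > 0:
    print(f"{len(documents)} document(s) supplied via additional_kwargs")

print(additional_kwargs)  # -> {'temperature': 0.3}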
blaxel/models/livekit.py
CHANGED
blaxel/tools/__init__.py
CHANGED
@@ -4,12 +4,14 @@ import os
 from contextlib import AsyncExitStack
 from logging import getLogger
 from typing import Any, cast
+import traceback

 from mcp import ClientSession
 from mcp.types import CallToolResult
 from mcp.types import Tool as MCPTool

 from ..common.env import env
+from ..common.internal import get_global_unique_hash
 from ..common.settings import settings
 from ..instrumentation.span import SpanManager
 from ..mcp.client import websocket_client
@@ -22,15 +24,19 @@ if os.getenv("BL_SERVER_PORT"):
 DEFAULT_TIMEOUT = 5

 class PersistentWebSocket:
-    def __init__(self, url: str, timeout: int = DEFAULT_TIMEOUT, timeout_enabled: bool = True):
+    def __init__(self, url: str, name: str, timeout: int = DEFAULT_TIMEOUT, timeout_enabled: bool = True):
         self.url = url
+        self.name = name
         self.timeout = timeout
-        self.timeout_enabled = timeout_enabled
         self.session_exit_stack = AsyncExitStack()
         self.client_exit_stack = AsyncExitStack()
         self.session: ClientSession = None
         self.timer_task = None
         self.tools_cache = []
+        if settings.bl_cloud:
+            self.timeout_enabled = False
+        else:
+            self.timeout_enabled = timeout_enabled

     def with_metas(self, metas: dict[str, Any]):
         self.metas = metas
@@ -46,19 +52,32 @@ class PersistentWebSocket:
             logger.debug(f"Tool {tool_name} returned {call_tool_result}")
             if self.timeout_enabled:
                 self._reset_timer()
+            else:
+                await self._close()
             return call_tool_result

     async def list_tools(self):
-
-
-        self.
-
-
-
-
-
-        self.
-
+        logger.debug(f"Listing tools for {self.name}")
+        span_attributes = {
+            "tool.server": self.url,
+            "tool.server_name": self.name,
+            "span.type": "tool.list",
+        }
+        with SpanManager("blaxel-tracer").create_active_span(self.name, span_attributes) as span:
+            await self._initialize()
+            logger.debug(f"Initialized websocket for {self.name}")
+            if self.timeout_enabled:
+                self._remove_timer()
+            logger.debug("Listing tools")
+            list_tools_result = await self.session.list_tools()
+            self.tools_cache = list_tools_result.tools
+            logger.debug(f"Tools listed: {list_tools_result}")
+            if self.timeout_enabled:
+                self._reset_timer()
+            else:
+                await self._close()
+            span.set_attribute("tool.list.result", list_tools_result.model_dump_json())
+            return list_tools_result

     def get_tools(self):
         return self.tools_cache
@@ -125,8 +144,9 @@ def convert_mcp_tool_to_blaxel_tool(
                 "tool.args": json.dumps(arguments),
                 "tool.server": url,
                 "tool.server_name": name,
+                "span.type": "tool.call",
             }
-            with SpanManager("blaxel-tracer").create_active_span("blaxel-tool-call", span_attributes)
+            with SpanManager("blaxel-tracer").create_active_span("blaxel-tool-call", span_attributes):
                 logger.debug(f"Calling tool {tool.name} with arguments {arguments}")
                 call_tool_result = await websocket_client.call_tool(tool.name, arguments)
                 logger.debug(f"Tool {tool.name} returned {call_tool_result}")
@@ -163,22 +183,37 @@ class BlTools:
         self.timeout = timeout
         self.timeout_enabled = timeout_enabled

-    def
-
+    def _internal_url(self, name: str):
+        """Get the internal URL for the agent using a hash of workspace and agent name."""
+        hash = get_global_unique_hash(settings.workspace, "function", name)
+        return f"{settings.run_internal_protocol}://bl-{settings.env}-{hash}.{settings.run_internal_hostname}"

-    def
+    def _forced_url(self, name: str):
+        """Get the forced URL from environment variables if set."""
         env_var = name.replace("-", "_").upper()
         if env[f"BL_FUNCTION_{env_var}_URL"]:
             return env[f"BL_FUNCTION_{env_var}_URL"]
-
-
-
+        return None
+
+    def _external_url(self, name: str):
+        return f"{settings.run_url}/{settings.workspace}/functions/{name}"

-    def _fallback_url(self, name: str)
+    def _fallback_url(self, name: str):
         if self._external_url(name) != self._url(name):
             return self._external_url(name)
         return None

+    def _url(self, name: str):
+        logger.debug(f"Getting URL for {name}")
+        if self._forced_url(name):
+            logger.debug(f"Forced URL found for {name}: {self._forced_url(name)}")
+            return self._forced_url(name)
+        if settings.run_internal_hostname:
+            logger.debug(f"Internal hostname found for {name}: {self._internal_url(name)}")
+            return self._internal_url(name)
+        logger.debug(f"No URL found for {name}, using external URL")
+        return self._external_url(name)
+
     def get_tools(self) -> list[Tool]:
         """Get a list of all tools from all connected servers."""
         all_tools: list[Tool] = []
@@ -240,8 +275,13 @@ class BlTools:
             except Exception as e:
                 if not self._fallback_url(name):
                     raise e
+                logger.warning(f"Error connecting to {name}: {e}\n{traceback.format_exc()}")
                 url = self._fallback_url(name)
-                await self.connect_with_url(name, url)
+                try:
+                    await self.connect_with_url(name, url)
+                except Exception as e:
+                    logger.error(f"Error connecting to {name} with fallback URL: {e}\n{traceback.format_exc()}")
+                    raise e

     async def connect_with_url(
         self, name: str, url: str
@@ -255,7 +295,8 @@ class BlTools:
             logger.debug(f"Initializing session and loading tools from {url}")

             if not toolPersistances.get(name):
-
+                logger.debug(f"Creating new persistent websocket for {name}")
+                toolPersistances[name] = PersistentWebSocket(url, name, timeout=self.timeout, timeout_enabled=self.timeout_enabled)
             await toolPersistances[name].list_tools()
             logger.debug(f"Loaded {len(toolPersistances[name].get_tools())} tools from {url}")
             return toolPersistances[name].with_metas(self.metas)
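The largest change is in BlTools: tool servers now resolve their URL in a fixed order (forced env override, then internal hostname, then external run URL), and fallback connection errors are logged with tracebacks. A standalone restatement of that resolution order; `make_hash` below is a hypothetical stand-in for blaxel's `get_global_unique_hash`, whose implementation is not part of this diff:

import hashlib
import os


def make_hash(workspace: str, kind: str, name: str) -> str:
    # Hypothetical stand-in: the real get_global_unique_hash lives in
    # blaxel/common/internal.py, which this diff adds but does not show.
    return hashlib.sha256(f"{workspace}:{kind}:{name}".encode()).hexdigest()[:12]


def resolve_function_url(
    name: str,
    workspace: str,
    env_name: str,
    run_url: str,
    internal_hostname: str | None,
    internal_protocol: str = "https",
) -> str:
    # 1. Forced override: BL_FUNCTION_<NAME>_URL wins if set.
    forced = os.environ.get(f"BL_FUNCTION_{name.replace('-', '_').upper()}_URL")
    if forced:
        return forced
    # 2. Internal hostname: hash-based host on the internal network.
    if internal_hostname:
        return f"{internal_protocol}://bl-{env_name}-{make_hash(workspace, 'function', name)}.{internal_hostname}"
    # 3. External run URL.
    return f"{run_url}/{workspace}/functions/{name}"


print(resolve_function_url("my-tool", "acme", "prod", "https://run.blaxel.ai", None))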
blaxel/tools/langchain.py
CHANGED
@@ -1,8 +1,7 @@
 from typing import Any

 from langchain_core.tools import StructuredTool
-from mcp.types import (CallToolResult, EmbeddedResource, ImageContent,
-                       TextContent)
+from mcp.types import CallToolResult, EmbeddedResource, ImageContent, TextContent

 from .types import Tool, ToolException

{blaxel-0.1.9rc35.dist-info → blaxel-0.1.9rc37.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: blaxel
-Version: 0.1.9rc35
+Version: 0.1.9rc37
 Summary: Add your description here
 Author-email: cploujoux <cploujoux@blaxel.ai>
 License-File: LICENSE
@@ -25,9 +25,6 @@ Requires-Dist: pyyaml<6.1.0,>=6.0.2
 Requires-Dist: requests<2.33.0,>=2.32.3
 Requires-Dist: tomli>=2.2.1
 Requires-Dist: websockets<15.0.0
-Provides-Extra: crewai
-Requires-Dist: crewai>=0.102.0; extra == 'crewai'
-Requires-Dist: opentelemetry-instrumentation-crewai>=0.39.0; extra == 'crewai'
 Provides-Extra: google-adk
 Requires-Dist: google-adk>=0.2.0; extra == 'google-adk'
 Requires-Dist: litellm>=1.63.11; extra == 'google-adk'
|