blaxel 0.2.36__py3-none-any.whl → 0.2.38__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- blaxel/__init__.py +2 -2
- blaxel/core/client/models/create_job_execution_request_env.py +3 -3
- blaxel/core/client/models/preview.py +48 -1
- blaxel/core/client/models/sandbox.py +10 -0
- blaxel/core/jobs/__init__.py +2 -2
- blaxel/core/sandbox/__init__.py +12 -0
- blaxel/core/sandbox/client/api/system/__init__.py +0 -0
- blaxel/core/sandbox/client/api/system/get_health.py +134 -0
- blaxel/core/sandbox/client/api/system/post_upgrade.py +196 -0
- blaxel/core/sandbox/client/models/__init__.py +8 -0
- blaxel/core/sandbox/client/models/content_search_match.py +24 -25
- blaxel/core/sandbox/client/models/content_search_response.py +25 -29
- blaxel/core/sandbox/client/models/find_match.py +13 -14
- blaxel/core/sandbox/client/models/find_response.py +21 -24
- blaxel/core/sandbox/client/models/fuzzy_search_match.py +17 -19
- blaxel/core/sandbox/client/models/fuzzy_search_response.py +21 -24
- blaxel/core/sandbox/client/models/health_response.py +159 -0
- blaxel/core/sandbox/client/models/process_upgrade_state.py +20 -0
- blaxel/core/sandbox/client/models/upgrade_request.py +71 -0
- blaxel/core/sandbox/client/models/upgrade_status.py +125 -0
- blaxel/core/sandbox/default/__init__.py +2 -0
- blaxel/core/sandbox/default/filesystem.py +20 -6
- blaxel/core/sandbox/default/preview.py +48 -1
- blaxel/core/sandbox/default/process.py +66 -21
- blaxel/core/sandbox/default/sandbox.py +36 -5
- blaxel/core/sandbox/default/system.py +71 -0
- blaxel/core/sandbox/sync/__init__.py +2 -0
- blaxel/core/sandbox/sync/filesystem.py +19 -2
- blaxel/core/sandbox/sync/preview.py +50 -3
- blaxel/core/sandbox/sync/process.py +38 -15
- blaxel/core/sandbox/sync/sandbox.py +29 -4
- blaxel/core/sandbox/sync/system.py +71 -0
- blaxel/core/sandbox/types.py +212 -5
- blaxel/core/tools/__init__.py +4 -0
- blaxel/core/volume/volume.py +10 -0
- blaxel/crewai/model.py +81 -44
- blaxel/crewai/tools.py +85 -2
- blaxel/googleadk/model.py +22 -3
- blaxel/googleadk/tools.py +25 -6
- blaxel/langgraph/custom/gemini.py +19 -12
- blaxel/langgraph/model.py +26 -18
- blaxel/langgraph/tools.py +6 -12
- blaxel/livekit/model.py +7 -2
- blaxel/livekit/tools.py +3 -1
- blaxel/llamaindex/model.py +145 -84
- blaxel/llamaindex/tools.py +6 -4
- blaxel/openai/model.py +7 -1
- blaxel/openai/tools.py +13 -3
- blaxel/pydantic/model.py +38 -24
- blaxel/pydantic/tools.py +37 -4
- blaxel-0.2.38.dist-info/METADATA +528 -0
- {blaxel-0.2.36.dist-info → blaxel-0.2.38.dist-info}/RECORD +54 -45
- blaxel-0.2.36.dist-info/METADATA +0 -228
- {blaxel-0.2.36.dist-info → blaxel-0.2.38.dist-info}/WHEEL +0 -0
- {blaxel-0.2.36.dist-info → blaxel-0.2.38.dist-info}/licenses/LICENSE +0 -0
blaxel/core/sandbox/types.py  CHANGED

@@ -4,7 +4,14 @@ from typing import Any, Callable, Dict, List, TypeVar, Union
 import httpx
 from attrs import define as _attrs_define

-from ..client.models import
+from ..client.models import (
+    Env,
+    Port,
+    PortProtocol,
+    Sandbox,
+    SandboxLifecycle,
+    VolumeAttachment,
+)
 from ..client.types import UNSET
 from .client.models.process_request import ProcessRequest
 from .client.models.process_response import ProcessResponse

@@ -205,7 +212,7 @@ class SandboxCreateConfiguration:
             if isinstance(port, Port):
                 # If it's already a Port object, ensure protocol defaults to HTTP
                 if port.protocol is UNSET or not port.protocol:
-                    port.protocol =
+                    port.protocol = PortProtocol.HTTP
                 port_objects.append(port)
             elif isinstance(port, dict):
                 # Convert dict to Port object with HTTP as default protocol

@@ -218,20 +225,22 @@ class SandboxCreateConfiguration:

         return port_objects

-    def _normalize_envs(self) -> List[
+    def _normalize_envs(self) -> List[Env] | None:
         """Convert envs to list of dicts with name and value keys."""
         if not self.envs:
             return None

         env_objects = []
         for env in self.envs:
-            if isinstance(env,
+            if isinstance(env, Env):
+                env_objects.append(env)
+            elif isinstance(env, dict):
                 # Validate that the dict has the required keys
                 if "name" not in env or "value" not in env:
                     raise ValueError(
                         f"Environment variable dict must have 'name' and 'value' keys: {env}"
                     )
-                env_objects.append(
+                env_objects.append(Env(name=env["name"], value=env["value"]))
             else:
                 raise ValueError(
                     f"Invalid env type: {type(env)}. Expected dict with 'name' and 'value' keys."

@@ -385,3 +394,201 @@ class Context:
     @classmethod
     def from_json(cls, data: Dict[str, Any]) -> "Context":
         return cls(id=str(data.get("id") or data.get("context_id") or ""))
+
+
+class StreamHandle:
+    """Handle for managing a streaming operation (sync version).
+
+    Can be used as a context manager for automatic cleanup:
+
+        with sandbox.process.stream_logs(name, options) as handle:
+            # do something
+        # handle is automatically closed
+
+    Or used manually:
+
+        handle = sandbox.process.stream_logs(name, options)
+        try:
+            # do something
+        finally:
+            handle.close()
+    """
+
+    def __init__(self, close_func: Callable[[], None]):
+        self._close_func = close_func
+        self._closed = False
+
+    def close(self) -> None:
+        """Close the stream and stop receiving data."""
+        if not self._closed:
+            self._close_func()
+            self._closed = True
+
+    @property
+    def closed(self) -> bool:
+        """Returns True if the stream handle has been closed."""
+        return self._closed
+
+    def __enter__(self) -> "StreamHandle":
+        return self
+
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Backward compatibility: support dict-like access
+    def __getitem__(self, key: str) -> Callable[[], None]:
+        if key == "close":
+            return self.close
+        raise KeyError(key)
+
+
+class AsyncStreamHandle:
+    """Handle for managing a streaming operation (async version).
+
+    Can be used as an async context manager for automatic cleanup:
+
+        async with sandbox.process.stream_logs(name, options) as handle:
+            # do something
+        # handle is automatically closed
+
+    Or used manually:
+
+        handle = sandbox.process.stream_logs(name, options)
+        try:
+            # do something
+        finally:
+            handle.close()
+    """
+
+    def __init__(self, close_func: Callable[[], None]):
+        self._close_func = close_func
+        self._closed = False
+
+    def close(self) -> None:
+        """Close the stream and stop receiving data."""
+        if not self._closed:
+            self._close_func()
+            self._closed = True
+
+    @property
+    def closed(self) -> bool:
+        """Returns True if the stream handle has been closed."""
+        return self._closed
+
+    async def __aenter__(self) -> "AsyncStreamHandle":
+        return self
+
+    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Also support sync context manager for convenience
+    def __enter__(self) -> "AsyncStreamHandle":
+        return self
+
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Backward compatibility: support dict-like access
+    def __getitem__(self, key: str) -> Callable[[], None]:
+        if key == "close":
+            return self.close
+        raise KeyError(key)
+
+
+class WatchHandle:
+    """Handle for managing a file system watch operation (sync version).
+
+    Can be used as a context manager for automatic cleanup:
+
+        with sandbox.fs.watch(path, callback) as handle:
+            # do something
+        # handle is automatically closed
+
+    Or used manually:
+
+        handle = sandbox.fs.watch(path, callback)
+        try:
+            # do something
+        finally:
+            handle.close()
+    """
+
+    def __init__(self, close_func: Callable[[], None]):
+        self._close_func = close_func
+        self._closed = False
+
+    def close(self) -> None:
+        """Close the watch and stop receiving events."""
+        if not self._closed:
+            self._close_func()
+            self._closed = True
+
+    @property
+    def closed(self) -> bool:
+        """Returns True if the watch handle has been closed."""
+        return self._closed
+
+    def __enter__(self) -> "WatchHandle":
+        return self
+
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Backward compatibility: support dict-like access
+    def __getitem__(self, key: str) -> Callable[[], None]:
+        if key == "close":
+            return self.close
+        raise KeyError(key)
+
+
+class AsyncWatchHandle:
+    """Handle for managing a file system watch operation (async version).
+
+    Can be used as an async context manager for automatic cleanup:
+
+        async with sandbox.fs.watch(path, callback) as handle:
+            # do something
+        # handle is automatically closed
+
+    Or used manually:
+
+        handle = sandbox.fs.watch(path, callback)
+        try:
+            # do something
+        finally:
+            handle.close()
+    """
+
+    def __init__(self, close_func: Callable[[], None]):
+        self._close_func = close_func
+        self._closed = False
+
+    def close(self) -> None:
+        """Close the watch and stop receiving events."""
+        if not self._closed:
+            self._close_func()
+            self._closed = True
+
+    @property
+    def closed(self) -> bool:
+        """Returns True if the watch handle has been closed."""
+        return self._closed
+
+    async def __aenter__(self) -> "AsyncWatchHandle":
+        return self
+
+    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Also support sync context manager for convenience
+    def __enter__(self) -> "AsyncWatchHandle":
+        return self
+
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Backward compatibility: support dict-like access
+    def __getitem__(self, key: str) -> Callable[[], None]:
+        if key == "close":
+            return self.close
+        raise KeyError(key)
blaxel/core/tools/__init__.py  CHANGED

@@ -254,6 +254,10 @@ class PersistentMcpClient:
                 await self.client_exit_stack.aclose()
             except Exception as e:
                 logger.debug(f"Error closing client exit stack: {e}")
+            # Create fresh exit stacks so that future initialize() calls
+            # don't reuse stacks tainted by old cancel scopes
+            self.session_exit_stack = AsyncExitStack()
+            self.client_exit_stack = AsyncExitStack()
             logger.debug("WebSocket connection closed due to inactivity.")

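A toy sketch of the same reset pattern in isolation; ReconnectingClient is illustrative and not part of blaxel, it only mirrors the close() logic shown above:

```python
from contextlib import AsyncExitStack


class ReconnectingClient:
    """Illustrative only: mirrors the diff above, where teardown rebuilds the
    exit stacks so a later initialize() starts from clean ones instead of
    stacks tainted by old cancel scopes."""

    def __init__(self) -> None:
        self.session_exit_stack = AsyncExitStack()
        self.client_exit_stack = AsyncExitStack()

    async def close(self) -> None:
        try:
            await self.session_exit_stack.aclose()
            await self.client_exit_stack.aclose()
        except Exception:
            pass  # best-effort teardown, as in the original
        # Same move as the diff: replace the used stacks with fresh ones.
        self.session_exit_stack = AsyncExitStack()
        self.client_exit_stack = AsyncExitStack()
```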
blaxel/core/volume/volume.py  CHANGED

@@ -1,3 +1,5 @@
+import asyncio
+import time
 import uuid
 from typing import Callable, Dict, List, Union

@@ -135,6 +137,8 @@ class VolumeCreateConfiguration:


 class VolumeInstance:
+    delete: "_AsyncDeleteDescriptor"
+
     def __init__(self, volume: Volume):
         self.volume = volume

@@ -273,6 +277,8 @@ class VolumeInstance:


 class SyncVolumeInstance:
+    delete: "_SyncDeleteDescriptor"
+
     """Synchronous volume instance for managing persistent storage."""

     def __init__(self, volume: Volume):

@@ -500,6 +506,8 @@ async def _update_volume_by_name(
         status_code = int(response.code) if response.code is not UNSET else None
         message = response.message if response.message is not UNSET else response.error
         raise VolumeAPIError(message, status_code=status_code, code=response.error)
+    # This is for safe update
+    await asyncio.sleep(0.5)
     return VolumeInstance(response)


@@ -576,6 +584,8 @@ def _update_volume_by_name_sync(
         status_code = int(response.code) if response.code is not UNSET else None
         message = response.message if response.message is not UNSET else response.error
         raise VolumeAPIError(message, status_code=status_code, code=response.error)
+    # This is for safe update
+    time.sleep(0.5)
     return SyncVolumeInstance(response)

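The new class-level annotations (delete: "_AsyncDeleteDescriptor" and "_SyncDeleteDescriptor") only declare the attribute for type checkers; the descriptor classes themselves are not shown in this diff. A generic sketch of what such a descriptor pattern can look like, using a hypothetical _AsyncMethodDescriptor that should not be read as blaxel's actual implementation:

```python
import asyncio
from typing import Any, Callable, Coroutine


class _AsyncMethodDescriptor:
    """Hypothetical descriptor: binds an async callable so instance.delete() awaits it."""

    def __init__(self, func: Callable[..., Coroutine[Any, Any, Any]]) -> None:
        self._func = func

    def __get__(self, obj: Any, objtype: Any = None):
        if obj is None:
            return self
        # Return a callable bound to the instance.
        return lambda *args, **kwargs: self._func(obj, *args, **kwargs)


class VolumeLike:
    # The class-level annotation mirrors the diff; the assignment supplies behaviour.
    delete: "_AsyncMethodDescriptor"

    async def _delete_impl(self) -> str:
        await asyncio.sleep(0)  # stand-in for the real API call
        return "deleted"

    delete = _AsyncMethodDescriptor(_delete_impl)


print(asyncio.run(VolumeLike().delete()))  # -> deleted
```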
blaxel/crewai/model.py  CHANGED

@@ -1,6 +1,8 @@
 from logging import getLogger

-
+import httpx
+from crewai import LLM  # type: ignore[import-not-found]
+from crewai.llms.hooks.base import BaseInterceptor  # type: ignore[import-not-found]

 from blaxel.core import bl_model as bl_model_core
 from blaxel.core import settings

@@ -8,62 +10,97 @@ from blaxel.core import settings
 logger = getLogger(__name__)


-class
-
-
-
+class AuthInterceptor(BaseInterceptor[httpx.Request, httpx.Response]):
+    """Interceptor that injects dynamic auth headers into every HTTP request.
+
+    Used for crewai native providers (OpenAI, Anthropic, Gemini, etc.)
+    where the LLM.__new__ factory returns a provider-specific instance
+    and subclass overrides are not possible.
+    """
+
+    def on_outbound(self, message: httpx.Request) -> httpx.Request:
+        auth_headers = settings.auth.get_headers()
+        # Remove the SDK's default "Authorization: Bearer replaced" header
+        # when our auth uses a different header (e.g. X-Blaxel-Authorization with API keys)
+        if "Authorization" not in auth_headers:
+            message.headers.pop("Authorization", None)
+            message.headers.pop("authorization", None)
+        for key, value in auth_headers.items():
+            message.headers[key] = value
+        return message
+
+    def on_inbound(self, message: httpx.Response) -> httpx.Response:
+        return message
+
+    async def aon_outbound(self, message: httpx.Request) -> httpx.Request:
+        return self.on_outbound(message)
+
+    async def aon_inbound(self, message: httpx.Response) -> httpx.Response:
+        return message
+
+
+# Provider types that crewai routes to native SDK implementations.
+# These support the interceptor mechanism for auth.
+_NATIVE_PROVIDER_PREFIXES = {"openai", "anthropic", "gemini", "azure", "bedrock"}
+
+
+def _is_native_route(provider_prefix: str) -> bool:
+    """Check if a provider prefix will be routed to a native SDK by crewai."""
+    return provider_prefix.lower() in _NATIVE_PROVIDER_PREFIXES


 async def bl_model(name: str, **kwargs):
     url, type, model = await bl_model_core(name).get_parameters()
+
+    # Map blaxel model types to crewai provider prefixes and base URLs
     if type == "mistral":
-
-
-            api_key="replaced",
-            base_url=f"{url}/v1",
-            **kwargs,
-        )
+        provider_prefix = "mistral"
+        base_url = f"{url}/v1"
     elif type == "xai":
-
-
-            api_key="replaced",
-            base_url=f"{url}/v1",
-            **kwargs,
-        )
+        provider_prefix = "groq"
+        base_url = f"{url}/v1"
     elif type == "deepseek":
-
-
-            api_key="replaced",
-            base_url=f"{url}/v1",
-            **kwargs,
-        )
+        provider_prefix = "openai"
+        base_url = f"{url}/v1"
     elif type == "anthropic":
-
-
-            api_key="replaced",
-            base_url=url,
-            **kwargs,
-        )
+        provider_prefix = "anthropic"
+        base_url = url
     elif type == "gemini":
-
-
-            api_key="replaced",
-            base_url=f"{url}/v1beta/models/{model}",
-            **kwargs,
-        )
+        provider_prefix = "gemini"
+        base_url = f"{url}/v1beta/models/{model}"
     elif type == "cerebras":
-
-
-            api_key="replaced",
-            base_url=f"{url}/v1",
-            **kwargs,
-        )
+        provider_prefix = "cerebras"
+        base_url = f"{url}/v1"
     else:
         if type != "openai":
             logger.warning(f"Model {model} is not supported by CrewAI, defaulting to OpenAI")
-
-
+        provider_prefix = "openai"
+        base_url = f"{url}/v1"
+
+    model_string = f"{provider_prefix}/{model}"
+    auth_headers = settings.auth.get_headers()
+
+    if _is_native_route(provider_prefix):
+        # Native providers: use interceptor for dynamic auth headers.
+        # Always pass api_key="replaced" because crewai's native providers
+        # require a non-None api_key. The AuthInterceptor handles stripping
+        # the dummy Authorization header and injecting the real auth.
+        return LLM(
+            model=model_string,
             api_key="replaced",
-            base_url=
+            base_url=base_url,
+            interceptor=AuthInterceptor(),
+            **kwargs,
+        )
+    else:
+        # LiteLLM fallback: pass auth headers via extra_headers param.
+        # Omit api_key when auth uses X-Blaxel-Authorization to prevent
+        # litellm from adding "Authorization: Bearer replaced".
+        llm_api_key = "replaced" if "Authorization" in auth_headers else None
+        return LLM(
+            model=model_string,
+            api_key=llm_api_key,
+            base_url=base_url,
+            extra_headers=auth_headers,
             **kwargs,
         )
blaxel/crewai/tools.py  CHANGED

@@ -1,10 +1,86 @@
+from typing import Any
+
 from crewai.tools import BaseTool
+from pydantic import BaseModel

 from blaxel.core.tools import bl_tools as bl_tools_core
-from blaxel.core.tools.common import create_model_from_json_schema
 from blaxel.core.tools.types import Tool


+def _clean_schema_for_openai(schema: dict) -> dict:
+    """Clean JSON schema to be compatible with OpenAI strict mode.
+
+    Recursively resolves anyOf patterns, ensures all schemas have type keys,
+    removes additionalProperties and $schema, and ensures object types have
+    properties and required fields.
+    """
+    if not isinstance(schema, dict):
+        return schema
+
+    cleaned = schema.copy()
+
+    # Remove unsupported keys
+    cleaned.pop("$schema", None)
+    cleaned.pop("additionalProperties", None)
+
+    # Resolve anyOf: pick the non-null type
+    if "anyOf" in cleaned:
+        any_of = cleaned.pop("anyOf")
+        non_null = [s for s in any_of if s.get("type") != "null"]
+        if non_null:
+            # Merge the first non-null variant into current schema
+            resolved = _clean_schema_for_openai(non_null[0])
+            cleaned.update(resolved)
+        else:
+            cleaned["type"] = "string"
+
+    # Ensure type exists
+    if "type" not in cleaned and "properties" in cleaned:
+        cleaned["type"] = "object"
+
+    # Handle object types
+    if cleaned.get("type") == "object":
+        if "properties" not in cleaned:
+            cleaned["properties"] = {}
+        if "required" not in cleaned:
+            cleaned["required"] = list(cleaned["properties"].keys())
+
+    # Recursively clean properties
+    if "properties" in cleaned:
+        cleaned["properties"] = {
+            k: _clean_schema_for_openai(v) for k, v in cleaned["properties"].items()
+        }
+
+    # Recursively clean array items
+    if "items" in cleaned:
+        cleaned["items"] = _clean_schema_for_openai(cleaned["items"])
+        # Ensure items has a type
+        if "type" not in cleaned["items"]:
+            cleaned["items"]["type"] = "string"
+
+    return cleaned
+
+
+def _make_clean_args_schema(tool: Tool) -> type[BaseModel]:
+    """Create a Pydantic model whose JSON schema returns the pre-cleaned schema.
+
+    CrewAI calls model_json_schema() on args_schema to build the OpenAI tool
+    parameters. By overriding model_json_schema we ensure the cleaned schema
+    is used directly, avoiding issues with Pydantic re-introducing anyOf or
+    dropping type keys on array items.
+    """
+    clean = _clean_schema_for_openai(tool.input_schema)
+
+    class CleanArgsSchema(BaseModel):
+        @classmethod
+        def model_json_schema(cls, *args: Any, **kwargs: Any) -> dict[str, Any]:
+            return clean
+
+    CleanArgsSchema.__name__ = f"{tool.name}Schema"
+    CleanArgsSchema.__qualname__ = f"{tool.name}Schema"
+    return CleanArgsSchema
+
+
 class CrewAITool(BaseTool):
     _tool: Tool

@@ -12,13 +88,20 @@ class CrewAITool(BaseTool):
         super().__init__(
             name=tool.name,
             description=tool.description,
-            args_schema=
+            args_schema=_make_clean_args_schema(tool),
         )
         self._tool = tool

     def _run(self, *args, **kwargs):
+        if not self._tool.sync_coroutine:
+            raise ValueError(f"Tool {self._tool.name} does not have a sync_coroutine defined")
         return self._tool.sync_coroutine(**kwargs)

+    async def _arun(self, *args, **kwargs):
+        if not self._tool.coroutine:
+            raise ValueError(f"Tool {self._tool.name} does not have a coroutine defined")
+        return await self._tool.coroutine(**kwargs)
+

 async def bl_tools(tools_names: list[str], **kwargs) -> list[BaseTool]:
     tools = bl_tools_core(tools_names, **kwargs)
blaxel/googleadk/model.py  CHANGED

@@ -1,6 +1,9 @@
 from logging import getLogger

-from google.adk.models.lite_llm import
+from google.adk.models.lite_llm import (  # type: ignore[import-not-found]
+    LiteLlm,
+    LiteLLMClient,
+)

 from blaxel.core import bl_model as bl_model_core
 from blaxel.core import settings

@@ -23,7 +26,15 @@ class AuthenticatedLiteLLMClient(LiteLLMClient):
         Returns:
             The model response as a message.
         """
-
+        auth_headers = settings.auth.get_headers()
+        extra = dict(auth_headers)
+        # When auth uses X-Blaxel-Authorization (API keys), override the
+        # Authorization header that litellm sets from api_key or OPENAI_API_KEY
+        # env var. Without this, the server sees an invalid Authorization header
+        # and rejects the request.
+        if "Authorization" not in auth_headers:
+            extra["Authorization"] = ""
+        kwargs["extra_headers"] = extra
         return await super().acompletion(
             model=model,
             messages=messages,

@@ -44,7 +55,15 @@ class AuthenticatedLiteLLMClient(LiteLLMClient):
         Returns:
             The response from the model.
         """
-
+        auth_headers = settings.auth.get_headers()
+        extra = dict(auth_headers)
+        # When auth uses X-Blaxel-Authorization (API keys), override the
+        # Authorization header that litellm sets from api_key or OPENAI_API_KEY
+        # env var. Without this, the server sees an invalid Authorization header
+        # and rejects the request.
+        if "Authorization" not in auth_headers:
+            extra["Authorization"] = ""
+        kwargs["extra_headers"] = extra
         return super().completion(
             model=model,
             messages=messages,
blaxel/googleadk/tools.py  CHANGED

@@ -1,8 +1,8 @@
 import inspect
-from typing import Any
+from typing import Any

-from google.adk.tools import BaseTool, ToolContext
-from google.genai import types
+from google.adk.tools import BaseTool, ToolContext  # type: ignore[import-not-found]
+from google.genai import types  # type: ignore[import-not-found]

 from blaxel.core.tools import bl_tools as bl_tools_core
 from blaxel.core.tools.types import Tool

@@ -31,15 +31,33 @@ class GoogleADKTool(BaseTool):
         if "additionalProperties" in cleaned_schema:
             del cleaned_schema["additionalProperties"]

+        # Google genai Schema expects type as a single enum string (e.g. "STRING"),
+        # not a JSON Schema union list like ["null", "string"].
+        if "type" in cleaned_schema and isinstance(cleaned_schema["type"], list):
+            type_list = [t for t in cleaned_schema["type"] if t != "null"]
+            cleaned_schema["type"] = type_list[0].upper() if type_list else "STRING"
+            # Mark as nullable if "null" was in the original list
+            if "null" in schema["type"]:
+                cleaned_schema["nullable"] = True
+        elif "type" in cleaned_schema and isinstance(cleaned_schema["type"], str):
+            cleaned_schema["type"] = cleaned_schema["type"].upper()
+
+        # Ensure object types have properties
+        if cleaned_schema.get("type") == "OBJECT" and "properties" not in cleaned_schema:
+            cleaned_schema["properties"] = {}
+
         # Recursively clean properties if they exist
         if "properties" in cleaned_schema:
             cleaned_schema["properties"] = {
                 k: self._clean_schema(v) for k, v in cleaned_schema["properties"].items()
             }

+        # Recursively clean items for array types
+        if "items" in cleaned_schema and isinstance(cleaned_schema["items"], dict):
+            cleaned_schema["items"] = self._clean_schema(cleaned_schema["items"])
+
         return cleaned_schema

-    @override
     def _get_declaration(self) -> types.FunctionDeclaration | None:
         # Clean the schema recursively
         schema = self._clean_schema(self._tool.input_schema)

@@ -48,14 +66,15 @@ class GoogleADKTool(BaseTool):
            types.FunctionDeclaration(
                 name=self._tool.name,
                 description=self._tool.description,
-                parameters=schema,
+                parameters=types.Schema(**schema),
             )
         )

         return function_decl

-    @override
     async def run_async(self, *, args: dict[str, Any], tool_context: ToolContext) -> Any:
+        if not self._tool.coroutine:
+            raise ValueError(f"Tool {self._tool.name} does not have a coroutine defined")
         args_to_call = args.copy()
         signature = inspect.signature(self._tool.coroutine)
         if "tool_context" in signature.parameters: