blaxel 0.1.18rc61__py3-none-any.whl → 0.1.18rc63__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
blaxel/common/logger.py CHANGED
@@ -7,6 +7,7 @@ import logging
  import os

  from opentelemetry import trace
+ from opentelemetry import context as context_api


  class JsonFormatter(logging.Formatter):
@@ -35,8 +36,15 @@ class JsonFormatter(logging.Formatter):
              self.labels_name: {}
          }

-         # Add trace context if available
+         # Try to get the current span from context
+         # Get current span - try multiple approaches
          current_span = trace.get_current_span()
+
+         # If that doesn't work, try getting from current context explicitly
+         if not current_span.is_recording():
+             ctx = context_api.get_current()
+             current_span = trace.get_current_span(ctx)
+
          if current_span.is_recording():
              span_context = current_span.get_span_context()
              log_entry[self.trace_id_name] = f"{self.trace_id_prefix}{span_context.trace_id}"
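The hunk above adds a fallback: when the implicitly looked-up span is not recording, the formatter re-reads the span from an explicitly fetched OpenTelemetry context. A minimal, self-contained sketch of that pattern (the formatter class and the plain-text output format here are illustrative, not the package's exact code):

```python
import logging

from opentelemetry import context as context_api
from opentelemetry import trace


def get_active_span():
    """Return the current span, retrying with an explicitly fetched context."""
    span = trace.get_current_span()
    if not span.is_recording():
        # Fallback used by the new logger code: read the span off the
        # context object returned by context_api.get_current().
        span = trace.get_current_span(context_api.get_current())
    return span


class TraceAwareFormatter(logging.Formatter):
    """Illustrative formatter that appends trace/span IDs when a span is recording."""

    def format(self, record: logging.LogRecord) -> str:
        message = super().format(record)
        span = get_active_span()
        if span.is_recording():
            ctx = span.get_span_context()
            message += f" trace_id={ctx.trace_id:032x} span_id={ctx.span_id:016x}"
        return message
```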
blaxel-0.1.18rc63.dist-info/METADATA ADDED
@@ -0,0 +1,169 @@
+ Metadata-Version: 2.4
+ Name: blaxel
+ Version: 0.1.18rc63
+ Summary: Add your description here
+ Project-URL: Homepage, https://blaxel.ai
+ Project-URL: Documentation, https://docs.blaxel.ai
+ Project-URL: Repository, https://github.com/blaxel-ai/sdk-python
+ Project-URL: Changelog, https://docs.blaxel.ai/changelog
+ Author-email: cploujoux <cploujoux@blaxel.ai>
+ License-File: LICENSE
+ Requires-Python: >=3.10
+ Requires-Dist: attrs>=21.3.0
+ Requires-Dist: httpx>=0.27.0
+ Requires-Dist: mcp<=1.7.1
+ Requires-Dist: opentelemetry-api>=1.28.0
+ Requires-Dist: opentelemetry-exporter-otlp>=1.28.0
+ Requires-Dist: opentelemetry-instrumentation-anthropic==0.40.6
+ Requires-Dist: opentelemetry-instrumentation-cohere==0.40.6
+ Requires-Dist: opentelemetry-instrumentation-fastapi==0.54b1
+ Requires-Dist: opentelemetry-instrumentation-ollama==0.40.6
+ Requires-Dist: opentelemetry-instrumentation-openai==0.40.6
+ Requires-Dist: opentelemetry-instrumentation-system-metrics
+ Requires-Dist: opentelemetry-sdk>=1.28.0
+ Requires-Dist: pydantic<2.11.0,>=2.10.3
+ Requires-Dist: pyjwt>=2.10.1
+ Requires-Dist: python-dateutil>=2.8.0
+ Requires-Dist: pyyaml<6.1.0,>=6.0.2
+ Requires-Dist: requests<2.33.0,>=2.32.3
+ Requires-Dist: tomli>=2.2.1
+ Requires-Dist: websockets<15.0.0
+ Provides-Extra: crewai
+ Requires-Dist: crewai>=0.120.1; extra == 'crewai'
+ Requires-Dist: opentelemetry-instrumentation-crewai>=0.40.6; extra == 'crewai'
+ Provides-Extra: google-adk
+ Requires-Dist: google-adk>=0.2.0; extra == 'google-adk'
+ Requires-Dist: litellm>=1.63.11; extra == 'google-adk'
+ Provides-Extra: langchain
+ Requires-Dist: langchain-anthropic>=0.3.10; extra == 'langchain'
+ Requires-Dist: langchain-cerebras>=0.5.0; extra == 'langchain'
+ Requires-Dist: langchain-cohere>=0.4.3; extra == 'langchain'
+ Requires-Dist: langchain-community<0.4.0,>=0.3.3; extra == 'langchain'
+ Requires-Dist: langchain-core<0.4.0,>=0.3.13; extra == 'langchain'
+ Requires-Dist: langchain-deepseek-official>=0.1.0.post1; extra == 'langchain'
+ Requires-Dist: langchain-openai>=0.3.10; extra == 'langchain'
+ Requires-Dist: langchain-xai>=0.2.2; extra == 'langchain'
+ Requires-Dist: langgraph<0.3.0,>=0.2.40; extra == 'langchain'
+ Requires-Dist: opentelemetry-instrumentation-langchain>=0.35.0; extra == 'langchain'
+ Requires-Dist: pillow>=10.0.0; extra == 'langchain'
+ Provides-Extra: livekit
+ Requires-Dist: livekit-agents[anthropic,cartesia,deepgram,elevenlabs,groq,openai,silero,turn-detector]~=1.0; extra == 'livekit'
+ Requires-Dist: livekit-plugins-noise-cancellation~=0.2; extra == 'livekit'
+ Provides-Extra: llamaindex
+ Requires-Dist: llama-index-llms-anthropic>=0.6.10; extra == 'llamaindex'
+ Requires-Dist: llama-index-llms-cerebras>=0.2.2; extra == 'llamaindex'
+ Requires-Dist: llama-index-llms-cohere>=0.4.0; extra == 'llamaindex'
+ Requires-Dist: llama-index-llms-deepseek>=0.1.1; extra == 'llamaindex'
+ Requires-Dist: llama-index-llms-google-genai>=0.1.7; extra == 'llamaindex'
+ Requires-Dist: llama-index-llms-groq>=0.3.1; extra == 'llamaindex'
+ Requires-Dist: llama-index-llms-mistralai>=0.4.0; extra == 'llamaindex'
+ Requires-Dist: llama-index-llms-openai>=0.3.28; extra == 'llamaindex'
+ Requires-Dist: llama-index>=0.12.26; extra == 'llamaindex'
+ Requires-Dist: opentelemetry-instrumentation-llamaindex>=0.35.0; extra == 'llamaindex'
+ Provides-Extra: openai-agents
+ Requires-Dist: openai-agents>=0.0.7; extra == 'openai-agents'
+ Provides-Extra: pydantic-ai
+ Requires-Dist: pydantic-ai>=0.0.48; extra == 'pydantic-ai'
+ Description-Content-Type: text/markdown
+
+ # Blaxel Python SDK
+
+ <p align="center">
+   <img src="https://blaxel.ai/logo-bg.png" alt="Blaxel"/>
+ </p>
+
+ **Blaxel is a computing platform for AI agent builders, with all the services and infrastructure to build and deploy agents efficiently.** This repository contains the Python SDK to create and manage resources on Blaxel.
+
+ ## Table of Contents
+
+ - [Installation](#installation)
+ - [Authentication](#authentication)
+ - [Features](#features)
+ - [Quickstart](#quickstart)
+ - [Contributing](#contributing)
+ - [License](#license)
+
+
+
+ ## Installation
+
+ Install Blaxel SDK which lets you manage Blaxel resources.
+
+ ```bash
+ ## Using pip
+ pip install blaxel
+
+ ## Using uv
+ uv pip install blaxel
+
+ ## Using uv add
+ uv add blaxel
+ ```
+
+
+
+ ### Authentication
+
+ The Blaxel SDK authenticates with your workspace using credentials from these sources, in priority order:
+ 1. When running on Blaxel, authentication is handled automatically
+ 2. Variables in your .env file (`BL_WORKSPACE` and `BL_API_KEY`, or see [this page](https://docs.blaxel.ai/Agents/Variables-and-secrets) for other authentication options).
+ 3. Environment variables from your machine
+ 4. Configuration file created locally when you log in through Blaxel CLI (or deploy on Blaxel)
+
+ When developing locally, the recommended method is to just log in to your workspace with Blaxel CLI. This allows you to run Blaxel SDK functions that will automatically connect to your workspace without additional setup. When you deploy on Blaxel, this connection persists automatically.
+
+ When running Blaxel SDK from a remote server that is not Blaxel-hosted, we recommend using environment variables as described in the third option above.
+
+
+
+ ## Features
+ - Agents & MCP servers
+   - [Create MCP servers](https://docs.blaxel.ai/Functions/Create-MCP-server)
+   - [Connect to MCP servers and model APIs hosted on Blaxel](https://docs.blaxel.ai/Agents/Develop-an-agent-ts)
+   - [Call agents from another agent](https://docs.blaxel.ai/Agents/Develop-an-agent-ts#connect-to-another-agent-multi-agent-chaining)
+   - [Deploy on Blaxel](https://docs.blaxel.ai/Agents/Deploy-an-agent)
+ - Sandboxes
+   - [Create and update sandboxes and sandbox previews](https://docs.blaxel.ai/Sandboxes/Overview)
+   - [Run filesystem operations and processes on a sandbox](https://docs.blaxel.ai/Sandboxes/Processes)
+   - [Use environment variables or secrets](https://docs.blaxel.ai/Agents/Variables-and-secrets)
+
+
+
+ ## Quickstart
+
+ Blaxel CLI gives you a quick way to create new applications: agents, MCP servers, jobs, etc - and deploy them to Blaxel.
+
+ **Prerequisites**:
+ - **Node.js:** v18 or later.
+ - **Blaxel CLI:** Make sure you have Blaxel CLI installed. If not, [install it](https://docs.blaxel.ai/cli-reference/introduction):
+   ```bash
+   curl -fsSL \
+     https://raw.githubusercontent.com/blaxel-ai/toolkit/main/install.sh \
+     | BINDIR=/usr/local/bin sudo -E sh
+   ```
+ - **Blaxel login:** Login to Blaxel:
+   ```bash
+   bl login YOUR-WORKSPACE
+   ```
+
+ ```bash
+ bl create-agent-app myfolder
+ cd myfolder
+ bl deploy
+ ```
+
+ Also available:
+ - `bl create-mcp-server`
+ - `bl create-job`
+
+
+
+ ## Contributing
+
+ Contributions are welcome! Please feel free to submit a Pull Request.
+
+
+
+ ## License
+
+ This project is licensed under the MIT License - see the LICENSE file for details.
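As a companion to the Authentication and Installation sections above: a minimal sketch of using environment-variable credentials (option 3 in the priority list) and then connecting a Blaxel-hosted model and tools. The `bl_model`/`bl_tools` helpers and the `.to_langchain()` conversion are taken from the rc61 README further down in this diff; the workspace, model, and tool names are placeholders, so verify them against the installed SDK version.

```python
import asyncio
import os

# Helpers as documented in the rc61 README shown below (assumed still present in this release).
from blaxel.models import bl_model
from blaxel.tools import bl_tools


async def main():
    # Credential source 3 from the Authentication list: plain environment variables.
    os.environ.setdefault("BL_WORKSPACE", "my-workspace")  # placeholder workspace
    os.environ.setdefault("BL_API_KEY", "<api-key>")       # placeholder key

    # Connect a Blaxel-hosted model and tools (names are illustrative).
    model = await bl_model("gpt-4o-mini").to_langchain()
    async with bl_tools(["blaxel-search"]) as tools:
        result = await model.ainvoke("Say hello from Blaxel")
        print(result, tools.to_langchain())


if __name__ == "__main__":
    asyncio.run(main())
```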
blaxel-0.1.18rc61.dist-info/RECORD → blaxel-0.1.18rc63.dist-info/RECORD RENAMED
@@ -285,7 +285,7 @@ blaxel/client/models/workspace_user.py,sha256=70CcifQWYbeWG7TDui4pblTzUe5sVK0AS1
  blaxel/common/autoload.py,sha256=NFuK71-IHOY2JQyEBSjDCVfUaQ8D8PJsEUEryIdG4AU,263
  blaxel/common/env.py,sha256=wTbzPDdNgz4HMJiS2NCZmQlN0qpxy1PQEYBaZgtvhoc,1247
  blaxel/common/internal.py,sha256=6lZENUQrh3bvpMIPLaie6-g2upiNzf21QG-dQ0hGJWU,2371
- blaxel/common/logger.py,sha256=NPAS3g82ryROjvc_DEZaTIfrcehoLEZoP-JkLxADxc0,4113
+ blaxel/common/logger.py,sha256=-2_aTQgo8VDKqR8VoM16EcJ5i8Z7l8VmqHi5MoYAuLg,4462
  blaxel/common/settings.py,sha256=7KTryuBdud0IfHqykX7xEEtpgq5M5h1Z8YEzYKsHB-Q,2327
  blaxel/instrumentation/exporters.py,sha256=EoX3uaBVku1Rg49pSNXKFyHhgY5OV3Ih6UlqgjF5epw,1670
  blaxel/instrumentation/log.py,sha256=RvQByRjZMoP_dRaAZu8oK6DTegsHs-xV4W-UIqis6CA,2461
@@ -364,7 +364,7 @@ blaxel/tools/llamaindex.py,sha256=-gQ-C9V_h9a11J4ItsbWjXrCJOg0lRKsb98v9rVsNak,71
  blaxel/tools/openai.py,sha256=GuFXkj6bXEwldyVr89jEsRAi5ihZUVEVe327QuWiGNs,653
  blaxel/tools/pydantic.py,sha256=CvnNbAG_J4yBtA-XFI4lQrq3FYKjNd39hu841vZT004,1801
  blaxel/tools/types.py,sha256=YPCGJ4vZDhqR0X2H_TWtc5chQScsC32nGTQdRKJlO8Y,707
- blaxel-0.1.18rc61.dist-info/METADATA,sha256=9IYqyMHfZIabRsrdrNaHntEL9K3QhwNNeHPJ62g0N8E,11936
- blaxel-0.1.18rc61.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- blaxel-0.1.18rc61.dist-info/licenses/LICENSE,sha256=p5PNQvpvyDT_0aYBDgmV1fFI_vAD2aSV0wWG7VTgRis,1069
- blaxel-0.1.18rc61.dist-info/RECORD,,
+ blaxel-0.1.18rc63.dist-info/METADATA,sha256=3r5AE3YTRl2qSwnNTwkZHxKLML9N-pat_lbJlnVrBrA,6706
+ blaxel-0.1.18rc63.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ blaxel-0.1.18rc63.dist-info/licenses/LICENSE,sha256=p5PNQvpvyDT_0aYBDgmV1fFI_vAD2aSV0wWG7VTgRis,1069
+ blaxel-0.1.18rc63.dist-info/RECORD,,
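Each RECORD line above pairs a file path with a `sha256=` digest (urlsafe base64 with the padding stripped, per the wheel RECORD format) and a byte size. A small sketch for regenerating such a line, e.g. to verify the changed `blaxel/common/logger.py` entry:

```python
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    """Build a wheel RECORD-style line: <path>,sha256=<urlsafe-b64 digest, unpadded>,<size>."""
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"


# Example: compare the output against the new RECORD entry for the changed logger module.
print(record_entry("blaxel/common/logger.py"))
```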
blaxel-0.1.18rc61.dist-info/METADATA DELETED
@@ -1,397 +0,0 @@
- Metadata-Version: 2.4
- Name: blaxel
- Version: 0.1.18rc61
- Summary: Add your description here
- Author-email: cploujoux <cploujoux@blaxel.ai>
- License-File: LICENSE
- Requires-Python: >=3.10
- Requires-Dist: attrs>=21.3.0
- Requires-Dist: httpx>=0.27.0
- Requires-Dist: mcp<=1.7.1
- Requires-Dist: opentelemetry-api>=1.28.0
- Requires-Dist: opentelemetry-exporter-otlp>=1.28.0
- Requires-Dist: opentelemetry-instrumentation-anthropic==0.40.6
- Requires-Dist: opentelemetry-instrumentation-cohere==0.40.6
- Requires-Dist: opentelemetry-instrumentation-fastapi==0.54b1
- Requires-Dist: opentelemetry-instrumentation-ollama==0.40.6
- Requires-Dist: opentelemetry-instrumentation-openai==0.40.6
- Requires-Dist: opentelemetry-instrumentation-system-metrics
- Requires-Dist: opentelemetry-sdk>=1.28.0
- Requires-Dist: pydantic<2.11.0,>=2.10.3
- Requires-Dist: pyjwt>=2.10.1
- Requires-Dist: python-dateutil>=2.8.0
- Requires-Dist: pyyaml<6.1.0,>=6.0.2
- Requires-Dist: requests<2.33.0,>=2.32.3
- Requires-Dist: tomli>=2.2.1
- Requires-Dist: websockets<15.0.0
- Provides-Extra: crewai
- Requires-Dist: crewai>=0.120.1; extra == 'crewai'
- Requires-Dist: opentelemetry-instrumentation-crewai>=0.40.6; extra == 'crewai'
- Provides-Extra: google-adk
- Requires-Dist: google-adk>=0.2.0; extra == 'google-adk'
- Requires-Dist: litellm>=1.63.11; extra == 'google-adk'
- Provides-Extra: langchain
- Requires-Dist: langchain-anthropic>=0.3.10; extra == 'langchain'
- Requires-Dist: langchain-cerebras>=0.5.0; extra == 'langchain'
- Requires-Dist: langchain-cohere>=0.4.3; extra == 'langchain'
- Requires-Dist: langchain-community<0.4.0,>=0.3.3; extra == 'langchain'
- Requires-Dist: langchain-core<0.4.0,>=0.3.13; extra == 'langchain'
- Requires-Dist: langchain-deepseek-official>=0.1.0.post1; extra == 'langchain'
- Requires-Dist: langchain-openai>=0.3.10; extra == 'langchain'
- Requires-Dist: langchain-xai>=0.2.2; extra == 'langchain'
- Requires-Dist: langgraph<0.3.0,>=0.2.40; extra == 'langchain'
- Requires-Dist: opentelemetry-instrumentation-langchain>=0.35.0; extra == 'langchain'
- Requires-Dist: pillow>=10.0.0; extra == 'langchain'
- Provides-Extra: livekit
- Requires-Dist: livekit-agents[anthropic,cartesia,deepgram,elevenlabs,groq,openai,silero,turn-detector]~=1.0; extra == 'livekit'
- Requires-Dist: livekit-plugins-noise-cancellation~=0.2; extra == 'livekit'
- Provides-Extra: llamaindex
- Requires-Dist: llama-index-llms-anthropic>=0.6.10; extra == 'llamaindex'
- Requires-Dist: llama-index-llms-cerebras>=0.2.2; extra == 'llamaindex'
- Requires-Dist: llama-index-llms-cohere>=0.4.0; extra == 'llamaindex'
- Requires-Dist: llama-index-llms-deepseek>=0.1.1; extra == 'llamaindex'
- Requires-Dist: llama-index-llms-google-genai>=0.1.7; extra == 'llamaindex'
- Requires-Dist: llama-index-llms-groq>=0.3.1; extra == 'llamaindex'
- Requires-Dist: llama-index-llms-mistralai>=0.4.0; extra == 'llamaindex'
- Requires-Dist: llama-index-llms-openai>=0.3.28; extra == 'llamaindex'
- Requires-Dist: llama-index>=0.12.26; extra == 'llamaindex'
- Requires-Dist: opentelemetry-instrumentation-llamaindex>=0.35.0; extra == 'llamaindex'
- Provides-Extra: openai-agents
- Requires-Dist: openai-agents>=0.0.7; extra == 'openai-agents'
- Provides-Extra: pydantic-ai
- Requires-Dist: pydantic-ai>=0.0.48; extra == 'pydantic-ai'
- Description-Content-Type: text/markdown
-
- # Blaxel Python SDK
-
- <p align="center">
-   <img src="https://blaxel.ai/logo.png" alt="Blaxel"/>
- </p>
-
- An SDK to connect your agent or tools with Blaxel platform.
- Currently in preview, feel free to send us feedback or contribute to the project.
-
- ## Table of Contents
-
- - [Features](#features)
- - [Prerequisites](#prerequisites)
- - [Start from an hello world example](#start-from-an-hello-world-example)
- - [Integrate with a custom code](#integrate-with-a-custom-code)
-   - [Set-up blaxel observability](#set-up-blaxel-observability)
-   - [Connect tools and model from blaxel platform to your agent](#connect-tools-and-model-from-blaxel-platform-to-your-agent)
-   - [Agent Chaining](#agent-chaining)
-   - [Deploy on blaxel](#deploy-on-blaxel)
-   - [Advanced configuration](#advanced-configuration)
- - [Create an MCP Server](#create-an-mcp-server)
- - [Connect an existing MCP Server to blaxel](#connect-an-existing-mcp-server-to-blaxel)
- - [How to use environment variables or secrets](#how-to-use-environment-variables-or-secrets)
- - [Contributing](#contributing)
- - [License](#license)
-
- ## Features
-
- Supported AI frameworks:
-
- - LangChain
- - LlamaIndex
- - CrewAI
- - OpenAI Agents
-
- Supported Tools frameworks:
-
- - MCP (Model Context Protocol)
-
- ## Prerequisites
-
- - **Python:** 3.10 or later
- - **Blaxel CLI:** Ensure you have the Blaxel CLI installed. If not, install it globally:
-   ```bash
-   curl -fsSL https://raw.githubusercontent.com/beamlit/toolkit/preview/install.sh | BINDIR=$HOME/.local/bin sh
-   ```
- - **Blaxel login:** Login to Blaxel platform
-   ```bash
-   bl login YOUR-WORKSPACE
-   ```
-
- ## Start from an hello world example
-
- ```bash
- bl create-agent-app myfolder
- cd myfolder
- bl serve --hotreload
- ```
-
- ## Integrate with a custom code
-
- ### Set-up blaxel observability
-
- It only needs an import of our SDK on top of your main entrypoint file.
- It will directly plug our backend (when deployed on blaxel) with open telemetry standard.
-
- ```python
- from blaxel import sdk
- ```
-
- ### Connect tools and model from blaxel platform to your agent
-
- ```python
- from blaxel.models import bl_model
- from blaxel.tools import bl_tools
- ```
-
- Then you need to use it in your agent. Here are examples with different frameworks:
-
- ```python
- # Example with LangChain
- from langchain.agents import AgentExecutor, create_react_agent
- from langchain.prompts import ChatPromptTemplate
- from langchain.tools import Tool
- from langchain_core.messages import HumanMessage
-
- async def create_agent():
-     model = await bl_model("gpt-4o-mini").to_langchain()
-     async with bl_tools(["blaxel-search", "webcrawl"]) as t:
-         tools = t.to_langchain()
-         tools.append(
-             Tool(
-                 name="weather",
-                 description="Get the weather in a specific city",
-                 func=lambda city: f"The weather in {city} is sunny"
-             )
-         )
-
-         prompt = ChatPromptTemplate.from_messages([
-             ("system", "You are a helpful assistant."),
-             ("human", "{input}")
-         ])
-
-         agent = create_react_agent(model, tools, prompt)
-         return AgentExecutor(agent=agent, tools=tools)
-
- # Example with LlamaIndex
- from llama_index.core import SimpleDirectoryReader
- from llama_index.core.agent import ReActAgent
- from llama_index.core.tools import FunctionTool
-
- async def create_llamaindex_agent():
-     model = await bl_model("gpt-4o-mini").to_llamaindex()
-     async with bl_tools(["blaxel-search", "webcrawl"]) as t:
-         tools = t.to_llamaindex()
-         tools.append(
-             FunctionTool.from_defaults(
-                 fn=lambda city: f"The weather in {city} is sunny",
-                 name="weather",
-                 description="Get the weather in a specific city"
-             )
-         )
-
-         return ReActAgent.from_tools(
-             tools,
-             llm=model,
-             verbose=True
-         )
-
- # Example with CrewAI
- from crewai import Agent, Task, Crew
-
- async def create_crewai_agent():
-     model = await bl_model("gpt-4o-mini").to_crewai()
-     async with bl_tools(["blaxel-search", "webcrawl"]) as t:
-         tools = t.to_crewai()
-         tools.append(
-             Tool(
-                 name="weather",
-                 description="Get the weather in a specific city",
-                 func=lambda city: f"The weather in {city} is sunny"
-             )
-         )
-
-         agent = Agent(
-             role='Assistant',
-             goal='Help users with their queries',
-             backstory='I am a helpful AI assistant',
-             tools=tools,
-             llm=model
-         )
-
-         return agent
- ```
-
- ### Agent Chaining
-
- You can call an agent from another agent to chain them.
- This allows complex agentic logic, with multiple agents calling each other, orchestration, routing, etc.
-
- ```python
- # Example of calling an agent, then putting its result inside a second one
- from blaxel.agents import bl_agent
-
- async def first_agent(input_text: str) -> dict:
-     # First agent that processes loan applications
-     response = await bl_agent("first-agent").run({
-         "inputs": input_text
-     })
-     return response
-
- async def second_agent(input_text: str) -> dict:
-     # Second agent that evaluates the loan application
-     first_response = await first_agent(input_text)
-
-     model = await bl_model("gpt-4o-mini").to_langchain()
-     prompt = ChatPromptTemplate.from_messages([
-         ("system", "You are a loan specialist. Based on the given json file with client data, your job is to decide if a client can be further processed."),
-         ("human", "{input}")
-     ])
-
-     response = await model.ainvoke(first_response)
-     return response
- ```
-
- ### Deploy on blaxel
-
- To deploy on blaxel, we have only one requirement in each agent code.
- We need an HTTP Server.
-
- For example with FastAPI:
-
- ```python
- from fastapi import FastAPI
- from blaxel import sdk
- import uvicorn
-
- app = FastAPI()
-
- @app.post("/")
- async def root(inputs: str):
-     # Your agentic logic here
-     return {"response": "Your response here"}
-
- if __name__ == "__main__":
-     port = int(os.getenv("BL_SERVER_PORT", "3000"))
-     host = os.getenv("BL_SERVER_HOST", "0.0.0.0")
-     uvicorn.run(app, host=host, port=port)
- ```
-
- ```bash
- bl deploy
- ```
-
- ### Advanced configuration
-
- You can add optionally a configuration file "blaxel.toml" in your project root.
-
- ```toml
- name = "my-agent"
- workspace = "my-workspace"
- type = "agent"
-
- functions = ["blaxel-search"]
- models = ["sandbox-openai"]
- ```
-
- It allows to customize the requirements for your agent, it can be useful if you have many models and functions in your workspace.
-
- ### Create an MCP Server
-
- If you want to create an MCP Server for using it in multiple agents, you can bootstrap it with the following command:
-
- ```bash
- bl create-mcp-server my-mcp-server
- cd my-mcp-server
- bl serve --hotreload
- ```
-
- We follow current standard for tool development over MCP Server.
- Example of a tool which is sending fake information about the weather:
-
- ```python
- from blaxel.mcp.server import FastMCP
-
- mcp = FastMCP("My Weather MCP server")
-
- @mcp.tool()
- def weather(city: str) -> str:
-     """Get the weather for a city"""
-     return f"The weather in {city} is sunny"
-
- if __name__ == "__main__":
-     if os.getenv("BL_SERVER_PORT"):
-         mcp.run(transport="ws")
-     else:
-         mcp.run(transport="stdio")
-
-
- ```
-
- ### Connect an existing MCP Server to blaxel
-
- You need to have a "blaxel.toml" file in your project root:
-
- ```toml
- name = "weather"
- workspace = "my-workspace"
- type = "function"
- ```
-
- Connect the observability layer:
-
- ```python
- from blaxel import sdk
- ```
-
- Update your import of FastMCP
-
- ```python
- from blaxel.mcp.server import FastMCP
- ```
-
- Update your entrypoint to support our transport:
-
- ```python
- def main():
-     mcp.run(transport="ws") if os.getenv("BL_SERVER_PORT") else mcp.run(transport="stdio")
- ```
-
- ### How to use environment variables or secrets
-
- You can use the "blaxel.toml" config file to specify environment variables for your agent:
-
- ```toml
- name = "weather"
- workspace = "my-workspace"
- type = "function"
-
- [env]
- DEFAULT_CITY = "San Francisco"
- ```
-
- Then you can use it in your agent or function with the following syntax:
-
- ```python
- from blaxel.env import env
- print(env.DEFAULT_CITY) # San Francisco
- ```
-
- You can also add secrets variables to a .env files in your project root. (goal is to not commit this file)
-
- Example of a .env file:
-
- ```
- # Secret variables can be stored here
- DEFAULT_CITY_PASSWORD=123456
- ```
-
- Then you can use it in your agent or function with the following syntax:
-
- ```python
- from blaxel.env import env
- print(env.DEFAULT_CITY_PASSWORD) # 123456
- ```
-
- ## Contributing
-
- Contributions are welcome! Please feel free to submit a Pull Request.
-
- ## License
-
- This project is licensed under the MIT License - see the LICENSE file for details.