blaxel 0.2.0rc6__py3-none-any.whl → 0.2.1rc71__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
blaxel/core/sandbox/sandbox.py
@@ -1,6 +1,7 @@
  import asyncio
  import logging
  import time
+ import uuid
  from typing import Any, Dict, List, Union

  from ..client.api.compute.create_sandbox import asyncio as create_sandbox
@@ -8,7 +9,7 @@ from ..client.api.compute.delete_sandbox import asyncio as delete_sandbox
  from ..client.api.compute.get_sandbox import asyncio as get_sandbox
  from ..client.api.compute.list_sandboxes import asyncio as list_sandboxes
  from ..client.client import client
- from ..client.models import Metadata, Sandbox
+ from ..client.models import Metadata, Runtime, Sandbox, SandboxSpec
  from .filesystem import SandboxFileSystem
  from .network import SandboxNetwork
  from .preview import SandboxPreviews
@@ -67,14 +68,20 @@ class SandboxInstance:
          raise Exception("Sandbox did not deploy in time")

      @classmethod
-     async def create(cls, sandbox: Union[Sandbox, Dict[str, Any]]) -> "SandboxInstance":
+     async def create(
+         cls, sandbox: Union[Sandbox, Dict[str, Any], None] = None
+     ) -> "SandboxInstance":
+         if sandbox is None:
+             sandbox = Sandbox()
          if isinstance(sandbox, dict):
              sandbox = Sandbox.from_dict(sandbox)
-
+         if not sandbox.metadata:
+             sandbox.metadata = Metadata(name=uuid.uuid4().hex)
          if not sandbox.spec:
-             raise Exception("Sandbox spec is required")
+             sandbox.spec = SandboxSpec(runtime=Runtime(image="blaxel/prod-base:latest"))
          if not sandbox.spec.runtime:
-             raise Exception("Sandbox runtime is required")
+             sandbox.spec.runtime = Runtime(image="blaxel/prod-base:latest")
+
          sandbox.spec.runtime.generation = sandbox.spec.runtime.generation or "mk3"

          response = await create_sandbox(
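
With this change, `SandboxInstance.create` no longer requires a fully populated `Sandbox`: a missing argument, metadata, spec, or runtime is filled in with defaults (a `uuid4` hex name, the `blaxel/prod-base:latest` image, generation `"mk3"`). A minimal usage sketch of the new signature; the `blaxel.core.sandbox` import path is inferred from the wheel layout below, and the `asyncio` scaffolding is illustrative only:

```python
import asyncio

from blaxel.core.sandbox import SandboxInstance  # import path assumed from the package layout


async def main() -> None:
    # No argument: the name falls back to a uuid4 hex, the image to
    # blaxel/prod-base:latest, and the generation to "mk3".
    sandbox = await SandboxInstance.create()

    # A plain dict is still accepted and converted via Sandbox.from_dict().
    named = await SandboxInstance.create({"metadata": {"name": "my-sandbox"}})


asyncio.run(main())
```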

blaxel/llamaindex/custom/cohere.py
@@ -47,7 +47,6 @@ class Cohere(FunctionCallingLLM):

          llm = Cohere(model="command", api_key=api_key)
          resp = llm.complete("Paul Graham is ")
-         print(resp)
          ```
      """

@@ -55,9 +54,7 @@ class Cohere(FunctionCallingLLM):
      temperature: Optional[float] = Field(
          description="The temperature to use for sampling.", default=None
      )
-     max_retries: int = Field(
-         default=10, description="The maximum number of API retries."
-     )
+     max_retries: int = Field(default=10, description="The maximum number of API retries.")
      additional_kwargs: Dict[str, Any] = Field(
          default_factory=dict, description="Additional kwargs for the Cohere API."
      )
@@ -167,9 +164,7 @@ class Cohere(FunctionCallingLLM):
          error_on_no_tool_call: bool = False,
      ) -> List[ToolSelection]:
          """Predict and call the tool."""
-         tool_calls: List[ToolCall] = (
-             response.message.additional_kwargs.get("tool_calls", []) or []
-         )
+         tool_calls: List[ToolCall] = response.message.additional_kwargs.get("tool_calls", []) or []

          if len(tool_calls) < 1 and error_on_no_tool_call:
              raise ValueError(
@@ -218,10 +213,8 @@ class Cohere(FunctionCallingLLM):

          messages, documents = remove_documents_from_messages(messages)

-         tool_results: Optional[
-             List[Dict[str, Any]]
-         ] = _messages_to_cohere_tool_results_curr_chat_turn(messages) or kwargs.get(
-             "tool_results"
+         tool_results: Optional[List[Dict[str, Any]]] = (
+             _messages_to_cohere_tool_results_curr_chat_turn(messages) or kwargs.get("tool_results")
          )
          if not tool_results:
              tool_results = None
@@ -235,12 +228,8 @@ class Cohere(FunctionCallingLLM):
              if message.role == MessageRole.TOOL:
                  temp_tool_results += _message_to_cohere_tool_results(messages, i)

-                 if (i == len(messages) - 1) or messages[
-                     i + 1
-                 ].role != MessageRole.TOOL:
-                     cohere_message = _get_message_cohere_format(
-                         message, temp_tool_results
-                     )
+                 if (i == len(messages) - 1) or messages[i + 1].role != MessageRole.TOOL:
+                     cohere_message = _get_message_cohere_format(message, temp_tool_results)
                      chat_history.append(cohere_message)
                      temp_tool_results = []
              else:
@@ -262,12 +251,8 @@ class Cohere(FunctionCallingLLM):
              if message.role == MessageRole.TOOL:
                  temp_tool_results += _message_to_cohere_tool_results(messages, i)

-                 if (i == len(messages) - 1) or messages[
-                     i + 1
-                 ].role != MessageRole.TOOL:
-                     cohere_message = _get_message_cohere_format(
-                         message, temp_tool_results
-                     )
+                 if (i == len(messages) - 1) or messages[i + 1].role != MessageRole.TOOL:
+                     cohere_message = _get_message_cohere_format(message, temp_tool_results)
                      chat_history.append(cohere_message)
                      temp_tool_results = []
              else:
@@ -327,9 +312,7 @@ class Cohere(FunctionCallingLLM):
          )

      @llm_completion_callback()
-     def complete(
-         self, prompt: str, formatted: bool = False, **kwargs: Any
-     ) -> CompletionResponse:
+     def complete(self, prompt: str, formatted: bool = False, **kwargs: Any) -> CompletionResponse:
          all_kwargs = self._get_all_kwargs(**kwargs)
          if "stream" in all_kwargs:
              warnings.warn(
@@ -351,9 +334,7 @@ class Cohere(FunctionCallingLLM):
          )

      @llm_chat_callback()
-     def stream_chat(
-         self, messages: Sequence[ChatMessage], **kwargs: Any
-     ) -> ChatResponseGen:
+     def stream_chat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponseGen:
          all_kwargs = self._get_all_kwargs(**kwargs)
          all_kwargs["stream"] = True
          if all_kwargs["model"] not in CHAT_MODELS:
@@ -402,16 +383,12 @@ class Cohere(FunctionCallingLLM):
              for r in response:
                  content_delta = r.text
                  content += content_delta
-                 yield CompletionResponse(
-                     text=content, delta=content_delta, raw=r._asdict()
-                 )
+                 yield CompletionResponse(text=content, delta=content_delta, raw=r._asdict())

          return gen()

      @llm_chat_callback()
-     async def achat(
-         self, messages: Sequence[ChatMessage], **kwargs: Any
-     ) -> ChatResponse:
+     async def achat(self, messages: Sequence[ChatMessage], **kwargs: Any) -> ChatResponse:
          all_kwargs = self._get_all_kwargs(**kwargs)
          if all_kwargs["model"] not in CHAT_MODELS:
              raise ValueError(f"{all_kwargs['model']} not supported for chat")
@@ -535,8 +512,6 @@ class Cohere(FunctionCallingLLM):
              async for r in response:
                  content_delta = r.text
                  content += content_delta
-                 yield CompletionResponse(
-                     text=content, delta=content_delta, raw=r._asdict()
-                 )
+                 yield CompletionResponse(text=content, delta=content_delta, raw=r._asdict())

          return gen()

blaxel/telemetry/instrumentation/blaxel_core.py
@@ -10,9 +10,10 @@ from blaxel.telemetry.span import SpanManager

  logger = logging.getLogger(__name__)

+
  class BlaxelCoreInstrumentor(BaseInstrumentor):
      def instrumentation_dependencies(self):
-         return ["blaxel.core"]
+         return []

      def _instrument(self, **kwargs):
          tracer_provider = kwargs.get("tracer_provider")
@@ -121,4 +122,4 @@ class BlaxelCoreInstrumentor(BaseInstrumentor):
              Tool.sync_coroutine = traced_sync_coroutine
              return Tool

-         blaxel.core.tools.convert_mcp_tool_to_blaxel_tool = traced_convert_mcp_tool_to_blaxel_tool
+         blaxel.core.tools.convert_mcp_tool_to_blaxel_tool = traced_convert_mcp_tool_to_blaxel_tool

blaxel/telemetry/instrumentation/blaxel_langgraph.py
@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)

  class BlaxelLanggraphInstrumentor(BaseInstrumentor):
      def instrumentation_dependencies(self):
-         return ["blaxel.langgraph", "blaxel.core"]
+         return ["langgraph"]

      def _instrument(self, **kwargs):
          tracer_provider = kwargs.get("tracer_provider")

blaxel/telemetry/instrumentation/blaxel_langgraph_gemini.py
@@ -316,7 +316,7 @@ class BlaxelLanggraphGeminiInstrumentor(BaseInstrumentor):
          Config.exception_logger = exception_logger

      def instrumentation_dependencies(self) -> Collection[str]:
-         return ["blaxel.langgraph"]
+         return ["langgraph"]

      def _instrument(self, **kwargs):
          tracer_provider = kwargs.get("tracer_provider")

blaxel/telemetry/instrumentation/blaxel_llamaindex.py
@@ -30,7 +30,7 @@ TO_INSTRUMENT = [

  class BlaxelLlamaIndexInstrumentor(BaseInstrumentor):
      def instrumentation_dependencies(self):
-         return ["blaxel.llamaindex", "blaxel.core"]
+         return ["llama_index"]

      def _instrument(self, **kwargs):
          tracer_provider = kwargs.get("tracer_provider")

blaxel/telemetry/instrumentation/map.py
@@ -35,28 +35,28 @@ MAPPINGS: Dict[str, InstrumentationMapping] = {
          required_packages=["google-generativeai"],
          ignore_if_packages=[],
      ),
-     "blaxel.core": InstrumentationMapping(
+     "blaxel_core": InstrumentationMapping(
          module_path="blaxel.telemetry.instrumentation.blaxel_core",
          class_name="BlaxelCoreInstrumentor",
-         required_packages=["blaxel.core"],
+         required_packages=[],
          ignore_if_packages=[],
      ),
-     "blaxel.langgraph": InstrumentationMapping(
+     "blaxel_langgraph": InstrumentationMapping(
          module_path="blaxel.telemetry.instrumentation.blaxel_langgraph",
          class_name="BlaxelLanggraphInstrumentor",
-         required_packages=["blaxel.langgraph"],
+         required_packages=["langgraph"],
          ignore_if_packages=[],
      ),
-     "blaxel.langgraph_gemini": InstrumentationMapping(
+     "blaxel_langgraph_gemini": InstrumentationMapping(
          module_path="blaxel.telemetry.instrumentation.blaxel_langgraph_gemini",
          class_name="BlaxelLanggraphGeminiInstrumentor",
-         required_packages=["blaxel.langgraph"],
+         required_packages=["langgraph"],
          ignore_if_packages=[],
      ),
-     "blaxel.llamaindex": InstrumentationMapping(
+     "blaxel_llamaindex": InstrumentationMapping(
          module_path="blaxel.telemetry.instrumentation.blaxel_llamaindex",
          class_name="BlaxelLlamaIndexInstrumentor",
-         required_packages=["blaxel_llamaindex"],
+         required_packages=["llama_index"],
          ignore_if_packages=[],
      ),
  }
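
The renamed mapping keys and the new `required_packages` values line up with the `instrumentation_dependencies` changes above: dependency checks now point at installable distributions such as `langgraph` and `llama_index` rather than `blaxel.*` sub-modules, which are not shipped as separate packages. A hypothetical sketch of how a loader could consume such a mapping; the real loading logic (in `blaxel/telemetry/manager.py`, not shown in this diff) may differ:

```python
import importlib
from importlib.metadata import PackageNotFoundError, distribution


def load_instrumentors(mappings) -> None:
    """Instantiate and enable every instrumentor whose required packages are installed."""
    for name, mapping in mappings.items():
        try:
            # required_packages now hold distribution names ("langgraph", "llama_index"),
            # so importlib.metadata can confirm they are actually installed.
            for pkg in mapping.required_packages:
                distribution(pkg)
        except PackageNotFoundError:
            continue  # dependency missing: skip this instrumentor
        module = importlib.import_module(mapping.module_path)
        getattr(module, mapping.class_name)().instrument()
```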

blaxel-0.2.0rc6.dist-info/METADATA → blaxel-0.2.1rc71.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: blaxel
- Version: 0.2.0rc6
+ Version: 0.2.1rc71
  Summary: Blaxel - AI development platform SDK
  Project-URL: Homepage, https://blaxel.ai
  Project-URL: Documentation, https://docs.blaxel.ai

blaxel-0.2.0rc6.dist-info/RECORD → blaxel-0.2.1rc71.dist-info/RECORD
@@ -310,7 +310,7 @@ blaxel/core/sandbox/filesystem.py,sha256=dyIvDdlPZO0ijD6mXXX8Yl0t75VijQ6_uMz_9rJ
  blaxel/core/sandbox/network.py,sha256=P5jLd4AAg1zgyIK4qGWvZaDZ5BzIcxRx2ffz_JLsLMI,357
  blaxel/core/sandbox/preview.py,sha256=M6FulOxPghUBpb5fLxu1Rd3ekLeCbZ_dgt4s1X2Cneo,5354
  blaxel/core/sandbox/process.py,sha256=FahDx-FOqXPV3ajVI6aKdXqx4Oi4fK3egojHjTGy8ro,6705
- blaxel/core/sandbox/sandbox.py,sha256=an06JrTl2kfS5guYo0ZT2A-Uu5Zr7BQAu-fWXcCZnhY,5368
+ blaxel/core/sandbox/sandbox.py,sha256=BRMl74XTY5JQFcwZdEJoCgqcN3tTdRMD4gL7J-1Go1Q,5635
  blaxel/core/sandbox/session.py,sha256=4SH1tyXcQ9UqJx4lMwxAlp7x9Te_anDrdSEG6AlNkvU,4496
  blaxel/core/sandbox/types.py,sha256=gmNAt8x7PrJZHpw2_2aWWDE5uG6H-uEgdIKTYIw4G0g,2981
  blaxel/core/sandbox/client/__init__.py,sha256=N26bD5o1jsTb48oExow6Rgivd8ylaU9jaWZfZsVilP8,128
@@ -379,7 +379,7 @@ blaxel/llamaindex/__init__.py,sha256=iZ3QbZhlwKvP91ChcqSXVkpRrzurMxJoQfKdZFzE2AA
  blaxel/llamaindex/model.py,sha256=FYUCTA80LKC4-UfESh43htoHrsoZjwwVaxXD51T0IuE,2557
  blaxel/llamaindex/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  blaxel/llamaindex/tools.py,sha256=LgrKe-o0I8_JE4DxcM2YYlHN-15LKmXuBKm08NYLV9w,907
- blaxel/llamaindex/custom/cohere.py,sha256=Igj5Y1ozf1V4feIXfBHDdaTFU7od_wuOhm0yChZNxMY,19109
+ blaxel/llamaindex/custom/cohere.py,sha256=zoUv4NWwMZLZEynYTC3JuOvtinguRgtuRwS31wIm3rI,18713
  blaxel/openai/__init__.py,sha256=YkizVtcYL2m9v-z5B1EReYVu9n9V-DCxJhSB2mvqOs0,123
  blaxel/openai/model.py,sha256=lGz4zrV4sURPb6aLtroMRV5-CgfQUq15PeTgjc7QkTI,600
  blaxel/openai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -394,15 +394,15 @@ blaxel/telemetry/exporters.py,sha256=EoX3uaBVku1Rg49pSNXKFyHhgY5OV3Ih6UlqgjF5epw
  blaxel/telemetry/manager.py,sha256=3yYBxxqQKl1rCKrn0GVz9jR5jouC1nsElbAaH8tTtgA,9075
  blaxel/telemetry/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  blaxel/telemetry/span.py,sha256=b8lpWe5nqek4w9YrBLJCIYz4Z3n9Z_49H_AkqpAGOiM,3660
- blaxel/telemetry/instrumentation/blaxel_core.py,sha256=gY2QoAr2UgnMabLyoxR1X79SVUaFM_hQJIzDdy80SuY,4892
- blaxel/telemetry/instrumentation/blaxel_langgraph.py,sha256=5C6mM0aKa8rbaAmhe7Z3HWHhZ-A_BZ0iio9Hn-GvIVM,4257
- blaxel/telemetry/instrumentation/blaxel_langgraph_gemini.py,sha256=2_5vcaNU7fS7ZisSLadNpFfiuy4zeRUDONJXhlQ9L1M,11960
- blaxel/telemetry/instrumentation/blaxel_llamaindex.py,sha256=EvgFhLUv5QViz7rAMJs5w_Oks-5NOe1v7A3B9b0NxeM,3122
- blaxel/telemetry/instrumentation/map.py,sha256=D7htA88-H8r7jHutBWnr_KEL8OZqLvvnRbU9jWiAA8g,2207
+ blaxel/telemetry/instrumentation/blaxel_core.py,sha256=7PNseKq7bOQ4Z6a8jW7AGm0ojcdcusjoiH_5iQsR5ic,4881
+ blaxel/telemetry/instrumentation/blaxel_langgraph.py,sha256=sBxt8kzRjsFepm86vaCBbGvpYI4mWPK_PB3wiLxSuOk,4235
+ blaxel/telemetry/instrumentation/blaxel_langgraph_gemini.py,sha256=hQbOaaMsHte_Igq_vhPgNXYd40IwuqPB0TUP-5VAtME,11953
+ blaxel/telemetry/instrumentation/blaxel_llamaindex.py,sha256=HA4YUXrRYvG5U1J8MAJnymmmj4ZCDydv-pXXhrftHOA,3101
+ blaxel/telemetry/instrumentation/map.py,sha256=PCzZJj39yiYVYJrxLBNP-NW-tjjYyTijwEDeI9njuDY,2174
  blaxel/telemetry/instrumentation/utils.py,sha256=KInMYZH-mu9_wvetmf0EmgrfN3Sw8IWk2Y95v2u90_U,1901
  blaxel/telemetry/log/log.py,sha256=RvQByRjZMoP_dRaAZu8oK6DTegsHs-xV4W-UIqis6CA,2461
  blaxel/telemetry/log/logger.py,sha256=NPAS3g82ryROjvc_DEZaTIfrcehoLEZoP-JkLxADxc0,4113
- blaxel-0.2.0rc6.dist-info/METADATA,sha256=aI1uHOODyBfwDgJAbMDwmZlI__mZnGgzX9u-tY1PO8g,9878
- blaxel-0.2.0rc6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- blaxel-0.2.0rc6.dist-info/licenses/LICENSE,sha256=p5PNQvpvyDT_0aYBDgmV1fFI_vAD2aSV0wWG7VTgRis,1069
- blaxel-0.2.0rc6.dist-info/RECORD,,
+ blaxel-0.2.1rc71.dist-info/METADATA,sha256=v0CXAwoZmLommJuFSHXUTdXHRMakSGnzuwrjvYZcKV8,9879
+ blaxel-0.2.1rc71.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ blaxel-0.2.1rc71.dist-info/licenses/LICENSE,sha256=p5PNQvpvyDT_0aYBDgmV1fFI_vAD2aSV0wWG7VTgRis,1069
+ blaxel-0.2.1rc71.dist-info/RECORD,,