blaxel 0.2.35__py3-none-any.whl → 0.2.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. blaxel/__init__.py +2 -2
  2. blaxel/core/client/api/compute/create_sandbox.py +21 -1
  3. blaxel/core/client/api/jobs/create_job_execution.py +12 -12
  4. blaxel/core/client/api/volumes/update_volume.py +187 -0
  5. blaxel/core/client/models/__init__.py +10 -6
  6. blaxel/core/client/models/{create_job_execution_response.py → create_job_execution_output.py} +11 -13
  7. blaxel/core/client/models/{create_job_execution_response_tasks_item.py → create_job_execution_output_tasks_item.py} +5 -5
  8. blaxel/core/client/models/create_job_execution_request.py +31 -0
  9. blaxel/core/client/models/create_job_execution_request_env.py +50 -0
  10. blaxel/core/client/models/function_runtime.py +18 -0
  11. blaxel/core/client/models/{function_spec_transport.py → function_runtime_transport.py} +2 -2
  12. blaxel/core/client/models/function_spec.py +0 -18
  13. blaxel/core/client/models/job_execution_spec.py +35 -0
  14. blaxel/core/client/models/job_execution_spec_env_override.py +50 -0
  15. blaxel/core/client/models/port_protocol.py +1 -0
  16. blaxel/core/client/models/preview.py +48 -1
  17. blaxel/core/client/models/sandbox.py +10 -0
  18. blaxel/core/common/settings.py +5 -0
  19. blaxel/core/jobs/__init__.py +60 -88
  20. blaxel/core/sandbox/__init__.py +12 -0
  21. blaxel/core/{client/api/invitations/list_all_pending_invitations.py → sandbox/client/api/system/get_health.py} +26 -34
  22. blaxel/core/sandbox/client/api/system/post_upgrade.py +196 -0
  23. blaxel/core/sandbox/client/models/__init__.py +8 -0
  24. blaxel/core/sandbox/client/models/content_search_match.py +24 -25
  25. blaxel/core/sandbox/client/models/content_search_response.py +25 -29
  26. blaxel/core/sandbox/client/models/find_match.py +13 -14
  27. blaxel/core/sandbox/client/models/find_response.py +21 -24
  28. blaxel/core/sandbox/client/models/fuzzy_search_match.py +17 -19
  29. blaxel/core/sandbox/client/models/fuzzy_search_response.py +21 -24
  30. blaxel/core/sandbox/client/models/health_response.py +159 -0
  31. blaxel/core/sandbox/client/models/process_upgrade_state.py +20 -0
  32. blaxel/core/sandbox/client/models/upgrade_request.py +71 -0
  33. blaxel/core/sandbox/client/models/upgrade_status.py +125 -0
  34. blaxel/core/sandbox/default/__init__.py +2 -0
  35. blaxel/core/sandbox/default/filesystem.py +20 -6
  36. blaxel/core/sandbox/default/preview.py +48 -1
  37. blaxel/core/sandbox/default/process.py +66 -21
  38. blaxel/core/sandbox/default/sandbox.py +104 -6
  39. blaxel/core/sandbox/default/system.py +71 -0
  40. blaxel/core/sandbox/sync/__init__.py +2 -0
  41. blaxel/core/sandbox/sync/filesystem.py +19 -2
  42. blaxel/core/sandbox/sync/preview.py +50 -3
  43. blaxel/core/sandbox/sync/process.py +38 -15
  44. blaxel/core/sandbox/sync/sandbox.py +97 -5
  45. blaxel/core/sandbox/sync/system.py +71 -0
  46. blaxel/core/sandbox/types.py +212 -5
  47. blaxel/core/volume/volume.py +209 -4
  48. blaxel/langgraph/model.py +25 -14
  49. blaxel/langgraph/tools.py +15 -12
  50. blaxel/llamaindex/model.py +33 -24
  51. blaxel/llamaindex/tools.py +9 -4
  52. blaxel/pydantic/model.py +26 -12
  53. blaxel-0.2.37.dist-info/METADATA +569 -0
  54. {blaxel-0.2.35.dist-info → blaxel-0.2.37.dist-info}/RECORD +57 -47
  55. blaxel-0.2.35.dist-info/METADATA +0 -228
  56. /blaxel/core/{client/api/invitations → sandbox/client/api/system}/__init__.py +0 -0
  57. {blaxel-0.2.35.dist-info → blaxel-0.2.37.dist-info}/WHEEL +0 -0
  58. {blaxel-0.2.35.dist-info → blaxel-0.2.37.dist-info}/licenses/LICENSE +0 -0
blaxel/core/sandbox/sync/process.py
@@ -7,7 +7,12 @@ import httpx
 from ...common.settings import settings
 from ..client.models import ProcessResponse, SuccessResponse
 from ..client.models.process_request import ProcessRequest
-from ..types import ProcessRequestWithLog, ProcessResponseWithLog, SandboxConfiguration
+from ..types import (
+    ProcessRequestWithLog,
+    ProcessResponseWithLog,
+    SandboxConfiguration,
+    StreamHandle,
+)
 from .action import SyncSandboxAction


@@ -19,19 +24,35 @@ class SyncSandboxProcess(SyncSandboxAction):
         self,
         process_name: str,
         options: Dict[str, Callable[[str], None]] | None = None,
-    ) -> Dict[str, Callable[[], None]]:
+    ) -> StreamHandle:
+        """Stream logs from a process with automatic reconnection and deduplication.
+
+        Returns a StreamHandle that can be used as a context manager:
+
+            with sandbox.process.stream_logs(name, options) as handle:
+                # do something
+            # handle is automatically closed
+
+        Or manually:
+
+            handle = sandbox.process.stream_logs(name, options)
+            try:
+                # do something
+            finally:
+                handle.close()
+        """
         if options is None:
             options = {}
         reconnect_interval = 30
         is_running = threading.Event()
         is_running.set()
         seen_logs = set()
-        current_close = {"fn": None}
+        current_stream: StreamHandle | None = None
         timer_lock = threading.Lock()
-        reconnect_timer = {"t": None}
+        reconnect_timer: dict[str, threading.Timer | None] = {"t": None}

         def start_stream():
-            nonlocal current_close
+            nonlocal current_stream
             log_counter = [0]

             def make_dedup(cb_key: str):
@@ -52,9 +73,9 @@ class SyncSandboxProcess(SyncSandboxAction):
                 wrapped_options["on_stdout"] = make_dedup("on_stdout")
             if "on_stderr" in options:
                 wrapped_options["on_stderr"] = make_dedup("on_stderr")
-            if current_close["fn"]:
-                current_close["fn"]()
-            current_close["fn"] = self._stream_logs(process_name, wrapped_options)["close"]
+            if current_stream:
+                current_stream.close()
+            current_stream = self._stream_logs(process_name, wrapped_options)

         def schedule():
             if is_running.is_set():
@@ -71,23 +92,25 @@ class SyncSandboxProcess(SyncSandboxAction):
         start_stream()

         def close():
+            nonlocal current_stream
             is_running.clear()
             with timer_lock:
                 if reconnect_timer["t"]:
                     reconnect_timer["t"].cancel()
                     reconnect_timer["t"] = None
-            if current_close["fn"]:
-                current_close["fn"]()
-                current_close["fn"] = None
+            if current_stream:
+                current_stream.close()
+                current_stream = None
             seen_logs.clear()

-        return {"close": close}
+        return StreamHandle(close)

     def _stream_logs(
         self,
         identifier: str,
         options: Dict[str, Callable[[str], None]] | None = None,
-    ) -> Dict[str, Callable[[], None]]:
+    ) -> StreamHandle:
+        """Private method to stream logs from a process with callbacks for different output types."""
         if options is None:
             options = {}
         closed = threading.Event()
@@ -136,7 +159,7 @@ class SyncSandboxProcess(SyncSandboxAction):
         def close():
             closed.set()

-        return {"close": close}
+        return StreamHandle(close)

     def exec(
         self,
@@ -191,7 +214,7 @@ class SyncSandboxProcess(SyncSandboxAction):
            )
            return ProcessResponseWithLog(
                result,
-               lambda: stream_control["close"]() if stream_control else None,
+               lambda: stream_control.close() if stream_control else None,
            )

        return result
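The stream_logs change above replaces the old {"close": ...} return value with a StreamHandle. A minimal usage sketch (illustrative only, not part of the diff; the import path, sandbox name, and process name are assumptions):

# Illustrative sketch only; "my-sandbox" and "my-process" are placeholders.
from blaxel.core.sandbox.sync.sandbox import SyncSandboxInstance

sandbox = SyncSandboxInstance.get("my-sandbox")
options = {"on_stdout": print, "on_stderr": print}  # per-line callbacks, as in the diff above

# New style: the returned StreamHandle is a context manager and closes itself on exit.
with sandbox.process.stream_logs("my-process", options) as handle:
    pass  # consume logs while the block runs

# Old style keeps working because StreamHandle still supports dict-style access to "close".
handle = sandbox.process.stream_logs("my-process", options)
handle["close"]()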
blaxel/core/sandbox/sync/sandbox.py
@@ -8,10 +8,17 @@ from ...client.api.compute.get_sandbox import sync as get_sandbox
 from ...client.api.compute.list_sandboxes import sync as list_sandboxes
 from ...client.api.compute.update_sandbox import sync as update_sandbox
 from ...client.client import client
-from ...client.models import Metadata, Sandbox, SandboxRuntime, SandboxSpec
+from ...client.models import (
+    Metadata,
+    Sandbox,
+    SandboxLifecycle,
+    SandboxRuntime,
+    SandboxSpec,
+)
 from ...client.models.error import Error
 from ...client.models.sandbox_error import SandboxError
 from ...client.types import UNSET
+from ...common.settings import settings
 from ..default.sandbox import SandboxAPIError
 from ..types import (
     SandboxConfiguration,
@@ -25,6 +32,7 @@ from .network import SyncSandboxNetwork
 from .preview import SyncSandboxPreviews
 from .process import SyncSandboxProcess
 from .session import SyncSandboxSessions
+from .system import SyncSandboxSystem

 logger = logging.getLogger(__name__)

@@ -72,6 +80,7 @@ class SyncSandboxInstance:
         self.sessions = SyncSandboxSessions(self.config)
         self.network = SyncSandboxNetwork(self.config)
         self.codegen = SyncSandboxCodegen(self.config)
+        self.system = SyncSandboxSystem(self.config)

     @property
     def metadata(self):
@@ -89,6 +98,14 @@ class SyncSandboxInstance:
     def spec(self):
         return self.sandbox.spec

+    @property
+    def last_used_at(self):
+        return self.sandbox.last_used_at
+
+    @property
+    def expires_in(self):
+        return self.sandbox.expires_in
+
     def wait(self, max_wait: int = 60000, interval: int = 1000) -> "SyncSandboxInstance":
         logger.warning(
             "⚠️ Warning: sandbox.wait() is deprecated. You don't need to wait for the sandbox to be deployed anymore."
@@ -144,7 +161,7 @@ class SyncSandboxInstance:
             volumes = config._normalize_volumes() or UNSET
             ttl = config.ttl
             expires = config.expires
-            region = config.region
+            region = config.region or settings.region
             lifecycle = config.lifecycle
             sandbox = Sandbox(
                 metadata=Metadata(name=name, labels=config.labels),
@@ -158,17 +175,26 @@ class SyncSandboxInstance:
                     volumes=volumes,
                 ),
             )
-            if ttl:
+            if ttl and sandbox.spec.runtime:
                 sandbox.spec.runtime.ttl = ttl
-            if expires:
+            if expires and sandbox.spec.runtime:
                 sandbox.spec.runtime.expires = expires.isoformat()
             if region:
                 sandbox.spec.region = region
             if lifecycle:
-                sandbox.spec.lifecycle = lifecycle
+                if type(lifecycle) is dict:
+                    lifecycle = SandboxLifecycle.from_dict(lifecycle)
+                    assert lifecycle is not None
+                    sandbox.spec.lifecycle = lifecycle
+                elif type(lifecycle) is SandboxLifecycle:
+                    sandbox.spec.lifecycle = lifecycle
+                else:
+                    raise ValueError(f"Invalid lifecycle type: {type(lifecycle)}")
         else:
             if isinstance(sandbox, dict):
                 sandbox = Sandbox.from_dict(sandbox)
+            assert isinstance(sandbox, Sandbox)
+
             if not sandbox.metadata:
                 sandbox.metadata = Metadata(name=default_name)
             if not sandbox.spec:
@@ -250,6 +276,72 @@ class SyncSandboxInstance:
         )
         return cls(response)

+    @classmethod
+    def update_ttl(cls, sandbox_name: str, ttl: str) -> "SyncSandboxInstance":
+        """Update sandbox TTL without recreating it.
+
+        Args:
+            sandbox_name: The name of the sandbox to update
+            ttl: The new TTL value (e.g., "5m", "1h", "30s")
+
+        Returns:
+            A new SyncSandboxInstance with updated TTL
+        """
+        # Get the existing sandbox
+        sandbox_instance = cls.get(sandbox_name)
+        sandbox = sandbox_instance.sandbox
+
+        # Prepare the updated sandbox object
+        updated_sandbox = Sandbox.from_dict(sandbox.to_dict())
+        if updated_sandbox.spec is None or updated_sandbox.spec.runtime is None:
+            raise ValueError(f"Sandbox {sandbox_name} has invalid spec")
+
+        # Update TTL
+        updated_sandbox.spec.runtime.ttl = ttl
+
+        # Call the update API
+        response = update_sandbox(
+            sandbox_name=sandbox_name,
+            client=client,
+            body=updated_sandbox,
+        )
+
+        return cls(response)
+
+    @classmethod
+    def update_lifecycle(
+        cls, sandbox_name: str, lifecycle: SandboxLifecycle
+    ) -> "SyncSandboxInstance":
+        """Update sandbox lifecycle configuration without recreating it.
+
+        Args:
+            sandbox_name: The name of the sandbox to update
+            lifecycle: The new lifecycle configuration
+
+        Returns:
+            A new SyncSandboxInstance with updated lifecycle
+        """
+        # Get the existing sandbox
+        sandbox_instance = cls.get(sandbox_name)
+        sandbox = sandbox_instance.sandbox
+
+        # Prepare the updated sandbox object
+        updated_sandbox = Sandbox.from_dict(sandbox.to_dict())
+        if updated_sandbox.spec is None:
+            raise ValueError(f"Sandbox {sandbox_name} has invalid spec")
+
+        # Update lifecycle
+        updated_sandbox.spec.lifecycle = lifecycle
+
+        # Call the update API
+        response = update_sandbox(
+            sandbox_name=sandbox_name,
+            client=client,
+            body=updated_sandbox,
+        )
+
+        return cls(response)
+
     @classmethod
     def create_if_not_exists(
         cls, sandbox: Union[Sandbox, SandboxCreateConfiguration, Dict[str, Any]]
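The new update_ttl and update_lifecycle classmethods above change a running sandbox in place instead of recreating it. A hedged usage sketch (illustrative only, not part of the diff; the sandbox name and TTL string are placeholders):

# Illustrative sketch only; "my-sandbox" and "1h" are placeholders.
from blaxel.core.sandbox.sync.sandbox import SyncSandboxInstance

# Extend the TTL of an existing sandbox without recreating it.
updated = SyncSandboxInstance.update_ttl("my-sandbox", "1h")
print(updated.spec.runtime.ttl)  # the returned instance wraps the updated Sandbox resource

# update_lifecycle works the same way but takes a SandboxLifecycle model from the generated client;
# its fields are not shown in this diff, so they are omitted here.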
blaxel/core/sandbox/sync/system.py
@@ -0,0 +1,71 @@
+from typing import Union
+
+from ...common.settings import settings
+from ..client.api.system.get_health import sync as get_health
+from ..client.api.system.post_upgrade import sync as post_upgrade
+from ..client.client import Client
+from ..client.models import ErrorResponse, HealthResponse, SuccessResponse, UpgradeRequest
+from ..types import SandboxConfiguration
+from .action import SyncSandboxAction
+
+
+class SyncSandboxSystem(SyncSandboxAction):
+    """System operations for sandbox including upgrade functionality (sync version)."""
+
+    def __init__(self, sandbox_config: SandboxConfiguration):
+        super().__init__(sandbox_config)
+
+    def upgrade(
+        self,
+        version: str | None = None,
+        base_url: str | None = None,
+    ) -> SuccessResponse:
+        """Upgrade the sandbox-api to a new version.
+
+        Triggers an upgrade of the sandbox-api process. Returns immediately before upgrading.
+        The upgrade will: download the specified binary from GitHub releases, validate it, and restart.
+        All running processes will be preserved across the upgrade.
+
+        Args:
+            version: Version to upgrade to - "develop" (default), "main", "latest",
+                or specific tag like "v1.0.0"
+            base_url: Base URL for releases (useful for forks, defaults to
+                https://github.com/blaxel-ai/sandbox/releases)
+
+        Returns:
+            SuccessResponse with status information
+        """
+        request = UpgradeRequest(version=version, base_url=base_url)
+
+        client = Client(
+            base_url=self.url,
+            headers={**settings.headers, **self.sandbox_config.headers},
+        )
+
+        with client:
+            response = post_upgrade(client=client, body=request)
+            if response is None:
+                raise Exception("Failed to upgrade sandbox")
+            if isinstance(response, ErrorResponse):
+                raise Exception(f"Upgrade failed: {response.error}")
+            return response
+
+    def health(self) -> HealthResponse:
+        """Get health status and system information.
+
+        Returns health status and system information including upgrade count and binary details.
+        Also includes last upgrade attempt status with detailed error information if available.
+
+        Returns:
+            HealthResponse with system status information
+        """
+        client = Client(
+            base_url=self.url,
+            headers={**settings.headers, **self.sandbox_config.headers},
+        )
+
+        with client:
+            response = get_health(client=client)
+            if response is None:
+                raise Exception("Failed to get health status")
+            return response
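The new SyncSandboxSystem above is exposed as sandbox.system (wired up in the sandbox.py diff). A hedged usage sketch (illustrative only, not part of the diff; the sandbox name is a placeholder):

# Illustrative sketch only; "my-sandbox" is a placeholder.
from blaxel.core.sandbox.sync.sandbox import SyncSandboxInstance

sandbox = SyncSandboxInstance.get("my-sandbox")

# Trigger an upgrade of the sandbox-api binary; the call returns before the upgrade completes.
sandbox.system.upgrade(version="latest")

# Poll health afterwards to inspect the last upgrade attempt and binary details.
health = sandbox.system.health()
print(health)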
blaxel/core/sandbox/types.py
@@ -4,7 +4,14 @@ from typing import Any, Callable, Dict, List, TypeVar, Union
 import httpx
 from attrs import define as _attrs_define

-from ..client.models import Port, Sandbox, SandboxLifecycle, VolumeAttachment
+from ..client.models import (
+    Env,
+    Port,
+    PortProtocol,
+    Sandbox,
+    SandboxLifecycle,
+    VolumeAttachment,
+)
 from ..client.types import UNSET
 from .client.models.process_request import ProcessRequest
 from .client.models.process_response import ProcessResponse
@@ -205,7 +212,7 @@ class SandboxCreateConfiguration:
             if isinstance(port, Port):
                 # If it's already a Port object, ensure protocol defaults to HTTP
                 if port.protocol is UNSET or not port.protocol:
-                    port.protocol = "HTTP"
+                    port.protocol = PortProtocol.HTTP
                 port_objects.append(port)
             elif isinstance(port, dict):
                 # Convert dict to Port object with HTTP as default protocol
@@ -218,20 +225,22 @@ class SandboxCreateConfiguration:

         return port_objects

-    def _normalize_envs(self) -> List[Dict[str, str]] | None:
+    def _normalize_envs(self) -> List[Env] | None:
         """Convert envs to list of dicts with name and value keys."""
         if not self.envs:
             return None

         env_objects = []
         for env in self.envs:
-            if isinstance(env, dict):
+            if isinstance(env, Env):
+                env_objects.append(env)
+            elif isinstance(env, dict):
                 # Validate that the dict has the required keys
                 if "name" not in env or "value" not in env:
                     raise ValueError(
                         f"Environment variable dict must have 'name' and 'value' keys: {env}"
                     )
-                env_objects.append({"name": env["name"], "value": env["value"]})
+                env_objects.append(Env(name=env["name"], value=env["value"]))
             else:
                 raise ValueError(
                     f"Invalid env type: {type(env)}. Expected dict with 'name' and 'value' keys."
@@ -385,3 +394,201 @@ class Context:
     @classmethod
     def from_json(cls, data: Dict[str, Any]) -> "Context":
         return cls(id=str(data.get("id") or data.get("context_id") or ""))
+
+
+class StreamHandle:
+    """Handle for managing a streaming operation (sync version).
+
+    Can be used as a context manager for automatic cleanup:
+
+        with sandbox.process.stream_logs(name, options) as handle:
+            # do something
+        # handle is automatically closed
+
+    Or used manually:
+
+        handle = sandbox.process.stream_logs(name, options)
+        try:
+            # do something
+        finally:
+            handle.close()
+    """
+
+    def __init__(self, close_func: Callable[[], None]):
+        self._close_func = close_func
+        self._closed = False
+
+    def close(self) -> None:
+        """Close the stream and stop receiving data."""
+        if not self._closed:
+            self._close_func()
+            self._closed = True
+
+    @property
+    def closed(self) -> bool:
+        """Returns True if the stream handle has been closed."""
+        return self._closed
+
+    def __enter__(self) -> "StreamHandle":
+        return self
+
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Backward compatibility: support dict-like access
+    def __getitem__(self, key: str) -> Callable[[], None]:
+        if key == "close":
+            return self.close
+        raise KeyError(key)
+
+
+class AsyncStreamHandle:
+    """Handle for managing a streaming operation (async version).
+
+    Can be used as an async context manager for automatic cleanup:
+
+        async with sandbox.process.stream_logs(name, options) as handle:
+            # do something
+        # handle is automatically closed
+
+    Or used manually:
+
+        handle = sandbox.process.stream_logs(name, options)
+        try:
+            # do something
+        finally:
+            handle.close()
+    """
+
+    def __init__(self, close_func: Callable[[], None]):
+        self._close_func = close_func
+        self._closed = False
+
+    def close(self) -> None:
+        """Close the stream and stop receiving data."""
+        if not self._closed:
+            self._close_func()
+            self._closed = True
+
+    @property
+    def closed(self) -> bool:
+        """Returns True if the stream handle has been closed."""
+        return self._closed
+
+    async def __aenter__(self) -> "AsyncStreamHandle":
+        return self
+
+    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Also support sync context manager for convenience
+    def __enter__(self) -> "AsyncStreamHandle":
+        return self
+
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Backward compatibility: support dict-like access
+    def __getitem__(self, key: str) -> Callable[[], None]:
+        if key == "close":
+            return self.close
+        raise KeyError(key)
+
+
+class WatchHandle:
+    """Handle for managing a file system watch operation (sync version).
+
+    Can be used as a context manager for automatic cleanup:
+
+        with sandbox.fs.watch(path, callback) as handle:
+            # do something
+        # handle is automatically closed
+
+    Or used manually:
+
+        handle = sandbox.fs.watch(path, callback)
+        try:
+            # do something
+        finally:
+            handle.close()
+    """
+
+    def __init__(self, close_func: Callable[[], None]):
+        self._close_func = close_func
+        self._closed = False
+
+    def close(self) -> None:
+        """Close the watch and stop receiving events."""
+        if not self._closed:
+            self._close_func()
+            self._closed = True
+
+    @property
+    def closed(self) -> bool:
+        """Returns True if the watch handle has been closed."""
+        return self._closed
+
+    def __enter__(self) -> "WatchHandle":
+        return self
+
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Backward compatibility: support dict-like access
+    def __getitem__(self, key: str) -> Callable[[], None]:
+        if key == "close":
+            return self.close
+        raise KeyError(key)
+
+
+class AsyncWatchHandle:
+    """Handle for managing a file system watch operation (async version).
+
+    Can be used as an async context manager for automatic cleanup:
+
+        async with sandbox.fs.watch(path, callback) as handle:
+            # do something
+        # handle is automatically closed
+
+    Or used manually:
+
+        handle = sandbox.fs.watch(path, callback)
+        try:
+            # do something
+        finally:
+            handle.close()
+    """
+
+    def __init__(self, close_func: Callable[[], None]):
+        self._close_func = close_func
+        self._closed = False
+
+    def close(self) -> None:
+        """Close the watch and stop receiving events."""
+        if not self._closed:
+            self._close_func()
+            self._closed = True
+
+    @property
+    def closed(self) -> bool:
+        """Returns True if the watch handle has been closed."""
+        return self._closed
+
+    async def __aenter__(self) -> "AsyncWatchHandle":
+        return self
+
+    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Also support sync context manager for convenience
+    def __enter__(self) -> "AsyncWatchHandle":
+        return self
+
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        self.close()
+
+    # Backward compatibility: support dict-like access
+    def __getitem__(self, key: str) -> Callable[[], None]:
+        if key == "close":
+            return self.close
+        raise KeyError(key)
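The handle classes above keep the old {"close": ...} dict contract working while adding context-manager support. A sketch of both styles against the fs.watch call named in the WatchHandle docstring (illustrative only, not part of the diff; the path, sandbox name, and callback are placeholders):

# Illustrative sketch only; "my-sandbox" and "/tmp" are placeholders.
from blaxel.core.sandbox.sync.sandbox import SyncSandboxInstance

sandbox = SyncSandboxInstance.get("my-sandbox")

def on_event(event):
    print(event)  # receives file system events from the watch

# Preferred: the WatchHandle context manager closes the watch on exit.
with sandbox.fs.watch("/tmp", on_event) as handle:
    assert not handle.closed

# Backward compatible: the old dict-style access still works.
handle = sandbox.fs.watch("/tmp", on_event)
handle["close"]()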