hypha-rpc 0.20.92__py3-none-any.whl → 0.20.94__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hypha_rpc/VERSION CHANGED
@@ -1,3 +1,3 @@
 {
-  "version": "0.20.92"
+  "version": "0.20.94"
 }
hypha_rpc/__init__.py CHANGED
@@ -28,11 +28,12 @@ from .http_client import HTTPStreamingRPCConnection
 with open(os.path.join(os.path.dirname(__file__), "VERSION"), "r") as f:
     __version__ = json.load(f)["version"]
 
+
 def is_user_defined_class_instance(obj):
     return (
-        not isinstance(obj, type) and # not a class itself
-        hasattr(obj, "__class__") and
-        obj.__class__.__module__ != "builtins" # not a built-in type
+        not isinstance(obj, type)  # not a class itself
+        and hasattr(obj, "__class__")
+        and obj.__class__.__module__ != "builtins"  # not a built-in type
     )
 
 
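Note: the operator-first line breaks above are purely stylistic (Black formatting); the predicate's behavior is unchanged. A self-contained sanity check of that behavior (the sample class is illustrative):

    def is_user_defined_class_instance(obj):
        return (
            not isinstance(obj, type)  # not a class itself
            and hasattr(obj, "__class__")
            and obj.__class__.__module__ != "builtins"  # not a built-in type
        )

    class Greeter:
        pass

    assert is_user_defined_class_instance(Greeter())    # instance of a user class
    assert not is_user_defined_class_instance(Greeter)  # the class object itself
    assert not is_user_defined_class_instance("text")   # built-in type instance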
@@ -41,23 +42,34 @@ class API(ObjectProxy):
         super().__init__(*args, **kwargs)
         self._registry = {}
         self._export_handler = self._default_export_handler
-
+
     async def _register_services(self, obj, config=None, **kwargs):
         if not os.environ.get("HYPHA_SERVER_URL"):
             try:
                 from dotenv import load_dotenv, find_dotenv
+
                 load_dotenv(dotenv_path=find_dotenv(usecwd=True))
                 # use info from .env file
                 print("✅ Loaded connection configuration from .env file.")
             except ImportError:
-                print("❌ Missing environment variables. Set HYPHA_SERVER_URL, HYPHA_TOKEN, HYPHA_WORKSPACE", file=sys.stderr)
+                print(
+                    "❌ Missing environment variables. Set HYPHA_SERVER_URL, HYPHA_TOKEN, HYPHA_WORKSPACE",
+                    file=sys.stderr,
+                )
                 sys.exit(1)
         SERVER_URL = os.environ.get("HYPHA_SERVER_URL")
         TOKEN = os.environ.get("HYPHA_TOKEN")
         CLIENT_ID = os.environ.get("HYPHA_CLIENT_ID")
         WORKSPACE = os.environ.get("HYPHA_WORKSPACE")
 
-        server = await connect_to_server({"client_id": CLIENT_ID, "server_url": SERVER_URL, "token": TOKEN, "workspace": WORKSPACE})
+        server = await connect_to_server(
+            {
+                "client_id": CLIENT_ID,
+                "server_url": SERVER_URL,
+                "token": TOKEN,
+                "workspace": WORKSPACE,
+            }
+        )
         # If obj is a class, instantiate it
         if isinstance(obj, type):
             obj = obj()
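For reference, the same configuration keys read from the environment above can be passed to connect_to_server directly. A hedged sketch with placeholder values (connect_to_server is the public API exported by hypha_rpc):

    import asyncio
    from hypha_rpc import connect_to_server

    async def main():
        server = await connect_to_server(
            {
                "server_url": "https://hypha.example.org",  # placeholder URL
                "token": "<HYPHA_TOKEN>",                   # placeholder token
                "workspace": "<HYPHA_WORKSPACE>",           # placeholder workspace
            }
        )
        print("Connected")  # the returned object exposes the workspace API

    asyncio.run(main())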
@@ -96,7 +108,6 @@ class API(ObjectProxy):
             asyncio.create_task(self._register_services(obj, config, **kwargs))
         else:
             asyncio.run(self._register_services(obj, config, **kwargs))
-
 
     def set_export_handler(self, handler):
         self._export_handler = handler
hypha_rpc/http_client.py CHANGED
@@ -134,7 +134,9 @@ class HTTPStreamingRPCConnection:
                 if response.status_code == 200:
                     logger.debug("Token refresh requested successfully")
                 else:
-                    logger.warning(f"Token refresh request failed: {response.status_code}")
+                    logger.warning(
+                        f"Token refresh request failed: {response.status_code}"
+                    )
             except Exception as e:
                 logger.warning(f"Failed to send refresh token request: {e}")
 
@@ -174,27 +176,43 @@ class HTTPStreamingRPCConnection:
         elif self._ssl is not None:
             verify = self._ssl
 
+        # Try to enable HTTP/2 if h2 is available
+        try:
+            import h2  # noqa
+
+            http2_enabled = True
+            logger.info("HTTP/2 enabled for improved performance")
+        except ImportError:
+            http2_enabled = False
+            logger.debug(
+                "HTTP/2 not available (install httpx[http2] for better performance)"
+            )
+
         return httpx.AsyncClient(
             timeout=httpx.Timeout(self._timeout, connect=30.0),
             verify=verify,
-            # Connection pooling for better performance with many requests
+            # Optimized connection pooling for high-performance RPC
             limits=httpx.Limits(
-                max_connections=100,  # Max total connections
-                max_keepalive_connections=20,  # Keep-alive connections for reuse
-                keepalive_expiry=30.0,  # Keep connections alive for 30 seconds
+                max_connections=200,  # Max total connections (increased for parallel requests)
+                max_keepalive_connections=50,  # More reusable connections (up from 20)
+                keepalive_expiry=300.0,  # Keep connections alive longer (5 minutes)
             ),
+            # Enable HTTP/2 for better multiplexing if available
+            http2=http2_enabled,
         )
 
     async def open(self):
         """Open the streaming connection."""
-        logger.info(f"Opening HTTP streaming connection to {self._server_url} (format={self._format})")
+        logger.info(
+            f"Opening HTTP streaming connection to {self._server_url} (format={self._format})"
+        )
 
         if self._http_client is None:
             self._http_client = await self._create_http_client()
 
-        # Build stream URL
-        workspace = self._workspace or "public"
-        stream_url = f"{self._server_url}/{workspace}/rpc"
+        # Build stream URL - workspace is part of path, default to "public" for anonymous
+        ws = self._workspace or "public"
+        stream_url = f"{self._server_url}/{ws}/rpc"
         params = {"client_id": self._client_id}
         if self._format == "msgpack":
             params["format"] = "msgpack"
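The HTTP/2 toggle introduced above degrades gracefully: httpx only accepts http2=True when the h2 package is importable, which is what the try/except probes. A minimal standalone sketch of the same pattern (the timeout value is a placeholder):

    import httpx

    try:
        import h2  # noqa: F401 - only present with the httpx[http2] extra
        http2_enabled = True
    except ImportError:
        http2_enabled = False

    client = httpx.AsyncClient(
        timeout=httpx.Timeout(60.0, connect=30.0),  # placeholder timeout
        limits=httpx.Limits(
            max_connections=200,
            max_keepalive_connections=50,
            keepalive_expiry=300.0,  # seconds
        ),
        http2=http2_enabled,  # httpx raises an error if True and h2 is missing
    )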
@@ -346,15 +364,15 @@ class HTTPStreamingRPCConnection:
             # Process complete frames from buffer
             while len(buffer) >= 4:
                 # Read 4-byte length prefix (big-endian)
-                length = int.from_bytes(buffer[:4], 'big')
+                length = int.from_bytes(buffer[:4], "big")
 
                 if len(buffer) < 4 + length:
                     # Incomplete frame, wait for more data
                     break
 
                 # Extract the frame
-                frame_data = buffer[4:4 + length]
-                buffer = buffer[4 + length:]
+                frame_data = buffer[4 : 4 + length]
+                buffer = buffer[4 + length :]
 
                 try:
                     # For msgpack, first check if it's a control message
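The reader above consumes a length-prefixed stream: each frame is a 4-byte big-endian length followed by that many payload bytes. A hedged sketch of both directions (helper names are illustrative, not part of the package API):

    def pack_frame(payload: bytes) -> bytes:
        """Prefix a payload with its 4-byte big-endian length."""
        return len(payload).to_bytes(4, "big") + payload

    def unpack_frames(buffer: bytes):
        """Split a buffer into complete frames plus the unconsumed remainder."""
        frames = []
        while len(buffer) >= 4:
            length = int.from_bytes(buffer[:4], "big")
            if len(buffer) < 4 + length:
                break  # incomplete frame, wait for more data
            frames.append(buffer[4 : 4 + length])
            buffer = buffer[4 + length :]
        return frames, buffer

    frames, rest = unpack_frames(pack_frame(b"hello") + b"\x00\x00")
    assert frames == [b"hello"] and rest == b"\x00\x00"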
@@ -418,18 +436,24 @@ class HTTPStreamingRPCConnection:
             self._handle_message(data)
 
     async def emit_message(self, data: bytes):
-        """Send a message to the server via HTTP POST."""
+        """Send a message to the server via HTTP POST.
+
+        Uses optimized connection pooling with keep-alive for better performance.
+        HTTP client automatically handles efficient transfer for all payload sizes.
+        """
         if self._closed:
             raise ConnectionError("Connection is closed")
 
         if self._http_client is None:
             self._http_client = await self._create_http_client()
 
-        workspace = self._workspace or "public"
-        url = f"{self._server_url}/{workspace}/rpc"
+        # Build POST URL - workspace is part of path (must be set after connection)
+        ws = self._workspace or "public"
+        url = f"{self._server_url}/{ws}/rpc"
         params = {"client_id": self._client_id}
 
         try:
+            # httpx handles large payloads efficiently with connection pooling
            response = await self._http_client.post(
                url,
                content=data,
@@ -438,7 +462,9 @@ class HTTPStreamingRPCConnection:
            )
 
            if response.status_code != 200:
-                error = response.json() if response.content else {"detail": "Unknown error"}
+                error = (
+                    response.json() if response.content else {"detail": "Unknown error"}
+                )
                raise ConnectionError(f"POST failed: {error.get('detail', error)}")
 
        except httpx.TimeoutException:
@@ -512,6 +538,7 @@ def connect_to_server_http(config=None, **kwargs):
        ServerContextManager that can be used as async context manager
    """
    from .websocket_client import connect_to_server
+
    config = config or {}
    config.update(kwargs)
    config["transport"] = "http"
@@ -609,6 +636,7 @@ async def _connect_to_server_http(config: dict):
 
    # Handle force-exit from manager
    if connection.manager_id:
+
        async def handle_disconnect(message):
            if message.get("from") == "*/" + connection.manager_id:
                logger.info(f"Disconnecting from server: {message.get('reason')}")
@@ -626,6 +654,7 @@ def get_remote_service_http(service_uri: str, config=None, **kwargs):
    For a unified interface, use get_remote_service with transport="http" instead.
    """
    from .websocket_client import get_remote_service
+
    config = config or {}
    config.update(kwargs)
    config["transport"] = "http"
hypha_rpc/rpc.py CHANGED
@@ -458,7 +458,10 @@ class RemoteFunction:
        timer = None
        if self._with_promise:
            main_message["session"] = local_session_id
-            method_name = f"{self._encoded_method['_rtarget']}:{self._encoded_method['_rmethod']}"
+            method_name = (
+                f"{self._encoded_method['_rtarget']}:{self._encoded_method['_rmethod']}"
+            )
+
            # Timer will be started after message is sent
            # Heartbeat will keep resetting it, allowing methods to run indefinitely
            # IMPORTANT: When timeout occurs, we must clean up the session to prevent memory leaks
@@ -472,7 +475,7 @@
                if local_session_id in self._rpc._object_store:
                    del self._rpc._object_store[local_session_id]
                    logger.debug(f"Cleaned up session {local_session_id} after timeout")
-
+
            timer = Timer(
                self._rpc._method_timeout,
                timeout_callback,
@@ -486,9 +489,7 @@
            if isinstance(obj, dict):
                if obj.get("_rintf") == True:
                    return True
-                return any(
-                    has_interface_object(value) for value in obj.values()
-                )
+                return any(has_interface_object(value) for value in obj.values())
            elif isinstance(obj, (list, tuple)):
                return any(has_interface_object(item) for item in obj)
            return False
@@ -516,14 +517,15 @@
        if extra_data:
            message_package = message_package + msgpack.packb(extra_data)
        total_size = len(message_package)
-        if (
-            total_size <= self._rpc._long_message_chunk_size + 1024
-            or self.__no_chunk__
-        ):
+        if total_size <= self._rpc._long_message_chunk_size + 1024 or self.__no_chunk__:
            emit_task = asyncio.create_task(self._rpc._emit_message(message_package))
        else:
            emit_task = asyncio.create_task(
-                self._rpc._send_chunks(message_package, self._encoded_method["_rtarget"], self._remote_parent)
+                self._rpc._send_chunks(
+                    message_package,
+                    self._encoded_method["_rtarget"],
+                    self._remote_parent,
+                )
            )
        background_tasks.add(emit_task)
 
@@ -543,6 +545,7 @@
            else:
                if timer:
                    timer.start()
+
        emit_task.add_done_callback(handle_result)
        return fut
 
@@ -552,6 +555,7 @@
    def __str__(self):
        return self.__repr__()
 
+
background_tasks = set()
 
 
@@ -596,18 +600,23 @@ class RPC(MessageEmitter):
 
        # Set up exception handler for unhandled asyncio futures
        def handle_exception(loop, context):
-            exception = context.get('exception')
+            exception = context.get("exception")
            if isinstance(exception, Exception):
                # Check if this is a "Method not found" error that we can ignore
-                if "Method not found" in str(exception) or "Session not found" in str(exception):
-                    logger.debug("Ignoring expected method/session not found error: %s", exception)
+                if "Method not found" in str(exception) or "Session not found" in str(
+                    exception
+                ):
+                    logger.debug(
+                        "Ignoring expected method/session not found error: %s",
+                        exception,
+                    )
                else:
                    logger.debug("Unhandled asyncio exception: %s", context)
            else:
                logger.debug("Unhandled asyncio exception: %s", context)
-
+
        # Only set the exception handler if we haven't already set one
-        if not hasattr(self.loop, '_hypha_exception_handler_set'):
+        if not hasattr(self.loop, "_hypha_exception_handler_set"):
            self.loop.set_exception_handler(handle_exception)
            self.loop._hypha_exception_handler_set = True
 
@@ -677,7 +686,7 @@
                        service_info = self._extract_service_info(service)
                        await asyncio.wait_for(
                            manager.register_service(service_info),
-                            timeout=service_registration_timeout
+                            timeout=service_registration_timeout,
                        )
                        registered_count += 1
                        logger.debug(
@@ -712,39 +721,57 @@
                            "failed": failed_services,
                        },
                    )
-
+
                    # Subscribe to client_disconnected events if the manager supports it
                    try:
                        manager_dict = ObjectProxy.toDict(manager)
                        if "subscribe" in manager_dict:
-                            logger.debug("Subscribing to client_disconnected events")
+                            logger.debug(
+                                "Subscribing to client_disconnected events"
+                            )
 
                            async def handle_client_disconnected(event):
                                client_id = event.get("client")
                                if client_id:
-                                    logger.debug(f"Client {client_id} disconnected, cleaning up sessions")
-                                    await self._handle_client_disconnected(client_id)
+                                    logger.debug(
+                                        f"Client {client_id} disconnected, cleaning up sessions"
+                                    )
+                                    await self._handle_client_disconnected(
+                                        client_id
+                                    )
 
                            # Subscribe to the event topic first with timeout
-                            self._client_disconnected_subscription = await asyncio.wait_for(
-                                manager.subscribe(["client_disconnected"]),
-                                timeout=service_registration_timeout
+                            self._client_disconnected_subscription = (
+                                await asyncio.wait_for(
+                                    manager.subscribe(["client_disconnected"]),
+                                    timeout=service_registration_timeout,
+                                )
                            )
 
                            # Then register the local event handler
-                            self.on("client_disconnected", handle_client_disconnected)
+                            self.on(
+                                "client_disconnected", handle_client_disconnected
+                            )
 
-                            logger.debug("Successfully subscribed to client_disconnected events")
+                            logger.debug(
+                                "Successfully subscribed to client_disconnected events"
+                            )
                        else:
-                            logger.debug("Manager does not support subscribe method, skipping client_disconnected handling")
+                            logger.debug(
+                                "Manager does not support subscribe method, skipping client_disconnected handling"
+                            )
                            self._client_disconnected_subscription = None
                    except asyncio.TimeoutError:
-                        logger.warning("Timeout subscribing to client_disconnected events")
+                        logger.warning(
+                            "Timeout subscribing to client_disconnected events"
+                        )
                        self._client_disconnected_subscription = None
                    except Exception as subscribe_error:
-                        logger.warning(f"Failed to subscribe to client_disconnected events: {subscribe_error}")
+                        logger.warning(
+                            f"Failed to subscribe to client_disconnected events: {subscribe_error}"
+                        )
                        self._client_disconnected_subscription = None
-
+
                except Exception as manager_error:
                    logger.error(
                        f"Failed to get manager service for registering services: {manager_error}"
@@ -969,34 +996,34 @@
        # Clean up all pending sessions before closing
        self._cleanup_on_disconnect()
        self._close_sessions(self._object_store)
-
+
        # Clean up background tasks to prevent memory leaks
        # Cancel any background tasks that might be holding references to this RPC
-        if hasattr(self, '_background_task') and self._background_task:
+        if hasattr(self, "_background_task") and self._background_task:
            try:
                if not self._background_task.done():
                    self._background_task.cancel()
                background_tasks.discard(self._background_task)
            except Exception as e:
                logger.debug(f"Error cleaning up background task: {e}")
-
+
        # Clean up any other background tasks that might reference this RPC
        tasks_to_remove = []
        for task in list(background_tasks):
            try:
-                if hasattr(task, '_rpc_ref') and task._rpc_ref is self:
+                if hasattr(task, "_rpc_ref") and task._rpc_ref is self:
                    tasks_to_remove.append(task)
-                elif hasattr(task, 'get_coro') and task.get_coro():
+                elif hasattr(task, "get_coro") and task.get_coro():
                    # Check if task is related to this RPC by examining the coroutine
                    coro = task.get_coro()
-                    if hasattr(coro, 'cr_frame') and coro.cr_frame:
+                    if hasattr(coro, "cr_frame") and coro.cr_frame:
                        # Look for references to this RPC in the task's frame
                        frame_locals = coro.cr_frame.f_locals
-                        if 'self' in frame_locals and frame_locals['self'] is self:
+                        if "self" in frame_locals and frame_locals["self"] is self:
                            tasks_to_remove.append(task)
            except Exception as e:
                logger.debug(f"Error checking background task: {e}")
-
+
        for task in tasks_to_remove:
            try:
                if not task.done():
@@ -1004,49 +1031,57 @@
                background_tasks.discard(task)
            except Exception as e:
                logger.debug(f"Error removing background task: {e}")
-
+
        # Remove the local event handler for client_disconnected
        # Note: Actual unsubscription from server is done in async disconnect() method
-        if hasattr(self, '_client_disconnected_subscription') and self._client_disconnected_subscription:
+        if (
+            hasattr(self, "_client_disconnected_subscription")
+            and self._client_disconnected_subscription
+        ):
            try:
                # Remove the local event handler
                self.off("client_disconnected")
            except Exception as e:
                logger.debug(f"Error removing client_disconnected handler: {e}")
-
+
        # Clear connection reference to break circular references
-        if hasattr(self, '_connection'):
+        if hasattr(self, "_connection"):
            self._connection = None
-
+
        # Clear emit_message reference to break circular references
-        if hasattr(self, '_emit_message'):
+        if hasattr(self, "_emit_message"):
            self._emit_message = None
-
+
        self._fire("disconnected")
 
    async def disconnect(self):
        """Disconnect."""
        # Store connection reference before closing for unsubscribe
-        connection = getattr(self, '_connection', None)
+        connection = getattr(self, "_connection", None)
        manager_id = connection.manager_id if connection else None
 
        # Unsubscribe from client_disconnected events before closing
-        if hasattr(self, '_client_disconnected_subscription') and self._client_disconnected_subscription:
+        if (
+            hasattr(self, "_client_disconnected_subscription")
+            and self._client_disconnected_subscription
+        ):
            try:
                if connection and manager_id:
                    manager = await asyncio.wait_for(
-                        self.get_remote_service(f"*/{manager_id}"),
-                        timeout=5.0
+                        self.get_remote_service(f"*/{manager_id}"), timeout=5.0
                    )
-                    if hasattr(manager, 'unsubscribe') and callable(manager.unsubscribe):
+                    if hasattr(manager, "unsubscribe") and callable(
+                        manager.unsubscribe
+                    ):
                        if asyncio.iscoroutinefunction(manager.unsubscribe):
                            await asyncio.wait_for(
-                                manager.unsubscribe("client_disconnected"),
-                                timeout=5.0
+                                manager.unsubscribe("client_disconnected"), timeout=5.0
                            )
                        else:
                            manager.unsubscribe("client_disconnected")
-                        logger.debug("Successfully unsubscribed from client_disconnected events")
+                        logger.debug(
+                            "Successfully unsubscribed from client_disconnected events"
+                        )
            except asyncio.TimeoutError:
                logger.debug("Timeout unsubscribing from client_disconnected events")
            except Exception as e:
@@ -1062,92 +1097,105 @@
                await connection.disconnect()
            except Exception as e:
                logger.debug(f"Error disconnecting underlying connection: {e}")
-
+
    async def _handle_client_disconnected(self, client_id):
        """Handle cleanup when a remote client disconnects."""
        try:
            logger.debug(f"Handling disconnection for client: {client_id}")
-
+
            # Clean up all sessions for the disconnected client
            sessions_cleaned = self._cleanup_sessions_for_client(client_id)
-
+
            if sessions_cleaned > 0:
-                logger.debug(f"Cleaned up {sessions_cleaned} sessions for disconnected client: {client_id}")
-
+                logger.debug(
+                    f"Cleaned up {sessions_cleaned} sessions for disconnected client: {client_id}"
+                )
+
            # Fire an event to notify about the client disconnection
-            self._fire("remote_client_disconnected", {"client_id": client_id, "sessions_cleaned": sessions_cleaned})
-
+            self._fire(
+                "remote_client_disconnected",
+                {"client_id": client_id, "sessions_cleaned": sessions_cleaned},
+            )
+
        except Exception as e:
            logger.error(f"Error handling client disconnection for {client_id}: {e}")
-
+
    def _cleanup_sessions_for_client(self, client_id):
        """Clean up all sessions for a specific client."""
        sessions_cleaned = 0
-
+
        # Iterate through all top-level session keys
        for session_key in list(self._object_store.keys()):
            if session_key in ("services", "message_cache"):
                continue
-
+
            session = self._object_store.get(session_key)
            if not isinstance(session, dict):
                continue
-
+
            # Check if this session belongs to the disconnected client
            # Sessions have a target_id property that identifies which client they're calling
            if session.get("target_id") == client_id:
-                logger.debug(f"Found session {session_key} for disconnected client: {client_id}")
-
+                logger.debug(
+                    f"Found session {session_key} for disconnected client: {client_id}"
+                )
+
                # Reject any pending promises in this session
                if "reject" in session and callable(session["reject"]):
                    logger.debug(f"Rejecting session {session_key}")
                    try:
-                        session["reject"](RemoteException(f"Client disconnected: {client_id}"))
+                        session["reject"](
+                            RemoteException(f"Client disconnected: {client_id}")
+                        )
                    except Exception as e:
                        logger.warning(f"Error rejecting session {session_key}: {e}")
-
+
                if "resolve" in session and callable(session["resolve"]):
                    logger.debug(f"Resolving session {session_key} with error")
                    try:
-                        session["resolve"](RemoteException(f"Client disconnected: {client_id}"))
+                        session["resolve"](
+                            RemoteException(f"Client disconnected: {client_id}")
+                        )
                    except Exception as e:
                        logger.warning(f"Error resolving session {session_key}: {e}")
-
+
                # Clear any timers
                if session.get("timer"):
                    try:
                        session["timer"].clear()
                    except Exception as e:
                        logger.warning(f"Error clearing timer for {session_key}: {e}")
-
-                # Clear heartbeat tasks
+
+                # Clear heartbeat tasks
                if session.get("heartbeat_task"):
                    try:
                        session["heartbeat_task"].cancel()
                    except Exception as e:
-                        logger.warning(f"Error clearing heartbeat for {session_key}: {e}")
-
+                        logger.warning(
+                            f"Error clearing heartbeat for {session_key}: {e}"
+                        )
+
                # Remove the entire session
                del self._object_store[session_key]
                sessions_cleaned += 1
                logger.debug(f"Cleaned up session: {session_key}")
-
+
        return sessions_cleaned
-
+
    def _cleanup_on_disconnect(self):
        """Clean up all pending sessions when the local RPC disconnects."""
        try:
            logger.debug("Cleaning up all sessions due to local RPC disconnection")
-
+
            # Get all keys to delete (everything except services)
            keys_to_delete = []
-
+
            for key in list(self._object_store.keys()):
                if key == "services":
                    continue
-
+
                value = self._object_store.get(key)
-
+
                if isinstance(value, dict):
                    # Reject any pending promises
                    if "reject" in value and callable(value["reject"]):
@@ -1155,20 +1203,20 @@
                        value["reject"](RemoteException("RPC connection closed"))
                    except Exception as e:
                        logger.debug(f"Error rejecting promise during cleanup: {e}")
-
+
                    # Clean up timers and tasks
                    if value.get("heartbeat_task"):
                        value["heartbeat_task"].cancel()
                    if value.get("timer"):
                        value["timer"].clear()
-
+
                # Mark ALL keys for deletion except services
                keys_to_delete.append(key)
-
+
            # Delete all marked sessions
            for key in keys_to_delete:
                del self._object_store[key]
-
+
        except Exception as e:
            logger.error(f"Error during cleanup on disconnect: {e}")
 
@@ -1249,7 +1297,7 @@
            # allow access for the same workspace
            if context["ws"] == ws:
                return service
-
+
            # Check if user is from an authorized workspace
            authorized_workspaces = service["config"].get("authorized_workspaces")
            if authorized_workspaces and context["ws"] in authorized_workspaces:
@@ -1403,7 +1451,7 @@
            run_in_executor = True
        visibility = api["config"].get("visibility", "protected")
        assert visibility in ["protected", "public", "unlisted"]
-
+
        # Validate authorized_workspaces
        authorized_workspaces = api["config"].get("authorized_workspaces")
        if authorized_workspaces is not None:
@@ -1412,10 +1460,14 @@
                    f"authorized_workspaces can only be set when visibility is 'protected', got visibility='{visibility}'"
                )
            if not isinstance(authorized_workspaces, list):
-                raise ValueError("authorized_workspaces must be a list of workspace ids")
+                raise ValueError(
+                    "authorized_workspaces must be a list of workspace ids"
+                )
            for ws_id in authorized_workspaces:
                if not isinstance(ws_id, str):
-                    raise ValueError(f"Each workspace id in authorized_workspaces must be a string, got {type(ws_id)}")
+                    raise ValueError(
+                        f"Each workspace id in authorized_workspaces must be a string, got {type(ws_id)}"
+                    )
        self._annotate_service_methods(
            api,
            api["id"],
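For context, a registration that satisfies the validation above would look roughly like the following (a hedged sketch: `server` is assumed to come from connect_to_server, and the ids are placeholders):

    await server.register_service(
        {
            "id": "demo-service",  # placeholder id
            "config": {
                "visibility": "protected",  # required for authorized_workspaces
                "authorized_workspaces": ["partner-workspace"],  # list of str ids
            },
            "echo": lambda x: x,
        }
    )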
@@ -1443,7 +1495,16 @@
                "Workspace is not set. Please ensure the connection has a workspace or set local_workspace."
            )
        skip_context = config.get("require_context", False)
-        exclude_keys = ["id", "config", "name", "description", "type", "docs", "app_id", "service_schema"]
+        exclude_keys = [
+            "id",
+            "config",
+            "name",
+            "description",
+            "type",
+            "docs",
+            "app_id",
+            "service_schema",
+        ]
        filtered_service = {k: v for k, v in service.items() if k not in exclude_keys}
        service_schema = _get_schema(filtered_service, skip_context=skip_context)
        service_info = {
@@ -1627,14 +1688,19 @@
            if "_promise_manager" in store:
                try:
                    promise_manager = store["_promise_manager"]
-                    if hasattr(promise_manager, 'should_cleanup_on_callback') and \
-                        promise_manager.should_cleanup_on_callback(callback_name):
-                        if hasattr(promise_manager, 'settle'):
+                    if hasattr(
+                        promise_manager, "should_cleanup_on_callback"
+                    ) and promise_manager.should_cleanup_on_callback(callback_name):
+                        if hasattr(promise_manager, "settle"):
                            promise_manager.settle()
                        should_cleanup = True
-                        logger.debug(f"Promise session {session_id} settled and marked for cleanup")
+                        logger.debug(
+                            f"Promise session {session_id} settled and marked for cleanup"
+                        )
                except Exception as e:
-                    logger.warning(f"Error in promise manager cleanup for {session_id}: {e}")
+                    logger.warning(
+                        f"Error in promise manager cleanup for {session_id}: {e}"
+                    )
                    # Still try to cleanup if promise manager fails
                    should_cleanup = True
            else:
@@ -1651,7 +1717,9 @@
            try:
                self._delete_session_safely(session_id)
            except Exception as fallback_error:
-                logger.error(f"Fallback cleanup also failed for {session_id}: {fallback_error}")
+                logger.error(
+                    f"Fallback cleanup also failed for {session_id}: {fallback_error}"
+                )
 
    def _delete_session_completely(self, session_id):
        """Completely delete a session and clean up empty parent containers."""
@@ -1660,17 +1728,17 @@
 
        try:
            levels = session_id.split(".")
-
+
            # Navigate to the session and delete it safely
            if len(levels) == 1:
                # Top-level session - delete directly from object store
                session_key = levels[0]
                if session_key in self._object_store:
                    session_data = self._object_store[session_key]
-
+
                    # Clear any timers or resources in the session before deletion
                    self._cleanup_session_resources(session_data)
-
+
                    # Delete the session
                    del self._object_store[session_key]
                    logger.debug(f"Deleted top-level session: {session_id}")
@@ -1680,35 +1748,41 @@
                # Nested session - navigate and delete safely
                current_store = self._object_store
                path_exists = True
-
+
                # Navigate to parent container
                for i, level in enumerate(levels[:-1]):
                    if level not in current_store:
                        path_exists = False
-                        logger.debug(f"Parent path broken at level '{level}' for session {session_id}")
+                        logger.debug(
+                            f"Parent path broken at level '{level}' for session {session_id}"
+                        )
                        break
                    if not isinstance(current_store[level], dict):
                        path_exists = False
-                        logger.debug(f"Non-dict container at level '{level}' for session {session_id}")
+                        logger.debug(
+                            f"Non-dict container at level '{level}' for session {session_id}"
+                        )
                        break
                    current_store = current_store[level]
-
+
                if path_exists and levels[-1] in current_store:
                    session_data = current_store[levels[-1]]
-
+
                    # Clear resources before deletion
                    if isinstance(session_data, dict):
                        self._cleanup_session_resources(session_data)
-
+
                    # Delete the session
                    del current_store[levels[-1]]
                    logger.debug(f"Deleted nested session: {session_id}")
-
+
                    # Clean up empty parent containers from bottom up
                    self._cleanup_empty_parent_containers(levels[:-1])
                else:
-                    logger.debug(f"Nested session {session_id} already deleted or path invalid")
-
+                    logger.debug(
+                        f"Nested session {session_id} already deleted or path invalid"
+                    )
+
        except KeyError as e:
            logger.debug(f"Session {session_id} already deleted: {e}")
        except Exception as e:
@@ -1728,9 +1802,9 @@
        """Clean up resources within a session (timers, etc.) before deletion."""
        if not isinstance(session_dict, dict):
            return
-
+
        cleanup_errors = []
-
+
        try:
            # Clear any active timers
            if "timer" in session_dict and session_dict["timer"]:
@@ -1746,8 +1820,8 @@
                    logger.debug("Cancelled session timer during cleanup")
                except Exception as timer_error:
                    cleanup_errors.append(f"timer: {timer_error}")
-
-            # Cancel any heartbeat tasks
+
+            # Cancel any heartbeat tasks
            if "heartbeat_task" in session_dict and session_dict["heartbeat_task"]:
                try:
                    task = session_dict["heartbeat_task"]
@@ -1771,51 +1845,64 @@
            if "_promise_manager" in session_dict:
                try:
                    promise_manager = session_dict["_promise_manager"]
-                    if hasattr(promise_manager, 'cleanup'):
+                    if hasattr(promise_manager, "cleanup"):
                        promise_manager.cleanup()
                except Exception as pm_error:
                    cleanup_errors.append(f"promise_manager: {pm_error}")
-
+
        except Exception as e:
            cleanup_errors.append(f"general: {e}")
 
        if cleanup_errors:
-            logger.debug(f"Some resource cleanup errors (non-critical): {cleanup_errors}")
+            logger.debug(
+                f"Some resource cleanup errors (non-critical): {cleanup_errors}"
+            )
 
    def _cleanup_empty_parent_containers(self, parent_levels):
        """Clean up empty parent containers from bottom up."""
        if not parent_levels:
            return
-
+
        try:
            # Work backwards through the path to clean up empty containers
            for i in range(len(parent_levels), 0, -1):
                path = parent_levels[:i]
                container = self._object_store
-
+
                # Navigate to the container
                for level in path[:-1]:
                    if level not in container:
-                        logger.debug(f"Parent container path broken at '{level}', stopping cleanup")
+                        logger.debug(
+                            f"Parent container path broken at '{level}', stopping cleanup"
+                        )
                        return  # Path doesn't exist, nothing to clean
                    if not isinstance(container[level], dict):
-                        logger.debug(f"Non-dict parent container at '{level}', stopping cleanup")
+                        logger.debug(
+                            f"Non-dict parent container at '{level}', stopping cleanup"
+                        )
                        return
                    container = container[level]
-
+
                target_key = path[-1]
                if target_key in container and isinstance(container[target_key], dict):
                    # Only delete if the container is empty (excluding system keys)
-                    remaining_keys = [k for k in container[target_key].keys()
-                                      if k not in ["services", "message_cache"]]
+                    remaining_keys = [
+                        k
+                        for k in container[target_key].keys()
+                        if k not in ["services", "message_cache"]
+                    ]
                    if not remaining_keys:
                        del container[target_key]
-                        logger.debug(f"Cleaned up empty parent container: {'.'.join(path)}")
+                        logger.debug(
+                            f"Cleaned up empty parent container: {'.'.join(path)}"
+                        )
                    else:
                        # Container has content, stop cleanup
-                        logger.debug(f"Parent container {'.'.join(path)} has content, stopping cleanup")
+                        logger.debug(
+                            f"Parent container {'.'.join(path)} has content, stopping cleanup"
+                        )
                        break
-
+
        except Exception as e:
            logger.debug(f"Error cleaning empty parent containers: {e}")
 
@@ -1823,12 +1910,12 @@
        """Emergency cleanup method to remove all sessions (for testing/debugging)."""
        if not hasattr(self, "_object_store"):
            return
-
+
        sessions_to_remove = []
        for key in self._object_store.keys():
            if key not in ["services", "message_cache"]:
                sessions_to_remove.append(key)
-
+
        for session_key in sessions_to_remove:
            try:
                session_data = self._object_store[session_key]
@@ -1842,7 +1929,7 @@
        """Get statistics about current sessions (for debugging/monitoring)."""
        if not hasattr(self, "_object_store"):
            return {"error": "No object store"}
-
+
        stats = {
            "total_sessions": 0,
            "promise_sessions": 0,
@@ -1850,9 +1937,9 @@
            "sessions_with_timers": 0,
            "sessions_with_heartbeat": 0,
            "system_stores": {},
-            "session_ids": []
+            "session_ids": [],
        }
-
+
        for key, value in self._object_store.items():
            if key in ["services", "message_cache"]:
                stats["system_stores"][key] = {
@@ -1861,19 +1948,19 @@
                }
            else:
                stats["total_sessions"] += 1
                stats["session_ids"].append(key)
-
+
                if isinstance(value, dict):
                    if "_promise_manager" in value:
                        stats["promise_sessions"] += 1
                    else:
                        stats["regular_sessions"] += 1
-
+
                    if "timer" in value:
                        stats["sessions_with_timers"] += 1
-
+
                    if "heartbeat_task" in value:
                        stats["sessions_with_heartbeat"] += 1
-
+
        return stats
 
    def _encode_promise(
@@ -2211,7 +2298,9 @@
                    )
                    # For expired callbacks, don't raise an exception, just log and return
                    if callable(reject):
-                        reject(Exception(f"Method expired or not found: {method_name}"))
+                        reject(
+                            Exception(f"Method expired or not found: {method_name}")
+                        )
                        return
                else:
                    logger.debug(
@@ -2228,7 +2317,9 @@
                logger.debug(
                    "Failed to find method %s at %s", method_name, self._client_id
                )
-                error = Exception(f"Method not found: {method_name} at {self._client_id}")
+                error = Exception(
+                    f"Method not found: {method_name} at {self._client_id}"
+                )
                if callable(reject):
                    reject(error)
                else:
@@ -2250,11 +2341,15 @@
                # Check if remote workspace is in authorized_workspaces list
                elif (
                    self._method_annotations[method].get("authorized_workspaces")
-                    and remote_workspace in self._method_annotations[method]["authorized_workspaces"]
+                    and remote_workspace
+                    in self._method_annotations[method]["authorized_workspaces"]
                ):
                    pass  # Access granted
                # Allow manager access
-                elif remote_workspace == "*" and remote_client_id == self._connection.manager_id:
+                elif (
+                    remote_workspace == "*"
+                    and remote_client_id == self._connection.manager_id
+                ):
                    pass  # Access granted
                else:
                    raise PermissionError(
hypha_rpc/utils/__init__.py CHANGED
@@ -20,10 +20,10 @@ from munch import Munch, munchify
 def ensure_event_loop():
     """
     Ensure there's an event loop available for the current thread.
-
+
     This function checks if there's a running event loop or an event loop
     set for the current thread. If neither exists, it creates a new one.
-
+
     This is useful for preventing RuntimeError when calling asyncio.Future()
     or other asyncio operations from threads without event loops.
     """
@@ -33,7 +33,7 @@ def ensure_event_loop():
         return  # Running loop exists, nothing to do
     except RuntimeError:
         pass
-
+
     try:
         # Check if there's a loop set for this thread
         loop = asyncio.get_event_loop()
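A minimal sketch of the ensure-loop behavior described here, using only the standard library (the packaged implementation may differ in details):

    import asyncio

    def ensure_event_loop_sketch():
        try:
            asyncio.get_running_loop()
            return  # a loop is already running in this thread
        except RuntimeError:
            pass
        try:
            asyncio.get_event_loop()  # a loop is set for this thread
        except RuntimeError:
            asyncio.set_event_loop(asyncio.new_event_loop())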
@@ -50,7 +50,7 @@ def ensure_event_loop():
 def safe_create_future():
     """
     Safely create an asyncio.Future() that works from any thread context.
-
+
     This is a more targeted approach than ensure_event_loop() - it only
     creates an event loop if absolutely necessary for Future creation.
     """
@@ -187,7 +187,9 @@ class ObjectProxy(Munch):
 
     def _repr_html_(self):
         obj_id = f"object-proxy-{uuid.uuid4().hex}"
-        html_content = self._render_html(self.toDict(), level=0, label=f"{type(self).__name__} at {hex(id(self))}")
+        html_content = self._render_html(
+            self.toDict(), level=0, label=f"{type(self).__name__} at {hex(id(self))}"
+        )
         style = f"""
         <style>
         #{obj_id} ul {{
@@ -213,17 +215,21 @@ class ObjectProxy(Munch):
         return f'{style}<div id="{obj_id}" class="object-proxy">{html_content}</div>'
 
     def _render_html(self, data, level=0, label="dict"):
-        parts = [f'<details><summary>{html.escape(label)}</summary><ul>']
+        parts = [f"<details><summary>{html.escape(label)}</summary><ul>"]
 
         # Handle lists
         if isinstance(data, list):
             for item in data:
                 if isinstance(item, dict):
                     item_label = f"{type(item).__name__} at {hex(id(item))}"
-                    parts.append(f"<li>{self._render_html(item, level + 1, label=item_label)}</li>")
+                    parts.append(
+                        f"<li>{self._render_html(item, level + 1, label=item_label)}</li>"
+                    )
                 elif isinstance(item, list):
                     item_label = f"list at {hex(id(item))}"
-                    parts.append(f"<li>{self._render_html(item, level + 1, label=item_label)}</li>")
+                    parts.append(
+                        f"<li>{self._render_html(item, level + 1, label=item_label)}</li>"
+                    )
                 else:
                     parts.append(f"<li>{html.escape(str(item))}</li>")
         # Handle dicts
@@ -571,7 +577,7 @@ def callable_doc(any_callable):
     """Return the docstring of a callable."""
     if isinstance(any_callable, partial):
         return any_callable.func.__doc__
-
+
     try:
         return any_callable.__doc__
     except AttributeError:
hypha_rpc/websocket_client.py CHANGED
@@ -221,7 +221,9 @@ class WebsocketRPCConnection:
             logger.info("Refresh token task was cancelled.")
         except RuntimeError as e:
             # Handle event loop closed error gracefully
-            if "Event loop is closed" in str(e) or "cannot schedule new futures" in str(e):
+            if "Event loop is closed" in str(e) or "cannot schedule new futures" in str(
+                e
+            ):
                 logger.debug("Event loop closed during refresh token task")
             else:
                 logger.error(f"RuntimeError in refresh token task: {e}")
@@ -347,14 +349,16 @@ class WebsocketRPCConnection:
             logger.error(f"HTTP error during WebSocket connection: {e}")
         except websockets.exceptions.ConnectionClosedOK as e:
             logger.info("Websocket connection closed gracefully")
-            # Don't set self._closed = True here - let the finally block
+            # Don't set self._closed = True here - let the finally block
             # decide whether to reconnect based on whether this was user-initiated
         except websockets.exceptions.ConnectionClosedError as e:
             logger.info("Websocket connection closed: %s", e)
         except RuntimeError as e:
             # Handle event loop closed error gracefully
             if "Event loop is closed" in str(e):
-                logger.debug("Event loop closed during WebSocket operation, stopping listen task")
+                logger.debug(
+                    "Event loop closed during WebSocket operation, stopping listen task"
+                )
                 return
             else:
                 logger.error(f"RuntimeError in _listen: {e}")
@@ -624,13 +628,15 @@ class WebsocketRPCConnection:
                 except Exception as e:
                     logger.debug(f"Error waiting for reconnect task: {e}")
                 self._reconnect_tasks.discard(task)
-
+
             # Clear any remaining tasks
             self._reconnect_tasks.clear()
-
+
         except RuntimeError as e:
             if "Event loop is closed" in str(e):
-                logger.debug("Event loop closed during cleanup, performing minimal cleanup")
+                logger.debug(
+                    "Event loop closed during cleanup, performing minimal cleanup"
+                )
                 self._refresh_token_task = None
                 self._listen_task = None
                 self._reconnect_tasks.clear()
@@ -659,7 +665,20 @@ def normalize_server_url(server_url):
 
 
 async def login(config):
-    """Login to the hypha server."""
+    """Login to the hypha server.
+
+    Configuration options:
+        server_url: The server URL (required)
+        workspace: Target workspace (optional)
+        login_service_id: Login service ID (default: "public/hypha-login")
+        expires_in: Token expiration time (optional)
+        login_timeout: Timeout for login process (default: 60)
+        login_callback: Callback function for login URL (optional)
+        profile: Whether to return user profile (default: False)
+        ssl: SSL configuration (optional)
+        additional_headers: Additional HTTP headers (optional)
+        transport: Transport type - "websocket" (default) or "http"
+    """
     server_url = config.get("server_url")
     service_id = config.get("login_service_id", "public/hypha-login")
     workspace = config.get("workspace")
@@ -669,6 +688,7 @@ async def login(config):
     profile = config.get("profile", False)
     ssl = config.get("ssl")
     additional_headers = config.get("additional_headers")
+    transport = config.get("transport", "websocket")
 
     server = await connect_to_server(
         {
@@ -677,6 +697,7 @@ async def login(config):
             "method_timeout": timeout,
             "ssl": ssl,
             "additional_headers": additional_headers,
+            "transport": transport,
         }
     )
     try:
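A hedged usage sketch covering the documented options above, including the new transport key (the URL is a placeholder; the context key follows the hypha-rpc login docs):

    from hypha_rpc import login

    async def show_login_url(context):
        print("Please open:", context["login_url"])

    token = await login(  # run inside an async function
        {
            "server_url": "https://hypha.example.org",  # placeholder
            "login_callback": show_login_url,
            "login_timeout": 60,
            "transport": "http",  # or "websocket" (default)
        }
    )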
@@ -699,12 +720,22 @@ async def login(config):
 
 
 async def logout(config):
-    """Logout from the hypha server."""
+    """Logout from the hypha server.
+
+    Configuration options:
+        server_url: The server URL (required)
+        login_service_id: Login service ID (default: "public/hypha-login")
+        logout_callback: Callback function for logout URL (optional)
+        ssl: SSL configuration (optional)
+        additional_headers: Additional HTTP headers (optional)
+        transport: Transport type - "websocket" (default) or "http"
+    """
     server_url = config.get("server_url")
     service_id = config.get("login_service_id", "public/hypha-login")
     callback = config.get("logout_callback")
     ssl = config.get("ssl")
     additional_headers = config.get("additional_headers")
+    transport = config.get("transport", "websocket")
 
     server = await connect_to_server(
         {
@@ -712,6 +743,7 @@ async def logout(config):
             "server_url": server_url,
             "ssl": ssl,
             "additional_headers": additional_headers,
+            "transport": transport,
         }
     )
     try:
@@ -1164,6 +1196,7 @@ class ServerContextManager:
         if not os.environ.get("HYPHA_SERVER_URL"):
             try:
                 from dotenv import load_dotenv, find_dotenv
+
                 load_dotenv(dotenv_path=find_dotenv(usecwd=True))
                 # use info from .env file
                 print("✅ Loaded connection configuration from .env file.")
@@ -1187,6 +1220,7 @@ class ServerContextManager:
     async def __aenter__(self):
         if self._transport == "http":
             from .http_client import _connect_to_server_http
+
             self.wm = await _connect_to_server_http(self.config)
         else:
             self.wm = await _connect_to_server(self.config)
hypha_rpc-0.20.94.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: hypha_rpc
-Version: 0.20.92
+Version: 0.20.94
 Summary: Hypha RPC client for connecting to Hypha server for data management and AI model serving
 Author-email: Wei Ouyang <oeway007@gmail.com>
 Requires-Python: >=3.9
hypha_rpc-0.20.94.dist-info/RECORD CHANGED
@@ -1,19 +1,19 @@
-hypha_rpc/VERSION,sha256=FvcC2kPSeAtE8EdoT2URAoEp9R-fCt-CTG2NiGNsfqk,26
-hypha_rpc/__init__.py,sha256=kWNHbAl-a0RhgKgWm5r0cvs-qd1ujlpmPbXV_0QWuQQ,4727
-hypha_rpc/http_client.py,sha256=E7WyOlVlJmz6IUg-ewy5IM0co94u3KdajiIHOlFLQm4,23565
+hypha_rpc/VERSION,sha256=2SthgenE_Cf8vsKt4Ogl7Qze2l5ai6_9skSV4MhY5bE,26
+hypha_rpc/__init__.py,sha256=RyoDrxqzVR2N-jHf1KAwfd87eJGh8uSVFVKW0Rit_Ss,4853
+hypha_rpc/http_client.py,sha256=bfDeBstT3HquyUhH4XmQ1Tu689sXhmuvuqb4RiPG4Uk,24579
 hypha_rpc/pyodide_sse.py,sha256=o1-6Bqb7bcplSy7pwkmtQb6vKeJsyxex_RebqNd3wX8,2960
 hypha_rpc/pyodide_websocket.py,sha256=XjrgKYySUSNYma-rXjHrSv08YCxj5t4hYEQnK15D6cE,18749
-hypha_rpc/rpc.py,sha256=lnNiPAm_BNT5JjDhAPFRlxs6HioXw77TelUtYd_blYk,115579
+hypha_rpc/rpc.py,sha256=-V3qEE5a0QElDDeiPW5nNNIrVEiNH-YtDutPbg1zDig,116955
 hypha_rpc/sync.py,sha256=HcQwpGHsZjDNcSnDRuyxGu7bquOi5_jWrVL5vTwraZY,12268
 hypha_rpc/webrtc_client.py,sha256=JVbSTWr6Y6vMaeoAPsfecD2SuCtXOuoBVuhpwG5-Qm0,11944
-hypha_rpc/websocket_client.py,sha256=tk8mID_zd0SaKTnwCHMTDIIeWvmatDbr5LHXbo6WwJ8,52920
-hypha_rpc/utils/__init__.py,sha256=1UWExsUWzNRFkuYa7RSDXH3welrelIxOmGLtzdJ2oIA,20042
+hypha_rpc/websocket_client.py,sha256=du2ddv1_ZN4q7FBbwaZB_ExkNkiNitDUnREp7LKu9PQ,54199
+hypha_rpc/utils/__init__.py,sha256=jtH35g2kZypHfevtsCDJH3oI4N3PEu5FuwyY4soGp2I,20136
 hypha_rpc/utils/launch.py,sha256=GB1Ranb5E_oNFBLw2ARfT78SbqGEwUmWwfMo3E82kAM,3976
 hypha_rpc/utils/mcp.py,sha256=AW48yjCovc0jyekRLeD_1U8mRaA8-nEqh4DotSE_s3Y,17348
 hypha_rpc/utils/pydantic.py,sha256=a09_ys4BSXc4Yi6OgZjdspbtLvQVoRCChr6uInY4fN4,5144
 hypha_rpc/utils/schema.py,sha256=WabBJiDheMKRXUroVe9JRlI5P4Wlv6kc0roxVNQZHH8,22110
 hypha_rpc/utils/serve.py,sha256=xr_3oAQDyignQbz1fcm4kuRMBOb52-i0VSYCjZou51c,11882
-hypha_rpc-0.20.92.dist-info/METADATA,sha256=O9DwRhv2Si0B5kM65d4fVZw4Y8G7sF9lf-Ry3hgjzHE,924
-hypha_rpc-0.20.92.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-hypha_rpc-0.20.92.dist-info/top_level.txt,sha256=uShPbaPGP-Ig8OVnQcT6sEzV0Qhb6wfxSJ3uCmYaB58,10
-hypha_rpc-0.20.92.dist-info/RECORD,,
+hypha_rpc-0.20.94.dist-info/METADATA,sha256=RcxCmcAxjjbpVCz1691xyJUWY863lYaTKkyMAapqPKI,924
+hypha_rpc-0.20.94.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+hypha_rpc-0.20.94.dist-info/top_level.txt,sha256=uShPbaPGP-Ig8OVnQcT6sEzV0Qhb6wfxSJ3uCmYaB58,10
+hypha_rpc-0.20.94.dist-info/RECORD,,