@scrypted/server 0.123.33 → 0.123.35

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/dist/cluster/cluster-labels.d.ts +5 -0
  2. package/dist/cluster/cluster-labels.js +15 -5
  3. package/dist/cluster/cluster-labels.js.map +1 -1
  4. package/dist/cluster/cluster-setup.js +12 -5
  5. package/dist/cluster/cluster-setup.js.map +1 -1
  6. package/dist/plugin/plugin-host.d.ts +1 -0
  7. package/dist/plugin/plugin-host.js +8 -2
  8. package/dist/plugin/plugin-host.js.map +1 -1
  9. package/dist/plugin/plugin-remote-worker.js +2 -2
  10. package/dist/plugin/plugin-remote-worker.js.map +1 -1
  11. package/dist/plugin/runtime/cluster-fork-worker.js +1 -1
  12. package/dist/plugin/runtime/cluster-fork-worker.js.map +1 -1
  13. package/dist/scrypted-cluster-main.d.ts +13 -3
  14. package/dist/scrypted-cluster-main.js +97 -77
  15. package/dist/scrypted-cluster-main.js.map +1 -1
  16. package/dist/scrypted-server-main.js +19 -8
  17. package/dist/scrypted-server-main.js.map +1 -1
  18. package/dist/services/cluster-fork.d.ts +3 -3
  19. package/dist/services/cluster-fork.js +54 -14
  20. package/dist/services/cluster-fork.js.map +1 -1
  21. package/package.json +1 -1
  22. package/python/cluster_labels.py +4 -1
  23. package/python/cluster_setup.py +16 -7
  24. package/python/plugin_console.py +1 -0
  25. package/python/plugin_pip.py +14 -8
  26. package/python/plugin_remote.py +120 -38
  27. package/python/plugin_repl.py +42 -15
  28. package/python/plugin_volume.py +17 -11
  29. package/python/rpc-iterator-test.py +11 -8
  30. package/python/rpc.py +242 -154
  31. package/python/rpc_reader.py +35 -28
  32. package/src/cluster/cluster-labels.ts +16 -5
  33. package/src/cluster/cluster-setup.ts +12 -5
  34. package/src/plugin/plugin-host.ts +11 -3
  35. package/src/plugin/plugin-remote-worker.ts +4 -5
  36. package/src/plugin/runtime/cluster-fork-worker.ts +1 -1
  37. package/src/scrypted-cluster-main.ts +123 -91
  38. package/src/scrypted-server-main.ts +24 -11
  39. package/src/services/cluster-fork.ts +64 -18
package/python/cluster_labels.py

@@ -47,5 +47,8 @@ def needs_cluster_fork_worker(options: ClusterForkOptions) -> bool:
     return (
         os.environ.get("SCRYPTED_CLUSTER_ADDRESS")
         and options
-        and (not matches_cluster_labels(options, get_cluster_labels()) or options.get("clusterWorkerId", None))
+        and (
+            not matches_cluster_labels(options, get_cluster_labels())
+            or options.get("clusterWorkerId", None)
+        )
     )
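
Note: the re-wrap above is whitespace-only; the behavior is unchanged. A cluster fork worker is needed only when SCRYPTED_CLUSTER_ADDRESS is set and the fork options either fail to match the local cluster labels or pin a specific clusterWorkerId. A rough usage sketch (only clusterWorkerId appears in this hunk; other assumptions are noted inline):

import os

from cluster_labels import needs_cluster_fork_worker  # module shown in this diff

os.environ["SCRYPTED_CLUSTER_ADDRESS"] = "10.0.0.5"  # pretend clustering is enabled

# Pinning a fork to another worker forces a cluster fork worker, even if the
# local labels would satisfy the request (assumes matches_cluster_labels treats
# absent label requirements as a match).
print(bool(needs_cluster_fork_worker({"clusterWorkerId": "worker-abc123"})))  # True

# Falsy options short-circuit the check, so the fork stays local.
print(bool(needs_cluster_fork_worker({})))  # False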
package/python/cluster_setup.py

@@ -12,6 +12,7 @@ import rpc
 import rpc_reader
 from typing import TypedDict
 
+
 class ClusterObject(TypedDict):
     id: str
     address: str
@@ -20,12 +21,16 @@ class ClusterObject(TypedDict):
     sourceKey: str
     sha256: str
 
+
 def isClusterAddress(address: str):
     return not address or address == os.environ.get("SCRYPTED_CLUSTER_ADDRESS", None)
 
+
 def getClusterPeerKey(address: str, port: int):
     return f"{address}:{port}"
-class ClusterSetup():
+
+
+class ClusterSetup:
     def __init__(self, loop: AbstractEventLoop, peer: rpc.RpcPeer):
         self.loop = loop
         self.peer = peer
@@ -50,9 +55,13 @@ class ClusterSetup():
         sha256 = self.computeClusterObjectHash(o)
         if sha256 != o["sha256"]:
             raise Exception("secret incorrect")
-        return await self.resolveObject(o.get('proxyId', None), o.get('sourceKey', None))
+        return await self.resolveObject(
+            o.get("proxyId", None), o.get("sourceKey", None)
+        )
 
-    def onProxySerialization(self, peer: rpc.RpcPeer, value: Any, sourceKey: str = None):
+    def onProxySerialization(
+        self, peer: rpc.RpcPeer, value: Any, sourceKey: str = None
+    ):
         properties: dict = rpc.RpcPeer.prepareProxyProperties(value) or {}
         clusterEntry = properties.get("__cluster", None)
         proxyId: str
@@ -126,7 +135,9 @@ class ClusterSetup():
             handleClusterClient, listenAddress, 0
         )
         self.clusterPort = clusterRpcServer.sockets[0].getsockname()[1]
-        self.peer.onProxySerialization = lambda value: self.onProxySerialization(self.peer, value, None)
+        self.peer.onProxySerialization = lambda value: self.onProxySerialization(
+            self.peer, value, None
+        )
         del self.peer.params["initializeCluster"]
 
     def computeClusterObjectHash(self, o: ClusterObject) -> str:
@@ -215,9 +226,7 @@ class ClusterSetup():
 
         peerConnectRPCObject = clusterPeer.tags.get("connectRPCObject")
         if not peerConnectRPCObject:
-            peerConnectRPCObject = await clusterPeer.getParam(
-                "connectRPCObject"
-            )
+            peerConnectRPCObject = await clusterPeer.getParam("connectRPCObject")
             clusterPeer.tags["connectRPCObject"] = peerConnectRPCObject
         newValue = await peerConnectRPCObject(clusterObject)
         if not newValue:
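
Note: the connectRPCObject change above is a line-wrap only. For context, the guard a few lines earlier recomputes the ClusterObject hash and raises "secret incorrect" on a mismatch. The exact hash input is not visible in this diff; the sketch below invents the field ordering and the secret handling purely to illustrate the shape of that check:

# Illustrative only: the real computeClusterObjectHash lives in cluster_setup.py
# and its exact encoding is not shown in this hunk.
import hashlib


def compute_cluster_object_hash(o: dict, cluster_secret: str) -> str:
    # Assumed scheme: digest the identifying fields plus a shared secret so a
    # peer without the secret cannot forge a resolvable ClusterObject.
    material = "|".join(
        [str(o.get(k)) for k in ("id", "address", "port", "proxyId", "sourceKey")]
        + [cluster_secret]
    )
    return hashlib.sha256(material.encode("utf8")).hexdigest()


def verify_cluster_object(o: dict, cluster_secret: str) -> None:
    if compute_cluster_object_hash(o, cluster_secret) != o.get("sha256"):
        raise Exception("secret incorrect")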
package/python/plugin_console.py

@@ -1,5 +1,6 @@
 import typing
 
+
 async def writeWorkerGenerator(gen, out: typing.TextIO):
     try:
         async for item in gen:
package/python/plugin_pip.py

@@ -4,22 +4,25 @@ import sys
 from typing import Any
 import shutil
 
+
 def get_requirements_files(requirements: str):
-    want_requirements = requirements + '.txt'
-    installed_requirementstxt = requirements + '.installed.txt'
+    want_requirements = requirements + ".txt"
+    installed_requirementstxt = requirements + ".installed.txt"
     return want_requirements, installed_requirementstxt
 
+
 def need_requirements(requirements_basename: str, requirements_str: str):
     _, installed_requirementstxt = get_requirements_files(requirements_basename)
     if not os.path.exists(installed_requirementstxt):
         return True
     try:
         f = open(installed_requirementstxt, "rb")
-        installed_requirements = f.read().decode('utf8')
+        installed_requirements = f.read().decode("utf8")
         return requirements_str != installed_requirements
     except:
         return True
 
+
 def remove_pip_dirs(plugin_volume: str):
     try:
         for de in os.listdir(plugin_volume):
@@ -48,7 +51,9 @@ def install_with_pip(
     ignore_error: bool = False,
     site_packages: str = None,
 ):
-    requirementstxt, installed_requirementstxt = get_requirements_files(requirements_basename)
+    requirementstxt, installed_requirementstxt = get_requirements_files(
+        requirements_basename
+    )
 
     os.makedirs(python_prefix, exist_ok=True)
 
@@ -81,15 +86,16 @@ def install_with_pip(
         # force reinstall even if it exists in system packages.
         pipArgs.append("--force-reinstall")
 
-
     env = None
     if site_packages:
         env = dict(os.environ)
-        PYTHONPATH = env['PYTHONPATH'] or ''
-        PYTHONPATH += ':' + site_packages
+        PYTHONPATH = env["PYTHONPATH"] or ""
+        PYTHONPATH += ":" + site_packages
        env["PYTHONPATH"] = PYTHONPATH
        print("PYTHONPATH", env["PYTHONPATH"])
-    p = subprocess.Popen(pipArgs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env)
+    p = subprocess.Popen(
+        pipArgs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env
+    )
 
     while True:
         line = p.stdout.readline()
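
Note: these hunks are formatting-only. The contract is unchanged: get_requirements_files maps a basename to <name>.txt and <name>.installed.txt, and need_requirements reports whether the recorded installed snapshot differs from the desired requirements text. A small self-contained illustration (file names and contents are made up):

import os
import tempfile

from plugin_pip import get_requirements_files, need_requirements  # from this package

with tempfile.TemporaryDirectory() as d:
    base = os.path.join(d, "requirements")
    wanted, installed = get_requirements_files(base)  # requirements.txt / requirements.installed.txt

    # No snapshot on disk yet -> installation is needed.
    assert need_requirements(base, "ptpython\nwheel")

    # Record what was installed; identical text means no reinstall next time.
    with open(installed, "w") as f:
        f.write("ptpython\nwheel")
    assert not need_requirements(base, "ptpython\nwheel")
    assert need_requirements(base, "ptpython\nwheel\nnumpy")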
package/python/plugin_remote.py

@@ -27,18 +27,22 @@ from cluster_setup import ClusterSetup
 import cluster_labels
 from plugin_pip import install_with_pip, need_requirements, remove_pip_dirs
 from scrypted_python.scrypted_sdk import PluginFork, ScryptedStatic
-from scrypted_python.scrypted_sdk.types import (Device, DeviceManifest,
-                                                EventDetails,
-                                                ScryptedInterface,
-                                                ScryptedInterfaceMethods,
-                                                ScryptedInterfaceProperty,
-                                                Storage)
+from scrypted_python.scrypted_sdk.types import (
+    Device,
+    DeviceManifest,
+    EventDetails,
+    ScryptedInterface,
+    ScryptedInterfaceMethods,
+    ScryptedInterfaceProperty,
+    Storage,
+)
 
 SCRYPTED_REQUIREMENTS = """
 ptpython
 wheel
 """.strip()
 
+
 class SystemDeviceState(TypedDict):
     lastEventTime: int
     stateTime: int
@@ -47,8 +51,10 @@ class SystemDeviceState(TypedDict):
 
 def ensure_not_coroutine(fn: Callable | Coroutine) -> Callable:
     if inspect.iscoroutinefunction(fn):
+
         def wrapper(*args, **kwargs):
             return asyncio.create_task(fn(*args, **kwargs))
+
         return wrapper
     return fn
 
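
Note: the blank lines added around wrapper are cosmetic. ensure_not_coroutine is a small adapter: coroutine functions are wrapped so calling them schedules a Task on the running loop, while plain callables pass through untouched. A self-contained sketch (callback names are illustrative):

import asyncio
import inspect
from typing import Callable, Coroutine


def ensure_not_coroutine(fn: Callable | Coroutine) -> Callable:
    # copied from the hunk above
    if inspect.iscoroutinefunction(fn):

        def wrapper(*args, **kwargs):
            return asyncio.create_task(fn(*args, **kwargs))

        return wrapper
    return fn


async def demo():
    async def async_cb(x):
        return x * 2

    def sync_cb(x):
        return x * 2

    wrapped = ensure_not_coroutine(async_cb)  # coroutine fn -> wrapper that schedules a Task
    print(await wrapped(21))                  # 42
    print(ensure_not_coroutine(sync_cb)(21))  # 42, passed through untouched


asyncio.run(demo())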
@@ -96,25 +102,29 @@ class DeviceProxy(object):
 class EventListenerRegisterImpl(scrypted_python.scrypted_sdk.EventListenerRegister):
     removeListener: Callable[[], None]
 
-    def __init__(self, removeListener: Callable[[], None] | Coroutine[Any, None, None]) -> None:
+    def __init__(
+        self, removeListener: Callable[[], None] | Coroutine[Any, None, None]
+    ) -> None:
         self.removeListener = ensure_not_coroutine(removeListener)
 
 
 class EventRegistry(object):
     systemListeners: Set[scrypted_python.scrypted_sdk.EventListener]
-    listeners: Mapping[str, Set[Callable[[scrypted_python.scrypted_sdk.EventDetails, Any], None]]]
+    listeners: Mapping[
+        str, Set[Callable[[scrypted_python.scrypted_sdk.EventDetails, Any], None]]
+    ]
 
-    __allowedEventInterfaces = set([
-        ScryptedInterface.ScryptedDevice.value,
-        'Logger',
-        'Storage'
-    ])
+    __allowedEventInterfaces = set(
+        [ScryptedInterface.ScryptedDevice.value, "Logger", "Storage"]
+    )
 
     def __init__(self) -> None:
         self.systemListeners = set()
         self.listeners = {}
 
-    def __getMixinEventName(self, options: str | scrypted_python.scrypted_sdk.EventListenerOptions) -> str:
+    def __getMixinEventName(
+        self, options: str | scrypted_python.scrypted_sdk.EventListenerOptions
+    ) -> str:
         mixinId = None
         if type(options) == str:
             event = options
@@ -155,7 +165,15 @@ class EventRegistry(object):
         self.listeners[id].add(callback)
         return EventListenerRegisterImpl(lambda: self.listeners[id].remove(callback))
 
-    def notify(self, id: str, eventTime: int, eventInterface: str, property: str, value: Any, options: dict = None):
+    def notify(
+        self,
+        id: str,
+        eventTime: int,
+        eventInterface: str,
+        property: str,
+        value: Any,
+        options: dict = None,
+    ):
         options = options or {}
         changed = options.get("changed")
         mixinId = options.get("mixinId")
@@ -174,7 +192,13 @@ class EventRegistry(object):
 
         return self.notifyEventDetails(id, eventDetails, value)
 
-    def notifyEventDetails(self, id: str, eventDetails: scrypted_python.scrypted_sdk.EventDetails, value: Any, eventInterface: str = None):
+    def notifyEventDetails(
+        self,
+        id: str,
+        eventDetails: scrypted_python.scrypted_sdk.EventDetails,
+        value: Any,
+        eventInterface: str = None,
+    ):
         if not eventDetails.get("eventId"):
             eventDetails["eventId"] = self.__generateBase36Str()
         if not eventInterface:
@@ -183,8 +207,9 @@ class EventRegistry(object):
         # system listeners only get state changes.
         # there are many potentially noisy stateless events, like
         # object detection and settings changes
-        if (eventDetails.get("property") and not eventDetails.get("mixinId")) or \
-                (eventInterface in EventRegistry.__allowedEventInterfaces):
+        if (eventDetails.get("property") and not eventDetails.get("mixinId")) or (
+            eventInterface in EventRegistry.__allowedEventInterfaces
+        ):
             for listener in self.systemListeners:
                 listener(id, eventDetails, value)
 
@@ -202,6 +227,7 @@ class EventRegistry(object):
 
         return True
 
+
 class ClusterManager(scrypted_python.scrypted_sdk.types.ClusterManager):
     def __init__(self, api: Any):
         self.api = api
@@ -213,11 +239,16 @@ class ClusterManager(scrypted_python.scrypted_sdk.types.ClusterManager):
     def getClusterWorkerId(self) -> str:
         return os.getenv("SCRYPTED_CLUSTER_WORKER_ID", None)
 
-    async def getClusterWorkers(self) -> Mapping[str, scrypted_python.scrypted_sdk.types.ClusterWorker]:
-        self.clusterService = self.clusterService or asyncio.ensure_future(self.api.getComponent("cluster-fork"))
+    async def getClusterWorkers(
+        self,
+    ) -> Mapping[str, scrypted_python.scrypted_sdk.types.ClusterWorker]:
+        self.clusterService = self.clusterService or asyncio.ensure_future(
+            self.api.getComponent("cluster-fork")
+        )
         cs = await self.clusterService
         return await cs.getClusterWorkers()
 
+
 class SystemManager(scrypted_python.scrypted_sdk.types.SystemManager):
     def __init__(
         self, api: Any, systemState: Mapping[str, Mapping[str, SystemDeviceState]]
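
Note: besides the re-wrap, getClusterWorkers shows a small lazy-caching pattern: the cluster-fork component lookup is wrapped in a future once and reused by every later call. A generic, self-contained sketch of the same pattern (the fetch coroutine is a stand-in for api.getComponent("cluster-fork")):

import asyncio


class LazyComponent:
    def __init__(self):
        self._fut = None

    async def _fetch(self):
        # stand-in for an expensive RPC such as api.getComponent("cluster-fork")
        await asyncio.sleep(0.1)
        return {"worker-1": {"labels": ["compute"]}}

    async def get(self):
        # the first caller kicks off the fetch; everyone else awaits the same future
        self._fut = self._fut or asyncio.ensure_future(self._fetch())
        return await self._fut


async def main():
    comp = LazyComponent()
    a, b = await asyncio.gather(comp.get(), comp.get())
    print(a is b)  # True: only one underlying fetch happened


asyncio.run(main())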
@@ -306,19 +337,27 @@ class SystemManager(scrypted_python.scrypted_sdk.types.SystemManager):
         callback = ensure_not_coroutine(callback)
         if type(options) != str and options.get("watch"):
             return self.events.listenDevice(
-                id, options,
-                lambda eventDetails, eventData: callback(self.getDeviceById(id), eventDetails, eventData)
+                id,
+                options,
+                lambda eventDetails, eventData: callback(
+                    self.getDeviceById(id), eventDetails, eventData
+                ),
             )
 
         register_fut = asyncio.ensure_future(
             self.api.listenDevice(
-                id, options,
-                lambda eventDetails, eventData: callback(self.getDeviceById(id), eventDetails, eventData)
+                id,
+                options,
+                lambda eventDetails, eventData: callback(
+                    self.getDeviceById(id), eventDetails, eventData
+                ),
             )
         )
+
         async def unregister():
             register = await register_fut
             await register.removeListener()
+
         return EventListenerRegisterImpl(lambda: asyncio.ensure_future(unregister()))
 
     async def removeDevice(self, id: str) -> None:
@@ -555,6 +594,7 @@ class DeviceManager(scrypted_python.scrypted_sdk.types.DeviceManager):
     def getDeviceStorage(self, nativeId: str = None) -> Storage:
         return self.nativeIds.get(nativeId, None)
 
+
 class PeerLiveness:
     def __init__(self, loop: AbstractEventLoop):
         self.killed = Future(loop=loop)
@@ -562,15 +602,22 @@ class PeerLiveness:
     async def waitKilled(self):
         await self.killed
 
+
 def safe_set_result(fut: Future, result: Any):
     try:
         fut.set_result(result)
     except:
         pass
 
+
 class PluginRemote:
     def __init__(
-        self, clusterSetup: ClusterSetup, api, pluginId: str, hostInfo, loop: AbstractEventLoop
+        self,
+        clusterSetup: ClusterSetup,
+        api,
+        pluginId: str,
+        hostInfo,
+        loop: AbstractEventLoop,
     ):
         self.systemState: Mapping[str, Mapping[str, SystemDeviceState]] = {}
         self.nativeIds: Mapping[str, DeviceStorage] = {}
@@ -606,7 +653,9 @@ class PluginRemote:
         consoleFuture = Future()
         self.consoles[nativeId] = consoleFuture
         plugins = await self.api.getComponent("plugins")
-        port, hostname = await plugins.getRemoteServicePort(self.pluginId, "console-writer")
+        port, hostname = await plugins.getRemoteServicePort(
+            self.pluginId, "console-writer"
+        )
         connection = await asyncio.open_connection(host=hostname, port=port)
         _, writer = connection
         if not nativeId:
@@ -682,7 +731,7 @@ class PluginRemote:
 
         if not forkMain:
             multiprocessing.set_start_method("spawn")
-
+
         # forkMain may be set to true, but the environment may not be initialized
         # if the plugin is loaded in another cluster worker.
        # instead rely on a environemnt variable that will be passed to
@@ -819,10 +868,13 @@ class PluginRemote:
                     async def getZip(self):
                         return await zipAPI.getZip()
 
-                return await remote.loadZip(packageJson, PluginZipAPI(), forkOptions)
+                return await remote.loadZip(
+                    packageJson, PluginZipAPI(), forkOptions
+                )
 
             if cluster_labels.needs_cluster_fork_worker(options):
                 peerLiveness = PeerLiveness(self.loop)
+
                 async def getClusterFork():
                     runtimeWorkerOptions = {
                         "packageJson": packageJson,
@@ -835,14 +887,17 @@ class PluginRemote:
 
                     forkComponent = await self.api.getComponent("cluster-fork")
                     sanitizedOptions = options.copy()
-                    sanitizedOptions["runtime"] = sanitizedOptions.get("runtime", "python")
+                    sanitizedOptions["runtime"] = sanitizedOptions.get(
+                        "runtime", "python"
+                    )
                     sanitizedOptions["zipHash"] = zipHash
                     clusterForkResult = await forkComponent.fork(
                         runtimeWorkerOptions,
                         sanitizedOptions,
-                        peerLiveness, lambda: zipAPI.getZip()
+                        peerLiveness,
+                        lambda: zipAPI.getZip(),
                     )
-
+
                     async def waitPeerLiveness():
                         try:
                             await peerLiveness.waitKilled()
@@ -851,6 +906,7 @@ class PluginRemote:
                             await clusterForkResult.kill()
                         except:
                             pass
+
                     asyncio.ensure_future(waitPeerLiveness(), loop=self.loop)
 
                     async def waitClusterForkKilled():
@@ -859,30 +915,48 @@ class PluginRemote:
                         except:
                             pass
                         safe_set_result(peerLiveness.killed, None)
+
                     asyncio.ensure_future(waitClusterForkKilled(), loop=self.loop)
 
-                    clusterGetRemote = await self.clusterSetup.connectRPCObject(await clusterForkResult.getResult())
+                    clusterGetRemote = await self.clusterSetup.connectRPCObject(
+                        await clusterForkResult.getResult()
+                    )
                     remoteDict = await clusterGetRemote()
-                    asyncio.ensure_future(plugin_console.writeWorkerGenerator(remoteDict["stdout"], sys.stdout))
-                    asyncio.ensure_future(plugin_console.writeWorkerGenerator(remoteDict["stderr"], sys.stderr))
+                    asyncio.ensure_future(
+                        plugin_console.writeWorkerGenerator(
+                            remoteDict["stdout"], sys.stdout
+                        )
+                    )
+                    asyncio.ensure_future(
+                        plugin_console.writeWorkerGenerator(
+                            remoteDict["stderr"], sys.stderr
+                        )
+                    )
 
                     getRemote = remoteDict["getRemote"]
-                    directGetRemote = await self.clusterSetup.connectRPCObject(getRemote)
+                    directGetRemote = await self.clusterSetup.connectRPCObject(
+                        getRemote
+                    )
                     if directGetRemote is getRemote:
                         raise Exception("cluster fork peer not direct connected")
 
-                    forkPeer = getattr(directGetRemote, rpc.RpcPeer.PROPERTY_PROXY_PEER)
+                    forkPeer = getattr(
+                        directGetRemote, rpc.RpcPeer.PROPERTY_PROXY_PEER
+                    )
                     return await finishFork(forkPeer)
 
-
                 pluginFork = PluginFork()
                 pluginFork.result = asyncio.create_task(getClusterFork())
+
                 async def waitKilled():
                     await peerLiveness.killed
+
                 pluginFork.exit = asyncio.create_task(waitKilled())
+
                 def terminate():
                     safe_set_result(peerLiveness.killed, None)
                     pluginFork.worker.terminate()
+
                 pluginFork.terminate = terminate
 
                 pluginFork.worker = None
@@ -902,12 +976,16 @@ class PluginRemote:
 
             pluginFork = PluginFork()
            killed = Future(loop=self.loop)
+
            async def waitKilled():
                await killed
+
            pluginFork.exit = asyncio.create_task(waitKilled())
+
            def terminate():
                safe_set_result(killed, None)
                pluginFork.worker.kill()
+
            pluginFork.terminate = terminate
 
            pluginFork.worker = multiprocessing.Process(
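
Note: the spacing changes here wrap the same lifecycle wiring used in the cluster path above: a killed future, an exit task that resolves once it is set, and a terminate() that sets the future and kills the worker. A stripped-down, runnable sketch of that shape (the worker object is a placeholder):

import asyncio


class FakeWorker:
    def kill(self):
        print("worker killed")


async def main():
    loop = asyncio.get_running_loop()
    killed = loop.create_future()
    worker = FakeWorker()

    async def wait_killed():
        await killed

    exit_task = asyncio.create_task(wait_killed())

    def terminate():
        if not killed.done():  # mirrors safe_set_result in plugin_remote.py
            killed.set_result(None)
        worker.kill()

    terminate()
    await exit_task  # resolves once killed was set
    print("fork exited")


asyncio.run(main())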
@@ -956,6 +1034,7 @@ class PluginRemote:
             # sdk.
 
             from scrypted_sdk import sdk_init2  # type: ignore
+
             sdk_init2(sdk)
         except:
             from scrypted_sdk import sdk_init  # type: ignore
@@ -1048,7 +1127,9 @@ async def plugin_async_main(
     peer.params["print"] = print
 
     clusterSetup = ClusterSetup(loop, peer)
-    peer.params["initializeCluster"] = lambda options: clusterSetup.initializeCluster(options)
+    peer.params["initializeCluster"] = lambda options: clusterSetup.initializeCluster(
+        options
+    )
 
     async def ping(time: int):
         return time
@@ -1077,6 +1158,7 @@ def main(rpcTransport: rpc_reader.RpcTransport):
     loop.run_until_complete(plugin_async_main(loop, rpcTransport))
     loop.close()
 
+
 def plugin_fork(conn: multiprocessing.connection.Connection):
     main(rpc_reader.RpcConnectionTransport(conn))
 
package/python/plugin_repl.py

@@ -39,6 +39,8 @@ ColorDepth.default = lambda *args, **kwargs: ColorDepth.DEPTH_4_BIT
 # the library. The patches here allow us to scope a particular call stack
 # to a particular REPL, and to get the current Application from the stack.
 default_get_app = prompt_toolkit.application.current.get_app
+
+
 def get_app_patched() -> Application[Any]:
     stack = inspect.stack()
     for frame in stack:
@@ -46,6 +48,8 @@ def get_app_patched() -> Application[Any]:
         if self_var is not None and isinstance(self_var, Application):
             return self_var
     return default_get_app()
+
+
 prompt_toolkit.application.current.get_app = get_app_patched
 prompt_toolkit.key_binding.key_processor.get_app = get_app_patched
 prompt_toolkit.contrib.telnet.server.get_app = get_app_patched
@@ -141,7 +145,9 @@ async def eval_async_patched(self: PythonRepl, line: str) -> object:
 
     def eval_across_loops(code, *args, **kwargs):
         future = concurrent.futures.Future()
-        scrypted_loop.call_soon_threadsafe(partial(eval_in_scrypted, future), code, *args, **kwargs)
+        scrypted_loop.call_soon_threadsafe(
+            partial(eval_in_scrypted, future), code, *args, **kwargs
+        )
         return future.result()
 
     # WORKAROUND: Due to a bug in Jedi, the current directory is removed
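
Note: eval_across_loops is only re-wrapped here, but the pattern it uses is the core of the REPL bridge: work is handed from the REPL thread to the Scrypted event loop with call_soon_threadsafe, and the result travels back through a concurrent.futures.Future. A self-contained sketch of the same cross-loop handoff (names are illustrative):

import asyncio
import concurrent.futures
import threading


def run_in_loop(loop: asyncio.AbstractEventLoop, code: str) -> str:
    future = concurrent.futures.Future()

    def work():
        # runs on the target loop's thread; hand the result back to the caller
        future.set_result(f"evaluated: {code}")

    loop.call_soon_threadsafe(work)
    return future.result()  # blocks the calling thread until the loop ran work()


loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()
print(run_in_loop(loop, "1 + 1"))
loop.call_soon_threadsafe(loop.stop)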
@@ -192,7 +198,7 @@ async def createREPLServer(sdk: ScryptedStatic, plugin: ScryptedDevice) -> int:
     sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
     sock.settimeout(None)
-    sock.bind(('localhost', 0))
+    sock.bind(("localhost", 0))
     sock.listen()
 
     scrypted_loop: asyncio.AbstractEventLoop = asyncio.get_event_loop()
@@ -222,7 +228,7 @@ async def createREPLServer(sdk: ScryptedStatic, plugin: ScryptedDevice) -> int:
 
     # Select a free port for the telnet server
     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    s.bind(('localhost', 0))
+    s.bind(("localhost", 0))
     telnet_port = s.getsockname()[1]
     s.close()
 
@@ -230,14 +236,19 @@ async def createREPLServer(sdk: ScryptedStatic, plugin: ScryptedDevice) -> int:
         # repl_loop owns the print capabilities, but the prints will
         # be executed in scrypted_loop. We need to bridge the two here
         repl_print = partial(print_formatted_text, output=connection.vt100_output)
+
         def print_across_loops(*args, **kwargs):
             repl_loop.call_soon_threadsafe(repl_print, *args, **kwargs)
 
         global_dict = {
             **globals(),
             "print": print_across_loops,
-            "help": lambda *args, **kwargs: print_across_loops("Help is not available in this environment"),
-            "input": lambda *args, **kwargs: print_across_loops("Input is not available in this environment"),
+            "help": lambda *args, **kwargs: print_across_loops(
+                "Help is not available in this environment"
+            ),
+            "input": lambda *args, **kwargs: print_across_loops(
+                "Input is not available in this environment"
+            ),
         }
         locals_dict = {
             "device": device,
@@ -245,19 +256,32 @@ async def createREPLServer(sdk: ScryptedStatic, plugin: ScryptedDevice) -> int:
             "deviceManager": deviceManager,
             "mediaManager": mediaManager,
             "sdk": sdk,
-            "realDevice": realDevice
+            "realDevice": realDevice,
         }
-        vars_prompt = '\n'.join([f" {k}" for k in locals_dict.keys()])
+        vars_prompt = "\n".join([f" {k}" for k in locals_dict.keys()])
         banner = f"Python REPL variables:\n{vars_prompt}"
         print_formatted_text(banner)
-        await embed(return_asyncio_coroutine=True, globals=global_dict, locals=locals_dict, configure=partial(configure, scrypted_loop))
+        await embed(
+            return_asyncio_coroutine=True,
+            globals=global_dict,
+            locals=locals_dict,
+            configure=partial(configure, scrypted_loop),
+        )
 
     server_task: asyncio.Task = None
+
     def ready_cb():
-        future.set_result((telnet_port, lambda: repl_loop.call_soon_threadsafe(server_task.cancel)))
+        future.set_result(
+            (
+                telnet_port,
+                lambda: repl_loop.call_soon_threadsafe(server_task.cancel),
+            )
+        )
 
     # Start the REPL server
-    telnet_server = TelnetServer(interact=interact, port=telnet_port, enable_cpr=False)
+    telnet_server = TelnetServer(
+        interact=interact, port=telnet_port, enable_cpr=False
+    )
     server_task = asyncio.create_task(telnet_server.run(ready_cb=ready_cb))
     try:
         await server_task
@@ -277,16 +301,19 @@ async def createREPLServer(sdk: ScryptedStatic, plugin: ScryptedDevice) -> int:
     def finish_setup():
         telnet_port, exit_server = server_started_future.result()
 
-        telnet_client = telnetlib.Telnet('localhost', telnet_port, timeout=None)
+        telnet_client = telnetlib.Telnet("localhost", telnet_port, timeout=None)
 
         def telnet_negotiation_cb(telnet_socket, command, option):
             pass  # ignore telnet negotiation
+
         telnet_client.set_option_negotiation_callback(telnet_negotiation_cb)
 
         # initialize telnet terminal
         # this tells the telnet server we are a vt100 terminal
-        telnet_client.get_socket().sendall(b'\xff\xfb\x18\xff\xfa\x18\x00\x61\x6e\x73\x69\xff\xf0')
-        telnet_client.get_socket().sendall(b'\r\n')
+        telnet_client.get_socket().sendall(
+            b"\xff\xfb\x18\xff\xfa\x18\x00\x61\x6e\x73\x69\xff\xf0"
+        )
+        telnet_client.get_socket().sendall(b"\r\n")
 
         # Bridge the connection to the telnet server, two way
         def forward_to_telnet():
@@ -303,7 +330,7 @@ async def createREPLServer(sdk: ScryptedStatic, plugin: ScryptedDevice) -> int:
             while True:
                 data = telnet_client.read_some()
                 if not data:
-                    conn.sendall('REPL exited'.encode())
+                    conn.sendall("REPL exited".encode())
                     break
                 if b">>>" in data:
                     # This is an ugly hack - somewhere in ptpython, the
@@ -333,4 +360,4 @@ async def createREPLServer(sdk: ScryptedStatic, plugin: ScryptedDevice) -> int:
     threading.Thread(target=accept_connection).start()
 
     proxy_port = sock.getsockname()[1]
-    return proxy_port
+    return proxy_port