@scrypted/server 0.115.1 → 0.115.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/deno/deno-plugin-remote.ts +5 -0
  2. package/dist/cluster/cluster-hash.js +1 -1
  3. package/dist/cluster/cluster-hash.js.map +1 -1
  4. package/dist/cluster/connect-rpc-object.d.ts +2 -1
  5. package/dist/plugin/plugin-api.d.ts +1 -0
  6. package/dist/plugin/plugin-console.js +4 -0
  7. package/dist/plugin/plugin-console.js.map +1 -1
  8. package/dist/plugin/plugin-remote-stats.js +19 -15
  9. package/dist/plugin/plugin-remote-stats.js.map +1 -1
  10. package/dist/plugin/plugin-remote-worker.js +77 -45
  11. package/dist/plugin/plugin-remote-worker.js.map +1 -1
  12. package/dist/plugin/runtime/child-process-worker.js +1 -1
  13. package/dist/plugin/runtime/child-process-worker.js.map +1 -1
  14. package/dist/plugin/runtime/deno-worker.d.ts +10 -0
  15. package/dist/plugin/runtime/deno-worker.js +79 -0
  16. package/dist/plugin/runtime/deno-worker.js.map +1 -0
  17. package/dist/plugin/runtime/node-fork-worker.d.ts +1 -1
  18. package/dist/plugin/runtime/node-fork-worker.js +2 -2
  19. package/dist/plugin/runtime/node-fork-worker.js.map +1 -1
  20. package/dist/plugin/runtime/node-thread-worker.d.ts +3 -3
  21. package/dist/plugin/runtime/node-thread-worker.js +22 -7
  22. package/dist/plugin/runtime/node-thread-worker.js.map +1 -1
  23. package/dist/plugin/runtime/python-worker.d.ts +0 -1
  24. package/dist/plugin/runtime/python-worker.js +0 -3
  25. package/dist/plugin/runtime/python-worker.js.map +1 -1
  26. package/dist/rpc-peer-eval.d.ts +4 -1
  27. package/dist/rpc-peer-eval.js +18 -15
  28. package/dist/rpc-peer-eval.js.map +1 -1
  29. package/dist/rpc.js +2 -2
  30. package/dist/rpc.js.map +1 -1
  31. package/dist/runtime.js +2 -0
  32. package/dist/runtime.js.map +1 -1
  33. package/dist/scrypted-main-exports.js +2 -2
  34. package/dist/scrypted-main-exports.js.map +1 -1
  35. package/dist/scrypted-main.js +4 -1
  36. package/dist/scrypted-main.js.map +1 -1
  37. package/dist/scrypted-plugin-main.js +14 -4
  38. package/dist/scrypted-plugin-main.js.map +1 -1
  39. package/package.json +4 -3
  40. package/python/plugin_remote.py +406 -218
  41. package/src/cluster/cluster-hash.ts +1 -1
  42. package/src/cluster/connect-rpc-object.ts +2 -1
  43. package/src/plugin/plugin-api.ts +1 -0
  44. package/src/plugin/plugin-console.ts +5 -0
  45. package/src/plugin/plugin-remote-stats.ts +20 -15
  46. package/src/plugin/plugin-remote-worker.ts +87 -47
  47. package/src/plugin/runtime/child-process-worker.ts +1 -1
  48. package/src/plugin/runtime/deno-worker.ts +83 -0
  49. package/src/plugin/runtime/node-fork-worker.ts +3 -5
  50. package/src/plugin/runtime/node-thread-worker.ts +22 -6
  51. package/src/plugin/runtime/python-worker.ts +0 -4
  52. package/src/rpc-peer-eval.ts +23 -19
  53. package/src/rpc.ts +3 -3
  54. package/src/runtime.ts +2 -0
  55. package/src/scrypted-main-exports.ts +2 -2
  56. package/src/scrypted-main.ts +4 -1
  57. package/src/scrypted-plugin-main.ts +14 -4
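
The hunks that follow are from package/python/plugin_remote.py (item 40 above), the largest change in this release: cluster objects gain an address field, the integer sourcePort is replaced by a string sourceKey (an "address:port" pair), the cluster RPC server listens on 0.0.0.0 when SCRYPTED_CLUSTER_ADDRESS is set, and the module is reformatted to double-quoted, wrapped-signature style. As orientation only (not part of the published package), a minimal sketch of the new cluster object hash, paraphrased from the diff below; the standalone function name and the cluster_secret parameter are illustrative:

import base64
import hashlib

def compute_cluster_object_hash(o: dict, cluster_secret: str) -> str:
    # Concatenates id, address, port, sourceKey, proxyId and the shared secret,
    # then returns the base64 of the SHA-256 digest, mirroring computeClusterObjectHash.
    payload = f"{o['id']}{o.get('address') or ''}{o['port']}{o.get('sourceKey') or ''}{o['proxyId']}{cluster_secret}"
    m = hashlib.sha256()
    m.update(payload.encode("utf8"))
    return base64.b64encode(m.digest()).decode("utf-8")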
@@ -8,7 +8,6 @@ import multiprocessing
  import multiprocessing.connection
  import os
  import platform
- import shutil
  import sys
  import threading
  import time
@@ -39,11 +38,13 @@ ptpython
  wheel
  """.strip()

+
  class ClusterObject(TypedDict):
  id: str
+ address: str
  port: int
  proxyId: str
- sourcePort: int
+ sourceKey: str
  sha256: str


@@ -60,7 +61,7 @@ class DeviceProxy(object):
  self.device: asyncio.Future[rpc.RpcProxy] = None

  def __getattr__(self, name):
- if name == 'id':
+ if name == "id":
  return self.id

  if hasattr(ScryptedInterfaceProperty, name):
@@ -70,28 +71,33 @@ class DeviceProxy(object):
  p = state.get(name)
  if not p:
  return
- return p.get('value', None)
+ return p.get("value", None)
  if hasattr(ScryptedInterfaceMethods, name):
  return rpc.RpcProxyMethod(self, name)

  def __setattr__(self, name: str, value: Any) -> None:
- if name == '__proxy_finalizer_id':
- self.__dict__['__proxy_entry']['finalizerId'] = value
+ if name == "__proxy_finalizer_id":
+ self.__dict__["__proxy_entry"]["finalizerId"] = value

  return super().__setattr__(name, value)

  def __apply__(self, method: str, args: list):
  if not self.device:
- self.device = asyncio.ensure_future(self.systemManager.api.getDeviceById(self.id))
+ self.device = asyncio.ensure_future(
+ self.systemManager.api.getDeviceById(self.id)
+ )

  async def apply():
  device = await self.device
  return await device.__apply__(method, args)
+
  return apply()


  class SystemManager(scrypted_python.scrypted_sdk.types.SystemManager):
- def __init__(self, api: Any, systemState: Mapping[str, Mapping[str, SystemDeviceState]]) -> None:
+ def __init__(
+ self, api: Any, systemState: Mapping[str, Mapping[str, SystemDeviceState]]
+ ) -> None:
  super().__init__()
  self.api = api
  self.systemState = systemState
@@ -103,7 +109,9 @@ class SystemManager(scrypted_python.scrypted_sdk.types.SystemManager):
  def getSystemState(self) -> Any:
  return self.systemState

- def getDeviceById(self, idOrPluginId: str, nativeId: str = None) -> scrypted_python.scrypted_sdk.ScryptedDevice:
+ def getDeviceById(
+ self, idOrPluginId: str, nativeId: str = None
+ ) -> scrypted_python.scrypted_sdk.ScryptedDevice:
  id: str = None
  if self.systemState.get(idOrPluginId, None):
  if nativeId is not None:
@@ -114,15 +122,15 @@ class SystemManager(scrypted_python.scrypted_sdk.types.SystemManager):
  state = self.systemState.get(check, None)
  if not state:
  continue
- pluginId = state.get('pluginId', None)
+ pluginId = state.get("pluginId", None)
  if not pluginId:
  continue
- pluginId = pluginId.get('value', None)
+ pluginId = pluginId.get("value", None)
  if pluginId == idOrPluginId:
- checkNativeId = state.get('nativeId', None)
+ checkNativeId = state.get("nativeId", None)
  if not checkNativeId:
  continue
- checkNativeId = checkNativeId.get('value', None)
+ checkNativeId = checkNativeId.get("value", None)
  if nativeId == checkNativeId:
  id = idOrPluginId
  break
@@ -140,31 +148,38 @@ class SystemManager(scrypted_python.scrypted_sdk.types.SystemManager):
  state = self.systemState.get(check, None)
  if not state:
  continue
- checkInterfaces = state.get('interfaces', None)
+ checkInterfaces = state.get("interfaces", None)
  if not checkInterfaces:
  continue
- interfaces = checkInterfaces.get('value', [])
+ interfaces = checkInterfaces.get("value", [])
  if ScryptedInterface.ScryptedPlugin.value in interfaces:
- checkPluginId = state.get('pluginId', None)
+ checkPluginId = state.get("pluginId", None)
  if not checkPluginId:
  continue
- pluginId = checkPluginId.get('value', None)
+ pluginId = checkPluginId.get("value", None)
  if not pluginId:
  continue
  if pluginId == name:
  return self.getDeviceById(check)
- checkName = state.get('name', None)
+ checkName = state.get("name", None)
  if not checkName:
  continue
- if checkName.get('value', None) == name:
+ if checkName.get("value", None) == name:
  return self.getDeviceById(check)

  # TODO
- async def listen(self, callback: scrypted_python.scrypted_sdk.EventListener) -> scrypted_python.scrypted_sdk.EventListenerRegister:
+ async def listen(
+ self, callback: scrypted_python.scrypted_sdk.EventListener
+ ) -> scrypted_python.scrypted_sdk.EventListenerRegister:
  return super().listen(callback)

  # TODO
- async def listenDevice(self, id: str, event: str | scrypted_python.scrypted_sdk.EventListenerOptions, callback: scrypted_python.scrypted_sdk.EventListener) -> scrypted_python.scrypted_sdk.EventListenerRegister:
+ async def listenDevice(
+ self,
+ id: str,
+ event: str | scrypted_python.scrypted_sdk.EventListenerOptions,
+ callback: scrypted_python.scrypted_sdk.EventListener,
+ ) -> scrypted_python.scrypted_sdk.EventListenerRegister:
  return super().listenDevice(id, event, callback)

  async def removeDevice(self, id: str) -> None:
@@ -179,7 +194,7 @@ class MediaObject(scrypted_python.scrypted_sdk.types.MediaObject):
  setattr(self, rpc.RpcPeer.PROPERTY_PROXY_PROPERTIES, proxyProps)

  options = options or {}
- options['mimeType'] = mimeType
+ options["mimeType"] = mimeType

  for key, value in options.items():
  if rpc.RpcPeer.isTransportSafe(value):
@@ -194,38 +209,83 @@ class MediaManager:
  def __init__(self, mediaManager: scrypted_python.scrypted_sdk.types.MediaManager):
  self.mediaManager = mediaManager

- async def addConverter(self, converter: scrypted_python.scrypted_sdk.types.BufferConverter) -> None:
+ async def addConverter(
+ self, converter: scrypted_python.scrypted_sdk.types.BufferConverter
+ ) -> None:
  return await self.mediaManager.addConverter(converter)

  async def clearConverters(self) -> None:
  return await self.mediaManager.clearConverters()

- async def convertMediaObject(self, mediaObject: scrypted_python.scrypted_sdk.types.MediaObject, toMimeType: str) -> Any:
+ async def convertMediaObject(
+ self,
+ mediaObject: scrypted_python.scrypted_sdk.types.MediaObject,
+ toMimeType: str,
+ ) -> Any:
  return await self.mediaManager.convertMediaObject(mediaObject, toMimeType)

- async def convertMediaObjectToBuffer(self, mediaObject: scrypted_python.scrypted_sdk.types.MediaObject, toMimeType: str) -> bytearray:
- return await self.mediaManager.convertMediaObjectToBuffer(mediaObject, toMimeType)
-
- async def convertMediaObjectToInsecureLocalUrl(self, mediaObject: str | scrypted_python.scrypted_sdk.types.MediaObject, toMimeType: str) -> str:
- return await self.mediaManager.convertMediaObjectToInsecureLocalUrl(mediaObject, toMimeType)
-
- async def convertMediaObjectToJSON(self, mediaObject: scrypted_python.scrypted_sdk.types.MediaObject, toMimeType: str) -> Any:
+ async def convertMediaObjectToBuffer(
+ self,
+ mediaObject: scrypted_python.scrypted_sdk.types.MediaObject,
+ toMimeType: str,
+ ) -> bytearray:
+ return await self.mediaManager.convertMediaObjectToBuffer(
+ mediaObject, toMimeType
+ )
+
+ async def convertMediaObjectToInsecureLocalUrl(
+ self,
+ mediaObject: str | scrypted_python.scrypted_sdk.types.MediaObject,
+ toMimeType: str,
+ ) -> str:
+ return await self.mediaManager.convertMediaObjectToInsecureLocalUrl(
+ mediaObject, toMimeType
+ )
+
+ async def convertMediaObjectToJSON(
+ self,
+ mediaObject: scrypted_python.scrypted_sdk.types.MediaObject,
+ toMimeType: str,
+ ) -> Any:
  return await self.mediaManager.convertMediaObjectToJSON(mediaObject, toMimeType)

- async def convertMediaObjectToLocalUrl(self, mediaObject: str | scrypted_python.scrypted_sdk.types.MediaObject, toMimeType: str) -> str:
- return await self.mediaManager.convertMediaObjectToLocalUrl(mediaObject, toMimeType)
-
- async def convertMediaObjectToUrl(self, mediaObject: str | scrypted_python.scrypted_sdk.types.MediaObject, toMimeType: str) -> str:
+ async def convertMediaObjectToLocalUrl(
+ self,
+ mediaObject: str | scrypted_python.scrypted_sdk.types.MediaObject,
+ toMimeType: str,
+ ) -> str:
+ return await self.mediaManager.convertMediaObjectToLocalUrl(
+ mediaObject, toMimeType
+ )
+
+ async def convertMediaObjectToUrl(
+ self,
+ mediaObject: str | scrypted_python.scrypted_sdk.types.MediaObject,
+ toMimeType: str,
+ ) -> str:
  return await self.mediaManager.convertMediaObjectToUrl(mediaObject, toMimeType)

- async def createFFmpegMediaObject(self, ffmpegInput: scrypted_python.scrypted_sdk.types.FFmpegInput, options: scrypted_python.scrypted_sdk.types.MediaObjectOptions = None) -> scrypted_python.scrypted_sdk.types.MediaObject:
+ async def createFFmpegMediaObject(
+ self,
+ ffmpegInput: scrypted_python.scrypted_sdk.types.FFmpegInput,
+ options: scrypted_python.scrypted_sdk.types.MediaObjectOptions = None,
+ ) -> scrypted_python.scrypted_sdk.types.MediaObject:
  return await self.mediaManager.createFFmpegMediaObject(ffmpegInput, options)

- async def createMediaObject(self, data: Any, mimeType: str, options: scrypted_python.scrypted_sdk.types.MediaObjectOptions = None) -> scrypted_python.scrypted_sdk.types.MediaObject:
+ async def createMediaObject(
+ self,
+ data: Any,
+ mimeType: str,
+ options: scrypted_python.scrypted_sdk.types.MediaObjectOptions = None,
+ ) -> scrypted_python.scrypted_sdk.types.MediaObject:
  # return await self.createMediaObject(data, mimetypes, options)
  return MediaObject(data, mimeType, options)

- async def createMediaObjectFromUrl(self, data: str, options: scrypted_python.scrypted_sdk.types.MediaObjectOptions = None) -> scrypted_python.scrypted_sdk.types.MediaObject:
+ async def createMediaObjectFromUrl(
+ self,
+ data: str,
+ options: scrypted_python.scrypted_sdk.types.MediaObjectOptions = None,
+ ) -> scrypted_python.scrypted_sdk.types.MediaObject:
  return await self.mediaManager.createMediaObjectFromUrl(data, options)

  async def getFFmpegPath(self) -> str:
@@ -236,7 +296,13 @@ class MediaManager:


  class DeviceState(scrypted_python.scrypted_sdk.types.DeviceState):
- def __init__(self, id: str, nativeId: str, systemManager: SystemManager, deviceManager: scrypted_python.scrypted_sdk.types.DeviceManager) -> None:
+ def __init__(
+ self,
+ id: str,
+ nativeId: str,
+ systemManager: SystemManager,
+ deviceManager: scrypted_python.scrypted_sdk.types.DeviceManager,
+ ) -> None:
  super().__init__()
  self._id = id
  self.nativeId = nativeId
@@ -253,7 +319,7 @@ class DeviceState(scrypted_python.scrypted_sdk.types.DeviceState):
  sdd = deviceState.get(property, None)
  if not sdd:
  return None
- return sdd.get('value', None)
+ return sdd.get("value", None)

  def setScryptedProperty(self, property: str, value: Any):
  if property == ScryptedInterfaceProperty.id.value:
@@ -262,13 +328,14 @@ class DeviceState(scrypted_python.scrypted_sdk.types.DeviceState):
  raise Exception("mixins is read only")
  if property == ScryptedInterfaceProperty.interfaces.value:
  raise Exception(
- "interfaces is a read only post-mixin computed property, use providedInterfaces")
+ "interfaces is a read only post-mixin computed property, use providedInterfaces"
+ )

  now = int(time.time() * 1000)
  self.systemManager.systemState[self._id][property] = {
  "lastEventTime": now,
  "stateTime": now,
- "value": value
+ "value": value,
  }

  self.systemManager.api.setState(self.nativeId, property, value)
@@ -311,7 +378,9 @@ class DeviceStorage(Storage):


  class DeviceManager(scrypted_python.scrypted_sdk.types.DeviceManager):
- def __init__(self, nativeIds: Mapping[str, DeviceStorage], systemManager: SystemManager) -> None:
+ def __init__(
+ self, nativeIds: Mapping[str, DeviceStorage], systemManager: SystemManager
+ ) -> None:
  super().__init__()
  self.nativeIds = nativeIds
  self.systemManager = systemManager
@@ -320,7 +389,9 @@ class DeviceManager(scrypted_python.scrypted_sdk.types.DeviceManager):
  id = self.nativeIds[nativeId].id
  return DeviceState(id, nativeId, self.systemManager, self)

- async def onDeviceEvent(self, nativeId: str, eventInterface: str, eventData: Any = None) -> None:
+ async def onDeviceEvent(
+ self, nativeId: str, eventInterface: str, eventData: Any = None
+ ) -> None:
  await self.systemManager.api.onDeviceEvent(nativeId, eventInterface, eventData)

  async def onDevicesChanged(self, devices: DeviceManifest) -> None:
@@ -332,8 +403,12 @@ class DeviceManager(scrypted_python.scrypted_sdk.types.DeviceManager):
  async def onDeviceRemoved(self, nativeId: str) -> None:
  return await self.systemManager.api.onDeviceRemoved(nativeId)

- async def onMixinEvent(self, id: str, mixinDevice: Any, eventInterface: str, eventData: Any) -> None:
- return await self.systemManager.api.onMixinEvent(id, mixinDevice, eventInterface, eventData)
+ async def onMixinEvent(
+ self, id: str, mixinDevice: Any, eventInterface: str, eventData: Any
+ ) -> None:
+ return await self.systemManager.api.onMixinEvent(
+ id, mixinDevice, eventInterface, eventData
+ )

  async def requestRestart(self) -> None:
  return await self.systemManager.api.requestRestart()
@@ -343,7 +418,9 @@ class DeviceManager(scrypted_python.scrypted_sdk.types.DeviceManager):


  class PluginRemote:
- def __init__(self, peer: rpc.RpcPeer, api, pluginId: str, hostInfo, loop: AbstractEventLoop):
+ def __init__(
+ self, peer: rpc.RpcPeer, api, pluginId: str, hostInfo, loop: AbstractEventLoop
+ ):
  self.systemState: Mapping[str, Mapping[str, SystemDeviceState]] = {}
  self.nativeIds: Mapping[str, DeviceStorage] = {}
  self.mediaManager: MediaManager
@@ -356,166 +433,225 @@ class PluginRemote:
  self.hostInfo = hostInfo
  self.loop = loop
  self.replPort = None
- self.__dict__['__proxy_oneway_methods'] = [
- 'notify',
- 'updateDeviceState',
- 'setSystemState',
- 'ioEvent',
- 'setNativeId',
+ self.__dict__["__proxy_oneway_methods"] = [
+ "notify",
+ "updateDeviceState",
+ "setSystemState",
+ "ioEvent",
+ "setNativeId",
  ]

- async def print_async(self, nativeId: str, *values: object, sep: Optional[str] = ' ',
- end: Optional[str] = '\n',
- flush: bool = False,):
+ async def print_async(
+ self,
+ nativeId: str,
+ *values: object,
+ sep: Optional[str] = " ",
+ end: Optional[str] = "\n",
+ flush: bool = False,
+ ):
  consoleFuture = self.consoles.get(nativeId)
  if not consoleFuture:
  consoleFuture = Future()
  self.consoles[nativeId] = consoleFuture
- plugins = await self.api.getComponent('plugins')
- port = await plugins.getRemoteServicePort(self.pluginId, 'console-writer')
+ plugins = await self.api.getComponent("plugins")
+ port = await plugins.getRemoteServicePort(self.pluginId, "console-writer")
  connection = await asyncio.open_connection(port=port)
  _, writer = connection
  if not nativeId:
- nid = 'undefined'
+ nid = "undefined"
  else:
  nid = nativeId
- nid += '\n'
- writer.write(nid.encode('utf8'))
+ nid += "\n"
+ writer.write(nid.encode("utf8"))
  consoleFuture.set_result(connection)
  _, writer = await consoleFuture
  strio = StringIO()
  print(*values, sep=sep, end=end, flush=flush, file=strio)
  strio.seek(0)
- b = strio.read().encode('utf8')
+ b = strio.read().encode("utf8")
  writer.write(b)

- def print(self, nativeId: str, *values: object, sep: Optional[str] = ' ',
- end: Optional[str] = '\n',
- flush: bool = False,):
- asyncio.run_coroutine_threadsafe(self.print_async(
- nativeId, *values, sep=sep, end=end, flush=flush), self.loop)
+ def print(
+ self,
+ nativeId: str,
+ *values: object,
+ sep: Optional[str] = " ",
+ end: Optional[str] = "\n",
+ flush: bool = False,
+ ):
+ asyncio.run_coroutine_threadsafe(
+ self.print_async(nativeId, *values, sep=sep, end=end, flush=flush),
+ self.loop,
+ )

  async def loadZip(self, packageJson, getZip: Any, options: dict):
  try:
  return await self.loadZipWrapped(packageJson, getZip, options)
  except:
- print('plugin start/fork failed')
+ print("plugin start/fork failed")
  traceback.print_exc()
  raise

  async def loadZipWrapped(self, packageJson, getZip: Any, options: dict):
  sdk = ScryptedStatic()

- clusterId = options['clusterId']
- clusterSecret = options['clusterSecret']
+ clusterId = options["clusterId"]
+ clusterSecret = options["clusterSecret"]
+ SCRYPTED_CLUSTER_ADDRESS = os.environ.get("SCRYPTED_CLUSTER_ADDRESS", None)

  def computeClusterObjectHash(o: ClusterObject) -> str:
  m = hashlib.sha256()
- m.update(bytes(f"{o['id']}{o['port']}{o.get('sourcePort') or ''}{o['proxyId']}{clusterSecret}", 'utf8'))
- return base64.b64encode(m.digest()).decode('utf-8')
-
- def onProxySerialization(value: Any, sourcePeerPort: int = None):
+ m.update(
+ bytes(
+ f"{o['id']}{o.get('address') or ''}{o['port']}{o.get('sourceKey', None) or ''}{o['proxyId']}{clusterSecret}",
+ "utf8",
+ )
+ )
+ return base64.b64encode(m.digest()).decode("utf-8")
+
+ def onProxySerialization(value: Any, sourceKey: str = None):
  properties: dict = rpc.RpcPeer.prepareProxyProperties(value) or {}
- clusterEntry = properties.get('__cluster', None)
- proxyId: str = (clusterEntry and clusterEntry.get('proxyId', None)) or rpc.RpcPeer.generateId()
-
- if clusterEntry and clusterPort == clusterEntry['port'] and sourcePeerPort != clusterEntry.get('sourcePort', None):
+ clusterEntry = properties.get("__cluster", None)
+ proxyId: str = (
+ clusterEntry and clusterEntry.get("proxyId", None)
+ ) or rpc.RpcPeer.generateId()
+
+ if (
+ clusterEntry
+ and clusterPort == clusterEntry["port"]
+ and sourceKey != clusterEntry.get("sourceKey", None)
+ ):
  clusterEntry = None

- if not properties.get('__cluster', None):
+ if not clusterEntry:
  clusterEntry: ClusterObject = {
- 'id': clusterId,
- 'proxyId': proxyId,
- 'port': clusterPort,
- 'sourcePort': sourcePeerPort,
+ "id": clusterId,
+ "proxyId": proxyId,
+ "address": SCRYPTED_CLUSTER_ADDRESS,
+ "port": clusterPort,
+ "sourceKey": sourceKey,
  }
- clusterEntry['sha256'] = computeClusterObjectHash(clusterEntry)
- properties['__cluster'] = clusterEntry
+ clusterEntry["sha256"] = computeClusterObjectHash(clusterEntry)
+ properties["__cluster"] = clusterEntry

  return proxyId, properties

  self.peer.onProxySerialization = onProxySerialization

- async def resolveObject(id: str, sourcePeerPort: int):
- sourcePeer: rpc.RpcPeer = self.peer if not sourcePeerPort else await rpc.maybe_await(clusterPeers.get(sourcePeerPort))
+ async def resolveObject(id: str, sourceKey: str):
+ sourcePeer: rpc.RpcPeer = (
+ self.peer
+ if not sourceKey
+ else await rpc.maybe_await(clusterPeers.get(sourceKey, None))
+ )
  if not sourcePeer:
  return
  return sourcePeer.localProxyMap.get(id, None)

- clusterPeers: Mapping[int, asyncio.Future[rpc.RpcPeer]] = {}
+ clusterPeers: Mapping[str, asyncio.Future[rpc.RpcPeer]] = {}

- async def handleClusterClient(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
- _, clusterPeerPort = writer.get_extra_info('peername')
+ def getClusterPeerKey(address: str, port: int):
+ return f"{address}:{port}"
+
+ async def handleClusterClient(
+ reader: asyncio.StreamReader, writer: asyncio.StreamWriter
+ ):
+ clusterPeerAddress, clusterPeerPort = writer.get_extra_info("peername")
+ clusterPeerKey = getClusterPeerKey(clusterPeerAddress, clusterPeerPort)
  rpcTransport = rpc_reader.RpcStreamTransport(reader, writer)
  peer: rpc.RpcPeer
- peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, rpcTransport)
+ peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(
+ self.loop, rpcTransport
+ )
  peer.onProxySerialization = lambda value: onProxySerialization(
- value, clusterPeerPort)
+ value, clusterPeerPort
+ )
  future: asyncio.Future[rpc.RpcPeer] = asyncio.Future()
  future.set_result(peer)
- clusterPeers[clusterPeerPort] = future
+ clusterPeers[clusterPeerKey] = future

  async def connectRPCObject(o: ClusterObject):
  sha256 = computeClusterObjectHash(o)
- if sha256 != o['sha256']:
- raise Exception('secret incorrect')
- return await resolveObject(o['proxyId'], o.get('sourcePort'))
+ if sha256 != o["sha256"]:
+ raise Exception("secret incorrect")
+ return await resolveObject(o["proxyId"], o.get("sourceKey", None))

- peer.params['connectRPCObject'] = connectRPCObject
+ peer.params["connectRPCObject"] = connectRPCObject
  try:
  await peerReadLoop()
  except:
  pass
  finally:
- clusterPeers.pop(clusterPeerPort)
- peer.kill('cluster client killed')
+ clusterPeers.pop(clusterPeerKey)
+ peer.kill("cluster client killed")
  writer.close()

- clusterRpcServer = await asyncio.start_server(handleClusterClient, '127.0.0.1', 0)
+ listenAddress = "0.0.0.0" if SCRYPTED_CLUSTER_ADDRESS else "127.0.0.1"
+ clusterRpcServer = await asyncio.start_server(
+ handleClusterClient, listenAddress, 0
+ )
  clusterPort = clusterRpcServer.sockets[0].getsockname()[1]

- def ensureClusterPeer(port: int):
- clusterPeerPromise = clusterPeers.get(port)
- if not clusterPeerPromise:
- async def connectClusterPeer():
- reader, writer = await asyncio.open_connection(
- '127.0.0.1', port)
- _, clusterPeerPort = writer.get_extra_info('sockname')
- rpcTransport = rpc_reader.RpcStreamTransport(
- reader, writer)
- clusterPeer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, rpcTransport)
- clusterPeer.onProxySerialization = lambda value: onProxySerialization(
- value, clusterPeerPort)
-
- async def run_loop():
- try:
- await peerReadLoop()
- except:
- pass
- finally:
- clusterPeers.pop(port)
- asyncio.run_coroutine_threadsafe(run_loop(), self.loop)
- return clusterPeer
- clusterPeerPromise = self.loop.create_task(
- connectClusterPeer())
- clusterPeers[port] = clusterPeerPromise
+ def ensureClusterPeer(address: str, port: int):
+ if not address or address == SCRYPTED_CLUSTER_ADDRESS:
+ address = "127.0.0.1"
+ clusterPeerKey = getClusterPeerKey(address, port)
+ clusterPeerPromise = clusterPeers.get(clusterPeerKey)
+ if clusterPeerPromise:
+ return clusterPeerPromise
+
+ async def connectClusterPeer():
+ reader, writer = await asyncio.open_connection(address, port)
+ sourceAddress, sourcePort = writer.get_extra_info("sockname")
+ if (
+ sourceAddress != SCRYPTED_CLUSTER_ADDRESS
+ and sourceAddress != "127.0.0.1"
+ ):
+ print("source address mismatch", sourceAddress)
+ sourceKey = getClusterPeerKey(sourceAddress, sourcePort)
+ rpcTransport = rpc_reader.RpcStreamTransport(reader, writer)
+ clusterPeer, peerReadLoop = await rpc_reader.prepare_peer_readloop(
+ self.loop, rpcTransport
+ )
+ clusterPeer.onProxySerialization = lambda value: onProxySerialization(
+ value, sourceKey
+ )
+
+ async def run_loop():
+ try:
+ await peerReadLoop()
+ except:
+ pass
+ finally:
+ clusterPeers.pop(clusterPeerKey)
+
+ asyncio.run_coroutine_threadsafe(run_loop(), self.loop)
+ return clusterPeer
+
+ clusterPeerPromise = self.loop.create_task(connectClusterPeer())
+
+ clusterPeers[clusterPeerKey] = clusterPeerPromise
  return clusterPeerPromise

  async def connectRPCObject(value):
- clusterObject = getattr(value, '__cluster')
- if type(clusterObject) is not dict:
+ __cluster = getattr(value, "__cluster")
+ if type(__cluster) is not dict:
  return value

- if clusterObject.get('id', None) != clusterId:
+ clusterObject: ClusterObject = __cluster
+
+ if clusterObject.get("id", None) != clusterId:
  return value

- port = clusterObject['port']
- proxyId = clusterObject['proxyId']
- sourcePort = clusterObject.get('sourcePort', None)
+ address = clusterObject.get("address", None)
+ port = clusterObject["port"]
+ proxyId = clusterObject["proxyId"]
  if port == clusterPort:
- return await resolveObject(proxyId, sourcePort)
+ return await resolveObject(
+ proxyId, clusterObject.get("sourceKey", None)
+ )

- clusterPeerPromise = ensureClusterPeer(port)
+ clusterPeerPromise = ensureClusterPeer(address, port)

  try:
  clusterPeer = await clusterPeerPromise
@@ -524,38 +660,40 @@ class PluginRemote:
  if existing:
  return existing

- peerConnectRPCObject = clusterPeer.tags.get('connectRPCObject')
+ peerConnectRPCObject = clusterPeer.tags.get("connectRPCObject")
  if not peerConnectRPCObject:
- peerConnectRPCObject = await clusterPeer.getParam('connectRPCObject')
- clusterPeer.tags['connectRPCObject'] = peerConnectRPCObject
+ peerConnectRPCObject = await clusterPeer.getParam(
+ "connectRPCObject"
+ )
+ clusterPeer.tags["connectRPCObject"] = peerConnectRPCObject
  newValue = await peerConnectRPCObject(clusterObject)
  if not newValue:
- raise Exception('ipc object not found?')
+ raise Exception("rpc object not found?")
  return newValue
  except Exception as e:
  return value

  sdk.connectRPCObject = connectRPCObject

- forkMain = options and options.get('fork')
- debug = options.get('debug', None)
+ forkMain = options and options.get("fork")
+ debug = options.get("debug", None)
  plugin_volume = pv.ensure_plugin_volume(self.pluginId)
- plugin_zip_paths = pv.prep(plugin_volume, options.get('zipHash'))
+ plugin_zip_paths = pv.prep(plugin_volume, options.get("zipHash"))

  if debug:
  scrypted_volume = pv.get_scrypted_volume()
  # python debugger needs a predictable path for the plugin.zip,
  # as the vscode python extension doesn't seem to have a way
  # to read the package.json to configure the python remoteRoot.
- zipPath = os.path.join(scrypted_volume, 'plugin.zip')
+ zipPath = os.path.join(scrypted_volume, "plugin.zip")
  else:
- zipPath = plugin_zip_paths.get('zip_file')
+ zipPath = plugin_zip_paths.get("zip_file")

  if not os.path.exists(zipPath) or debug:
  os.makedirs(os.path.dirname(zipPath), exist_ok=True)
  zipData = await getZip()
- zipPathTmp = zipPath + '.tmp'
- with open(zipPathTmp, 'wb') as f:
+ zipPathTmp = zipPath + ".tmp"
+ with open(zipPathTmp, "wb") as f:
  f.write(zipData)
  try:
  os.remove(zipPath)
@@ -566,69 +704,98 @@ class PluginRemote:
  zip = zipfile.ZipFile(zipPath)

  if not forkMain:
- multiprocessing.set_start_method('spawn')
+ multiprocessing.set_start_method("spawn")

  # it's possible to run 32bit docker on aarch64, which cause pip requirements
  # to fail because pip only allows filtering on machine, even if running a different architeture.
  # this will cause prebuilt wheel installation to fail.
- if platform.machine() == 'aarch64' and platform.architecture()[0] == '32bit':
- print('=============================================')
+ if (
+ platform.machine() == "aarch64"
+ and platform.architecture()[0] == "32bit"
+ ):
+ print("=============================================")
  print(
- 'Python machine vs architecture mismatch detected. Plugin installation may fail.')
+ "Python machine vs architecture mismatch detected. Plugin installation may fail."
+ )
  print(
- 'This issue occurs if a 32bit system was upgraded to a 64bit kernel.')
+ "This issue occurs if a 32bit system was upgraded to a 64bit kernel."
+ )
  print(
- 'Reverting to the 32bit kernel (or reflashing as native 64 bit is recommended.')
- print('https://github.com/koush/scrypted/issues/678')
- print('=============================================')
+ "Reverting to the 32bit kernel (or reflashing as native 64 bit is recommended."
+ )
+ print("https://github.com/koush/scrypted/issues/678")
+ print("=============================================")

- python_version = 'python%s' % str(
- sys.version_info[0])+"."+str(sys.version_info[1])
- print('python version:', python_version)
- print('interpreter:', sys.executable)
+ python_version = (
+ "python%s" % str(sys.version_info[0]) + "." + str(sys.version_info[1])
+ )
+ print("python version:", python_version)
+ print("interpreter:", sys.executable)

- python_versioned_directory = '%s-%s-%s' % (
- python_version, platform.system(), platform.machine())
- SCRYPTED_PYTHON_VERSION = os.environ.get('SCRYPTED_PYTHON_VERSION')
- python_versioned_directory += '-' + SCRYPTED_PYTHON_VERSION
+ python_versioned_directory = "%s-%s-%s" % (
+ python_version,
+ platform.system(),
+ platform.machine(),
+ )
+ SCRYPTED_PYTHON_VERSION = os.environ.get("SCRYPTED_PYTHON_VERSION")
+ python_versioned_directory += "-" + SCRYPTED_PYTHON_VERSION

- pip_target = os.path.join(
- plugin_volume, python_versioned_directory)
+ pip_target = os.path.join(plugin_volume, python_versioned_directory)

- print('pip target: %s' % pip_target)
+ print("pip target: %s" % pip_target)

  if not os.path.exists(pip_target):
  os.makedirs(pip_target, exist_ok=True)

-
  def read_requirements(filename: str) -> str:
  if filename in zip.namelist():
- return zip.open(filename).read().decode('utf8')
- return ''
+ return zip.open(filename).read().decode("utf8")
+ return ""

- str_requirements = read_requirements('requirements.txt')
- str_optional_requirements = read_requirements('requirements.optional.txt')
+ str_requirements = read_requirements("requirements.txt")
+ str_optional_requirements = read_requirements("requirements.optional.txt")

  scrypted_requirements_basename = os.path.join(
- pip_target, 'requirements.scrypted')
- requirements_basename = os.path.join(
- pip_target, 'requirements')
+ pip_target, "requirements.scrypted"
+ )
+ requirements_basename = os.path.join(pip_target, "requirements")
  optional_requirements_basename = os.path.join(
- pip_target, 'requirements.optional')
+ pip_target, "requirements.optional"
+ )

  need_pip = True
  if str_requirements:
  need_pip = need_requirements(requirements_basename, str_requirements)
  if not need_pip:
- need_pip = need_requirements(scrypted_requirements_basename, SCRYPTED_REQUIREMENTS)
+ need_pip = need_requirements(
+ scrypted_requirements_basename, SCRYPTED_REQUIREMENTS
+ )

  if need_pip:
  remove_pip_dirs(plugin_volume)
- install_with_pip(pip_target, packageJson, SCRYPTED_REQUIREMENTS, scrypted_requirements_basename, ignore_error=True)
- install_with_pip(pip_target, packageJson, str_requirements, requirements_basename, ignore_error=False)
- install_with_pip(pip_target, packageJson, str_optional_requirements, optional_requirements_basename, ignore_error=True)
+ install_with_pip(
+ pip_target,
+ packageJson,
+ SCRYPTED_REQUIREMENTS,
+ scrypted_requirements_basename,
+ ignore_error=True,
+ )
+ install_with_pip(
+ pip_target,
+ packageJson,
+ str_requirements,
+ requirements_basename,
+ ignore_error=False,
+ )
+ install_with_pip(
+ pip_target,
+ packageJson,
+ str_optional_requirements,
+ optional_requirements_basename,
+ ignore_error=True,
+ )
  else:
- print('requirements.txt (up to date)')
+ print("requirements.txt (up to date)")
  print(str_requirements)

  sys.path.insert(0, zipPath)
@@ -653,9 +820,10 @@ class PluginRemote:
  def host_fork() -> PluginFork:
  parent_conn, child_conn = multiprocessing.Pipe()
  pluginFork = PluginFork()
- print('new fork')
+ print("new fork")
  pluginFork.worker = multiprocessing.Process(
- target=plugin_fork, args=(child_conn,), daemon=True)
+ target=plugin_fork, args=(child_conn,), daemon=True
+ )
  pluginFork.worker.start()

  def schedule_exit_check():
@@ -664,42 +832,47 @@ class PluginRemote:
  pluginFork.worker.join()
  else:
  schedule_exit_check()
+
  self.loop.call_later(2, exit_check)

  schedule_exit_check()

  async def getFork():
- rpcTransport = rpc_reader.RpcConnectionTransport(
- parent_conn)
- forkPeer, readLoop = await rpc_reader.prepare_peer_readloop(self.loop, rpcTransport)
- forkPeer.peerName = 'thread'
+ rpcTransport = rpc_reader.RpcConnectionTransport(parent_conn)
+ forkPeer, readLoop = await rpc_reader.prepare_peer_readloop(
+ self.loop, rpcTransport
+ )
+ forkPeer.peerName = "thread"

  async def updateStats(stats):
- self.ptimeSum += stats['cpu']['user']
+ self.ptimeSum += stats["cpu"]["user"]
  self.allMemoryStats[forkPeer] = stats
- forkPeer.params['updateStats'] = updateStats
+
+ forkPeer.params["updateStats"] = updateStats

  async def forkReadLoop():
  try:
  await readLoop()
  except:
  # traceback.print_exc()
- print('fork read loop exited')
+ print("fork read loop exited")
  finally:
  self.allMemoryStats.pop(forkPeer)
  parent_conn.close()
  rpcTransport.executor.shutdown()
  pluginFork.worker.kill()
- asyncio.run_coroutine_threadsafe(
- forkReadLoop(), loop=self.loop)
- getRemote = await forkPeer.getParam('getRemote')
- remote: PluginRemote = await getRemote(self.api, self.pluginId, self.hostInfo)
+
+ asyncio.run_coroutine_threadsafe(forkReadLoop(), loop=self.loop)
+ getRemote = await forkPeer.getParam("getRemote")
+ remote: PluginRemote = await getRemote(
+ self.api, self.pluginId, self.hostInfo
+ )
  await remote.setSystemState(self.systemManager.getSystemState())
  for nativeId, ds in self.nativeIds.items():
  await remote.setNativeId(nativeId, ds.id, ds.storage)
  forkOptions = options.copy()
- forkOptions['fork'] = True
- forkOptions['debug'] = debug
+ forkOptions["fork"] = True
+ forkOptions["debug"] = debug
  return await remote.loadZip(packageJson, getZip, forkOptions)

  pluginFork.result = asyncio.create_task(getFork())
@@ -711,14 +884,18 @@ class PluginRemote:
  sdk_init2(sdk)
  except:
  from scrypted_sdk import sdk_init # type: ignore
- sdk_init(zip, self, self.systemManager,
- self.deviceManager, self.mediaManager)
+
+ sdk_init(
+ zip, self, self.systemManager, self.deviceManager, self.mediaManager
+ )

  if not forkMain:
  from main import create_scrypted_plugin # type: ignore
+
  pluginInstance = await rpc.maybe_await(create_scrypted_plugin())
  try:
  from plugin_repl import createREPLServer
+
  self.replPort = await createREPLServer(sdk, pluginInstance)
  except Exception as e:
  print(f"Warning: Python REPL cannot be loaded: {e}")
@@ -726,6 +903,7 @@ class PluginRemote:
  return pluginInstance

  from main import fork # type: ignore
+
  forked = await rpc.maybe_await(fork())
  if type(forked) == dict:
  forked[rpc.RpcPeer.PROPERTY_JSON_COPY_SERIALIZE_CHILDREN] = True
@@ -753,13 +931,13 @@ class PluginRemote:
  self.systemState[id] = state

  async def notify(self, id, eventDetails: EventDetails, value):
- property = eventDetails.get('property')
+ property = eventDetails.get("property")
  if property:
  state = None
  if self.systemState:
  state = self.systemState.get(id, None)
  if not state:
- print('state not found for %s' % id)
+ print("state not found for %s" % id)
  return
  state[property] = value
  # systemManager.events.notify(id, eventTime, eventInterface, property, value.value, changed);
@@ -776,49 +954,52 @@ class PluginRemote:
  async def getServicePort(self, name):
  if name == "repl":
  if self.replPort is None:
- raise Exception('REPL unavailable: Plugin not loaded.')
+ raise Exception("REPL unavailable: Plugin not loaded.")
  if self.replPort == 0:
- raise Exception('REPL unavailable: Python REPL not available.')
+ raise Exception("REPL unavailable: Python REPL not available.")
  return self.replPort
- raise Exception(f'unknown service {name}')
+ raise Exception(f"unknown service {name}")

  async def start_stats_runner(self):
  pong = None
+
  async def ping(time: int):
  nonlocal pong
- pong = pong or await self.peer.getParam('pong')
+ pong = pong or await self.peer.getParam("pong")
  await pong(time)
- self.peer.params['ping'] = ping

- update_stats = await self.peer.getParam('updateStats')
+ self.peer.params["ping"] = ping
+
+ update_stats = await self.peer.getParam("updateStats")
  if not update_stats:
- print('host did not provide update_stats')
+ print("host did not provide update_stats")
  return

  def stats_runner():
  ptime = round(time.process_time() * 1000000) + self.ptimeSum
  try:
  import psutil
+
  process = psutil.Process(os.getpid())
  heapTotal = process.memory_info().rss
  except:
  try:
  import resource
- heapTotal = resource.getrusage(
- resource.RUSAGE_SELF).ru_maxrss
+
+ heapTotal = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
  except:
  heapTotal = 0

  for _, stats in self.allMemoryStats.items():
- heapTotal += stats['memoryUsage']['heapTotal']
+ heapTotal += stats["memoryUsage"]["heapTotal"]

  stats = {
- 'cpu': {
- 'user': ptime,
- 'system': 0,
+ "cpu": {
+ "user": ptime,
+ "system": 0,
  },
- 'memoryUsage': {
- 'heapTotal': heapTotal,
+ "memoryUsage": {
+ "heapTotal": heapTotal,
  },
  }
  asyncio.run_coroutine_threadsafe(update_stats(stats), self.loop)
827
1008
  stats_runner()
828
1009
 
829
1010
 
830
- async def plugin_async_main(loop: AbstractEventLoop, rpcTransport: rpc_reader.RpcTransport):
1011
+ async def plugin_async_main(
1012
+ loop: AbstractEventLoop, rpcTransport: rpc_reader.RpcTransport
1013
+ ):
831
1014
  peer, readLoop = await rpc_reader.prepare_peer_readloop(loop, rpcTransport)
832
- peer.params['print'] = print
833
- peer.params['getRemote'] = lambda api, pluginId, hostInfo: PluginRemote(
834
- peer, api, pluginId, hostInfo, loop)
1015
+ peer.params["print"] = print
1016
+ peer.params["getRemote"] = lambda api, pluginId, hostInfo: PluginRemote(
1017
+ peer, api, pluginId, hostInfo, loop
1018
+ )
835
1019
 
836
1020
  try:
837
1021
  await readLoop()
@@ -845,6 +1029,7 @@ def main(rpcTransport: rpc_reader.RpcTransport):
845
1029
  def gc_runner():
846
1030
  gc.collect()
847
1031
  loop.call_later(10, gc_runner)
1032
+
848
1033
  gc_runner()
849
1034
 
850
1035
  loop.run_until_complete(plugin_async_main(loop, rpcTransport))
@@ -864,8 +1049,10 @@ def plugin_main(rpcTransport: rpc_reader.RpcTransport):
864
1049
  # if it does, try starting without it.
865
1050
  try:
866
1051
  import gi
867
- gi.require_version('Gst', '1.0')
1052
+
1053
+ gi.require_version("Gst", "1.0")
868
1054
  from gi.repository import GLib, Gst
1055
+
869
1056
  Gst.init(None)
870
1057
 
871
1058
  # can't remember why starting the glib main loop is necessary.
@@ -873,8 +1060,9 @@ def plugin_main(rpcTransport: rpc_reader.RpcTransport):
873
1060
  # seems optional on other platforms.
874
1061
  loop = GLib.MainLoop()
875
1062
 
876
- worker = threading.Thread(target=main, args=(
877
- rpcTransport,), name="asyncio-main")
1063
+ worker = threading.Thread(
1064
+ target=main, args=(rpcTransport,), name="asyncio-main"
1065
+ )
878
1066
  worker.start()
879
1067
 
880
1068
  loop.run()
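
For reference only (illustrative, not shipped code): the peer bookkeeping in the diff above keys cluster connections by "address:port" and falls back to loopback when no address is given or the address is the local SCRYPTED_CLUSTER_ADDRESS. A minimal sketch, with the helper names assumed:

import os

SCRYPTED_CLUSTER_ADDRESS = os.environ.get("SCRYPTED_CLUSTER_ADDRESS", None)

def get_cluster_peer_key(address: str, port: int) -> str:
    # Peers are now tracked by "address:port" rather than by port alone.
    return f"{address}:{port}"

def normalize_peer_address(address: str) -> str:
    # Connections to self (or with no address) go over loopback, mirroring ensureClusterPeer.
    if not address or address == SCRYPTED_CLUSTER_ADDRESS:
        return "127.0.0.1"
    return address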