@scrypted/server 0.7.13 → 0.7.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of @scrypted/server might be problematic.

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@scrypted/server",
-  "version": "0.7.13",
+  "version": "0.7.14",
   "description": "",
   "dependencies": {
     "@mapbox/node-pre-gyp": "^1.0.10",
@@ -67,8 +67,11 @@
     "prebuild": "rimraf dist",
     "build": "tsc --outDir dist",
     "postbuild": "node test/check-build-output.js",
-    "prepublishOnly": "npm version patch && git add package.json && npm run build && git commit -m prepublish",
-    "postpublish": "git tag v$npm_package_version && git push origin v$npm_package_version",
+    "prebeta": "npm version patch && git add package.json && npm run build && git commit -m prebeta",
+    "beta": "npm publish --tag beta",
+    "release": "npm publish",
+    "prerelease": "npm version patch && git add package.json && npm run build && git commit -m prerelease",
+    "postrelease": "git tag v$npm_package_version && git push origin v$npm_package_version",
     "docker": "scripts/github-workflow-publish-docker.sh"
   },
   "author": "",
@@ -35,25 +35,6 @@ class SystemDeviceState(TypedDict):
     stateTime: int
     value: any
 
-
-class StreamPipeReader:
-    def __init__(self, conn: multiprocessing.connection.Connection) -> None:
-        self.conn = conn
-        self.executor = concurrent.futures.ThreadPoolExecutor()
-
-    def readBlocking(self, n):
-        b = bytes(0)
-        while len(b) < n:
-            self.conn.poll(None)
-            add = os.read(self.conn.fileno(), n - len(b))
-            if not len(add):
-                raise Exception('unable to read requested bytes')
-            b += add
-        return b
-
-    async def read(self, n):
-        return await asyncio.get_event_loop().run_in_executor(self.executor, lambda: self.readBlocking(n))
-
 class SystemManager(scrypted_python.scrypted_sdk.types.SystemManager):
     def __init__(self, api: Any, systemState: Mapping[str, Mapping[str, SystemDeviceState]]) -> None:
         super().__init__()
@@ -288,8 +269,9 @@ class PluginRemote:
         clusterSecret = options['clusterSecret']
 
         async def handleClusterClient(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
+            rpcTransport = rpc_reader.RpcStreamTransport(reader, writer)
             peer: rpc.RpcPeer
-            peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, reader = reader, writer = writer)
+            peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, rpcTransport)
             async def connectRPCObject(id: str, secret: str):
                 m = hashlib.sha256()
                 m.update(bytes('%s%s' % (clusterPort, clusterSecret), 'utf8'))
@@ -324,7 +306,8 @@ class PluginRemote:
         async def connectClusterPeer():
             reader, writer = await asyncio.open_connection(
                 '127.0.0.1', port)
-            peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, reader = reader, writer = writer)
+            rpcTransport = rpc_reader.RpcStreamTransport(reader, writer)
+            peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, rpcTransport)
             async def run_loop():
                 try:
                     await peerReadLoop()
@@ -485,8 +468,8 @@ class PluginRemote:
         schedule_exit_check()
 
         async def getFork():
-            reader = StreamPipeReader(parent_conn)
-            forkPeer, readLoop = await rpc_reader.prepare_peer_readloop(self.loop, reader = reader, writeFd = parent_conn.fileno())
+            rpcTransport = rpc_reader.RpcConnectionTransport(parent_conn)
+            forkPeer, readLoop = await rpc_reader.prepare_peer_readloop(self.loop, rpcTransport)
             forkPeer.peerName = 'thread'
 
             async def updateStats(stats):
@@ -502,7 +485,7 @@ class PluginRemote:
                 finally:
                     allMemoryStats.pop(forkPeer)
                     parent_conn.close()
-                    reader.executor.shutdown()
+                    rpcTransport.executor.shutdown()
             asyncio.run_coroutine_threadsafe(forkReadLoop(), loop=self.loop)
             getRemote = await forkPeer.getParam('getRemote')
             remote: PluginRemote = await getRemote(self.api, self.pluginId, self.hostInfo)
@@ -594,8 +577,8 @@ class PluginRemote:
 
 allMemoryStats = {}
 
-async def plugin_async_main(loop: AbstractEventLoop, readFd: int = None, writeFd: int = None, reader: asyncio.StreamReader = None, writer: asyncio.StreamWriter = None):
-    peer, readLoop = await rpc_reader.prepare_peer_readloop(loop, readFd=readFd, writeFd=writeFd, reader=reader, writer=writer)
+async def plugin_async_main(loop: AbstractEventLoop, rpcTransport: rpc_reader.RpcTransport):
+    peer, readLoop = await rpc_reader.prepare_peer_readloop(loop, rpcTransport)
     peer.params['print'] = print
     peer.params['getRemote'] = lambda api, pluginId, hostInfo: PluginRemote(peer, api, pluginId, hostInfo, loop)
 
@@ -642,11 +625,11 @@ async def plugin_async_main(loop: AbstractEventLoop, readFd: int = None, writeFd
     try:
         await readLoop()
     finally:
-        if reader and hasattr(reader, 'executor'):
-            r: StreamPipeReader = reader
+        if type(rpcTransport) == rpc_reader.RpcConnectionTransport:
+            r: rpc_reader.RpcConnectionTransport = rpcTransport
             r.executor.shutdown()
 
-def main(readFd: int = None, writeFd: int = None, reader: asyncio.StreamReader = None, writer: asyncio.StreamWriter = None):
+def main(rpcTransport: rpc_reader.RpcTransport):
     loop = asyncio.new_event_loop()
 
     def gc_runner():
@@ -654,10 +637,10 @@ def main(readFd: int = None, writeFd: int = None, reader: asyncio.StreamReader =
         loop.call_later(10, gc_runner)
     gc_runner()
 
-    loop.run_until_complete(plugin_async_main(loop, readFd=readFd, writeFd=writeFd, reader=reader, writer=writer))
+    loop.run_until_complete(plugin_async_main(loop, rpcTransport))
    loop.close()
 
-def plugin_main(readFd: int = None, writeFd: int = None, reader: asyncio.StreamReader = None, writer: asyncio.StreamWriter = None):
+def plugin_main(rpcTransport: rpc_reader.RpcTransport):
     try:
         import gi
         gi.require_version('Gst', '1.0')
@@ -666,18 +649,16 @@ def plugin_main(readFd: int = None, writeFd: int = None, reader: asyncio.StreamR
 
         loop = GLib.MainLoop()
 
-        worker = threading.Thread(target=main, args=(readFd, writeFd, reader, writer), name="asyncio-main")
+        worker = threading.Thread(target=main, args=(rpcTransport,), name="asyncio-main")
         worker.start()
 
         loop.run()
     except:
-        main(readFd=readFd, writeFd=writeFd, reader=reader, writer=writer)
+        main(rpcTransport)
 
 
 def plugin_fork(conn: multiprocessing.connection.Connection):
-    fd = os.dup(conn.fileno())
-    reader = StreamPipeReader(conn)
-    plugin_main(reader=reader, writeFd=fd)
+    plugin_main(rpc_reader.RpcConnectionTransport(conn))
 
 
 if __name__ == "__main__":
-    plugin_main(3, 4)
+    plugin_main(rpc_reader.RpcFileTransport(3, 4))
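The fork path above now hands each end of a multiprocessing pipe to rpc_reader.RpcConnectionTransport: the child process enters through plugin_fork, and the parent wraps parent_conn inside getFork. Below is a minimal sketch of that wiring, assuming the package's python directory is importable; the names fork_entry, parent, and the 'ping' param are illustrative and not part of the package.

import asyncio
import multiprocessing
import multiprocessing.connection

import rpc_reader  # from this package; assumes its python directory is on sys.path


def fork_entry(conn: multiprocessing.connection.Connection):
    # Child end of the pipe: the same shape as plugin_fork() above.
    rpc_transport = rpc_reader.RpcConnectionTransport(conn)
    loop = asyncio.new_event_loop()

    async def child():
        peer, read_loop = await rpc_reader.prepare_peer_readloop(loop, rpc_transport)
        peer.params['ping'] = lambda: 'pong'  # illustrative param for the parent to fetch
        await read_loop()

    loop.run_until_complete(child())


async def parent(loop: asyncio.AbstractEventLoop):
    parent_conn, child_conn = multiprocessing.Pipe()
    multiprocessing.Process(target=fork_entry, args=(child_conn,)).start()

    # Parent end: mirrors getFork(), wrapping the Connection and preparing a peer.
    rpc_transport = rpc_reader.RpcConnectionTransport(parent_conn)
    peer, read_loop = await rpc_reader.prepare_peer_readloop(loop, rpc_transport)
    asyncio.ensure_future(read_loop())

    # Same pattern getFork() uses to fetch 'getRemote' from the forked peer.
    ping = await peer.getParam('ping')
    print(await ping())


if __name__ == '__main__':
    main_loop = asyncio.new_event_loop()
    main_loop.run_until_complete(parent(main_loop))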
@@ -4,15 +4,14 @@ import asyncio
 import base64
 import json
 import os
-import sys
 import threading
 from asyncio.events import AbstractEventLoop
-from os import sys
-from typing import List
-
+from typing import List, Any
+import multiprocessing.connection
 import aiofiles
 import rpc
-
+import concurrent.futures
+import json
 
 class BufferSerializer(rpc.RpcSerializer):
     def serialize(self, value, serializationContext):
@@ -36,31 +35,118 @@ class SidebandBufferSerializer(rpc.RpcSerializer):
         buffer = buffers.pop()
         return buffer
 
-async def readLoop(loop, peer: rpc.RpcPeer, reader: asyncio.StreamReader):
-    deserializationContext = {
-        'buffers': []
-    }
+class RpcTransport:
+    async def prepare(self):
+        pass
 
-    if isinstance(reader, asyncio.StreamReader):
-        async def read(n):
-            return await reader.readexactly(n)
-    else:
-        async def read(n):
-            return await reader.read(n)
+    async def read(self):
+        pass
 
+    def writeBuffer(self, buffer, reject):
+        pass
 
-    while True:
-        lengthBytes = await read(4)
-        typeBytes = await read(1)
+    def writeJSON(self, json, reject):
+        pass
+
+class RpcFileTransport(RpcTransport):
+    reader: asyncio.StreamReader
+
+    def __init__(self, readFd: int, writeFd: int) -> None:
+        super().__init__()
+        self.readFd = readFd
+        self.writeFd = writeFd
+        self.reader = None
+
+    async def prepare(self):
+        await super().prepare()
+        self.reader = await aiofiles.open(self.readFd, mode='rb')
+
+    async def read(self):
+        lengthBytes = await self.reader.read(4)
+        typeBytes = await self.reader.read(1)
         type = typeBytes[0]
         length = int.from_bytes(lengthBytes, 'big')
-        data = await read(length - 1)
-
+        data = await self.reader.read(length - 1)
         if type == 1:
-            deserializationContext['buffers'].append(data)
+            return data
+        message = json.loads(data)
+        return message
+
+    def writeMessage(self, type: int, buffer, reject):
+        length = len(buffer) + 1
+        lb = length.to_bytes(4, 'big')
+        try:
+            for b in [lb, bytes([type]), buffer]:
+                os.write(self.writeFd, b)
+        except Exception as e:
+            if reject:
+                reject(e)
+
+    def writeJSON(self, j, reject):
+        return self.writeMessage(0, bytes(json.dumps(j), 'utf8'), reject)
+
+    def writeBuffer(self, buffer, reject):
+        return self.writeMessage(1, buffer, reject)
+
+class RpcStreamTransport(RpcTransport):
+    def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
+        super().__init__()
+        self.reader = reader
+        self.writer = writer
+
+    async def read(self, n: int):
+        return await self.reader.readexactly(n)
+
+    def writeMessage(self, type: int, buffer, reject):
+        length = len(buffer) + 1
+        lb = length.to_bytes(4, 'big')
+        try:
+            for b in [lb, bytes([type]), buffer]:
+                self.writer.write(b)
+        except Exception as e:
+            if reject:
+                reject(e)
+
+    def writeJSON(self, j, reject):
+        return self.writeMessage(0, bytes(json.dumps(j), 'utf8'), reject)
+
+    def writeBuffer(self, buffer, reject):
+        return self.writeMessage(1, buffer, reject)
+
+class RpcConnectionTransport(RpcTransport):
+    def __init__(self, connection: multiprocessing.connection.Connection) -> None:
+        super().__init__()
+        self.connection = connection
+        self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
+
+    async def read(self):
+        return await asyncio.get_event_loop().run_in_executor(self.executor, lambda: self.connection.recv())
+
+    def writeMessage(self, json, reject):
+        try:
+            self.connection.send(json)
+        except Exception as e:
+            if reject:
+                reject(e)
+
+    def writeJSON(self, json, reject):
+        return self.writeMessage(json, reject)
+
+    def writeBuffer(self, buffer, reject):
+        return self.writeMessage(bytes(buffer), reject)
+
+async def readLoop(loop, peer: rpc.RpcPeer, rpcTransport: RpcTransport):
+    deserializationContext = {
+        'buffers': []
+    }
+
+    while True:
+        message = await rpcTransport.read()
+
+        if type(message) != dict:
+            deserializationContext['buffers'].append(message)
             continue
 
-        message = json.loads(data)
         asyncio.run_coroutine_threadsafe(
             peer.handleMessage(message, deserializationContext), loop)
 
@@ -68,46 +154,20 @@ async def readLoop(loop, peer: rpc.RpcPeer, reader: asyncio.StreamReader):
             'buffers': []
         }
 
-async def prepare_peer_readloop(loop: AbstractEventLoop, readFd: int = None, writeFd: int = None, reader: asyncio.StreamReader = None, writer: asyncio.StreamWriter = None):
-    reader = reader or await aiofiles.open(readFd, mode='rb')
+async def prepare_peer_readloop(loop: AbstractEventLoop, rpcTransport: RpcTransport):
+    await rpcTransport.prepare()
 
     mutex = threading.Lock()
 
-    if writer:
-        def write(buffers, reject):
-            try:
-                for b in buffers:
-                    writer.write(b)
-            except Exception as e:
-                if reject:
-                    reject(e)
-            return None
-    else:
-        def write(buffers, reject):
-            try:
-                for b in buffers:
-                    os.write(writeFd, b)
-            except Exception as e:
-                if reject:
-                    reject(e)
-
     def send(message, reject=None, serializationContext=None):
         with mutex:
             if serializationContext:
                 buffers = serializationContext.get('buffers', None)
                 if buffers:
                     for buffer in buffers:
-                        length = len(buffer) + 1
-                        lb = length.to_bytes(4, 'big')
-                        type = 1
-                        write([lb, bytes([type]), buffer], reject)
-
-            jsonString = json.dumps(message)
-            b = bytes(jsonString, 'utf8')
-            length = len(b) + 1
-            lb = length.to_bytes(4, 'big')
-            type = 0
-            write([lb, bytes([type]), b], reject)
+                        rpcTransport.writeBuffer(buffer, reject)
+
+            rpcTransport.writeJSON(message, reject)
 
     peer = rpc.RpcPeer(send)
     peer.nameDeserializerMap['Buffer'] = SidebandBufferSerializer()
@@ -117,7 +177,7 @@ async def prepare_peer_readloop(loop: AbstractEventLoop, readFd: int = None, wri
 
     async def peerReadLoop():
        try:
-            await readLoop(loop, peer, reader)
+            await readLoop(loop, peer, rpcTransport)
        except:
            peer.kill()
            raise
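For reference, RpcFileTransport and RpcStreamTransport share the same wire framing: a 4-byte big-endian length that counts the type byte plus the payload, a single type byte (0 for a JSON message, 1 for a sideband buffer), then the payload itself. A small self-contained sketch of that framing follows; the helper names encode_frame and decode_frame are illustrative and not part of the package.

import json


def encode_frame(frame_type: int, payload: bytes) -> bytes:
    # Length counts the type byte plus the payload, matching writeMessage() above.
    length = len(payload) + 1
    return length.to_bytes(4, 'big') + bytes([frame_type]) + payload


def decode_frame(data: bytes):
    # Mirrors RpcFileTransport.read(): length, then type, then length - 1 payload bytes.
    length = int.from_bytes(data[:4], 'big')
    frame_type = data[4]
    payload = data[5:4 + length]
    return frame_type, payload


message = {'hello': 'world'}  # arbitrary example payload, not a real rpc message
frame = encode_frame(0, bytes(json.dumps(message), 'utf8'))
frame_type, payload = decode_frame(frame)
assert frame_type == 0 and json.loads(payload) == message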