wslink 1.12.3__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wslink/backends/aiohttp/__init__.py +21 -19
- wslink/backends/aiohttp/launcher.py +16 -12
- wslink/backends/aiohttp/relay.py +0 -2
- wslink/backends/generic/core.py +1 -1
- wslink/chunking.py +214 -0
- wslink/launcher.py +7 -5
- wslink/protocol.py +75 -132
- wslink/publish.py +5 -34
- wslink/websocket.py +3 -1
- {wslink-1.12.3.dist-info → wslink-2.0.0.dist-info}/METADATA +2 -1
- wslink-2.0.0.dist-info/RECORD +25 -0
- {wslink-1.12.3.dist-info → wslink-2.0.0.dist-info}/WHEEL +1 -1
- wslink-1.12.3.dist-info/RECORD +0 -24
- {wslink-1.12.3.dist-info → wslink-2.0.0.dist-info}/top_level.txt +0 -0
wslink/backends/aiohttp/__init__.py
CHANGED
@@ -15,6 +15,8 @@ import aiohttp.web as aiohttp_web
 MAX_MSG_SIZE = int(os.environ.get("WSLINK_MAX_MSG_SIZE", 4194304))
 HEART_BEAT = int(os.environ.get("WSLINK_HEART_BEAT", 30)) # 30 seconds
 
+logger = logging.getLogger(__name__)
+
 # -----------------------------------------------------------------------------
 # HTTP helpers
 # -----------------------------------------------------------------------------
@@ -98,20 +100,20 @@ class WebAppServer(AbstractWebApp):
             self.app, handle_signals=self.handle_signals
         )
 
-
+        logger.info("awaiting runner setup")
         await self._runner.setup()
 
         self._site = aiohttp_web.TCPSite(
             self._runner, self.host, self.port, ssl_context=self.ssl_context
         )
 
-
+        logger.info("awaiting site startup")
         await self._site.start()
 
         if port_callback is not None:
             port_callback(self.get_port())
 
-
+        logger.info("Print WSLINK_READY_MSG")
        STARTUP_MSG = os.environ.get("WSLINK_READY_MSG", "wslink: Starting factory")
        if STARTUP_MSG:
            # Emit an expected log message so launcher.py knows we've started up.
@@ -119,10 +121,10 @@
            # We've seen some issues with stdout buffering - be conservative.
            sys.stdout.flush()
 
-
+        logger.info(f"Schedule auto shutdown with timout {self.timeout}")
         self.shutdown_schedule()
 
-
+        logger.info("awaiting running future")
         await self.completion
 
     async def stop(self):
@@ -133,12 +135,12 @@ class WebAppServer(AbstractWebApp):
         # Neither site.stop() nor runner.cleanup() actually stop the server
         # as documented, but at least runner.cleanup() results in the
         # "on_shutdown" signal getting sent.
-
+        logger.info("Performing runner.cleanup()")
         await self.runner.cleanup()
 
         # So to actually stop the server, the workaround is just to resolve
         # the future we awaited in the start method.
-
+        logger.info("Stopping server")
         self.completion.set_result(True)
 
 
@@ -163,7 +165,7 @@ class ReverseWebAppServer(AbstractWebApp):
 
 def create_webserver(server_config):
     if "logging_level" in server_config and server_config["logging_level"]:
-        logging.
+        logging.getLogger("wslink").setLevel(server_config["logging_level"])
 
     # Shortcut for reverse connection
     if "reverse_url" in server_config:
@@ -184,10 +186,10 @@ def is_binary(msg):
 
 class AioHttpWsHandler(WslinkHandler):
     async def disconnectClients(self):
-
+        logger.info("Closing client connections:")
         keys = list(self.connections.keys())
         for client_id in keys:
-
+            logger.info(" {0}".format(client_id))
             ws = self.connections[client_id]
             await ws.close(
                 code=aiohttp.WSCloseCode.GOING_AWAY, message="Server shutdown"
@@ -202,7 +204,7 @@ class AioHttpWsHandler(WslinkHandler):
         )
         self.connections[client_id] = current_ws
 
-
+        logger.info("client {0} connected".format(client_id))
 
         self.web_app.shutdown_cancel()
 
@@ -217,31 +219,31 @@
         del self.connections[client_id]
         self.authentified_client_ids.discard(client_id)
 
-
+        logger.info("client {0} disconnected".format(client_id))
 
         if not self.connections:
-
+            logger.info("No more connections, scheduling shutdown")
             self.web_app.shutdown_schedule()
 
         return current_ws
 
     async def reverse_connect_to(self, url):
-
+        logger.debug("reverse_connect_to: running with url %s", url)
         client_id = self.reverse_connection_client_id
         async with aiohttp.ClientSession() as session:
-
+            logger.debug("reverse_connect_to: client session started")
            async with session.ws_connect(url) as current_ws:
-
+                logger.debug("reverse_connect_to: ws started")
                 self.connections[client_id] = current_ws
-
+                logger.debug("reverse_connect_to: onConnect")
                 await self.onConnect(url, client_id)
 
                 async for msg in current_ws:
                     if not current_ws.closed:
                         await self.onMessage(is_binary(msg), msg, client_id)
 
-
+                logger.debug("reverse_connect_to: onClose")
                 await self.onClose(client_id)
                 del self.connections[client_id]
 
-
+        logger.debug("reverse_connect_to: exited")
wslink/backends/aiohttp/launcher.py
CHANGED
@@ -23,6 +23,8 @@ from wslink.launcher import (
     STATUS_NOT_FOUND,
 )
 
+logger = logging.getLogger(__name__)
+
 # ===========================================================================
 # Launcher ENV configuration
 # ===========================================================================
@@ -50,7 +52,7 @@ class LauncherResource(object):
     def __del__(self):
         try:
             # causes an exception when server is killed with Ctrl-C
-
+            logger.warning("Server factory shutting down. Stopping all processes")
         except:
             pass
 
@@ -141,17 +143,17 @@ class LauncherResource(object):
 
         if not id:
             message = "id not provided in GET request"
-
+            logger.error(message)
             return aiohttp_web.json_response(
                 {"error": message}, status=STATUS_BAD_REQUEST
             )
 
-
+        logger.info("GET request received for id: %s" % id)
 
         session = self.session_manager.getSession(id)
         if not session:
             message = "No session with id: %s" % id
-
+            logger.error(message)
             return aiohttp_web.json_response(
                 {"error": message}, status=STATUS_BAD_REQUEST
             )
@@ -170,17 +172,17 @@ class LauncherResource(object):
 
         if not id:
             message = "id not provided in DELETE request"
-
+            logger.error(message)
             return aiohttp_web.json_response(
                 {"error": message}, status=STATUS_BAD_REQUEST
             )
 
-
+        logger.info("DELETE request received for id: %s" % id)
 
         session = self.session_manager.getSession(id)
         if not session:
             message = "No session with id: %s" % id
-
+            logger.error(message)
             return aiohttp_web.json_response(
                 {"error": message}, status=STATUS_NOT_FOUND
             )
@@ -190,7 +192,7 @@ class LauncherResource(object):
         self.process_manager.stopProcess(id)
 
         message = "Deleted session with id: %s" % id
-
+        logger.info(message)
 
         return aiohttp_web.json_response(session, status=STATUS_OK)
 
@@ -213,15 +215,17 @@ def startWebServer(options, config):
     # Setup logging
     logFileName = log_dir + os.sep + "launcherLog.log"
     formatting = "%(asctime)s:%(levelname)s:%(name)s:%(message)s"
-
-
-    )
+    # create file handler which logs even debug messages
+    fh = logging.FileHandler(logFileName, mode="w")
+    fh.setLevel(logging.DEBUG)
+    fh.setFormatter(logging.Formatter(formatting))
+    logging.getLogger("wslink").addHandler(fh)
     if options.debug:
         console = logging.StreamHandler(sys.stdout)
         console.setLevel(logging.INFO)
         formatter = logging.Formatter(formatting)
         console.setFormatter(formatter)
-        logging.getLogger("").addHandler(console)
+        logging.getLogger("wslink").addHandler(console)
 
     web_app = aiohttp_web.Application()
 
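Both backends now log through loggers named after their module (all under the "wslink" hierarchy) instead of the root logger, and the launcher attaches its file and console handlers to logging.getLogger("wslink"). A minimal sketch of how an embedding application could surface wslink's debug output under this scheme (assumed typical usage, not code shipped in the package):

import logging
import sys

# wslink 2.0 modules call logging.getLogger(__name__), so their records land
# under the "wslink" logger; configure that logger once in the application.
wslink_logger = logging.getLogger("wslink")
wslink_logger.setLevel(logging.DEBUG)

console = logging.StreamHandler(sys.stdout)
console.setFormatter(
    logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s")
)
wslink_logger.addHandler(console)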
wslink/backends/aiohttp/relay.py
CHANGED
wslink/backends/generic/core.py
CHANGED
@@ -125,7 +125,7 @@ def startWebServer(*args, **kwargs):
 
 def create_webserver(server_config):
     if "logging_level" in server_config and server_config["logging_level"]:
-        logging.
+        logging.getLogger("wslink").setLevel(server_config["logging_level"])
 
     # Reverse connection
     if "reverse_url" in server_config:
wslink/chunking.py
ADDED
@@ -0,0 +1,214 @@
+import sys
+import secrets
+import msgpack
+from typing import Dict, Tuple, Union
+if sys.version_info >= (3, 8):
+    from typing import TypedDict # pylint: disable=no-name-in-module
+else:
+    from typing_extensions import TypedDict
+
+UINT32_LENGTH = 4
+ID_LOCATION = 0
+ID_LENGTH = UINT32_LENGTH
+MESSAGE_OFFSET_LOCATION = ID_LOCATION + ID_LENGTH
+MESSAGE_OFFSET_LENGTH = UINT32_LENGTH
+MESSAGE_SIZE_LOCATION = MESSAGE_OFFSET_LOCATION + MESSAGE_OFFSET_LENGTH
+MESSAGE_SIZE_LENGTH = UINT32_LENGTH
+
+HEADER_LENGTH = ID_LENGTH + MESSAGE_OFFSET_LENGTH + MESSAGE_SIZE_LENGTH
+
+
+def _encode_header(id: bytes, offset: int, size: int) -> bytes:
+    return (
+        id
+        + offset.to_bytes(MESSAGE_OFFSET_LENGTH, "little", signed=False)
+        + size.to_bytes(MESSAGE_SIZE_LENGTH, "little", signed=False)
+    )
+
+
+def _decode_header(header: bytes) -> Tuple[bytes, int, int]:
+    id = header[ID_LOCATION:ID_LENGTH]
+    offset = int.from_bytes(
+        header[
+            MESSAGE_OFFSET_LOCATION : MESSAGE_OFFSET_LOCATION + MESSAGE_OFFSET_LENGTH
+        ],
+        "little",
+        signed=False,
+    )
+    size = int.from_bytes(
+        header[MESSAGE_SIZE_LOCATION : MESSAGE_SIZE_LOCATION + MESSAGE_SIZE_LENGTH],
+        "little",
+        signed=False,
+    )
+    return id, offset, size
+
+
+def generate_chunks(message: bytes, max_size: int):
+    total_size = len(message)
+
+    if max_size == 0:
+        max_content_size = total_size
+    else:
+        max_content_size = max(max_size - HEADER_LENGTH, 1)
+
+    id = secrets.token_bytes(ID_LENGTH)
+
+    offset = 0
+
+    while offset < total_size:
+        header = _encode_header(id, offset, total_size)
+        chunk_content = message[offset : offset + max_content_size]
+
+        yield header + chunk_content
+
+        offset += max_content_size
+
+    return
+
+
+class PendingMessage(TypedDict):
+    received_size: int
+    content: bytearray
+
+
+# This un-chunker is vulnerable to DOS.
+# If it receives a message with a header claiming a large incoming message
+# it will allocate the memory blindly even without actually receiving the content
+# Chunks for a given message can come in any order
+# Chunks across messages can be interleaved.
+class UnChunker:
+    pending_messages: Dict[bytes, PendingMessage]
+    max_message_size: int
+
+    def __init__(self):
+        self.pending_messages = {}
+        self.max_message_size = 512
+
+    def set_max_message_size(self, size):
+        self.max_message_size = size
+
+    def release_pending_messages(self):
+        self.pending_messages = {}
+
+    def process_chunk(self, chunk: bytes) -> Union[bytes, None]:
+        header, chunk_content = chunk[:HEADER_LENGTH], chunk[HEADER_LENGTH:]
+        id, offset, total_size = _decode_header(header)
+
+        pending_message = self.pending_messages.get(id, None)
+
+        if pending_message is None:
+            if total_size > self.max_message_size:
+                raise ValueError(
+                    f"""Total size for message {id} exceeds the allocation limit allowed.
+                    Maximum size = {self.max_message_size},
+                    Received size = {total_size}."""
+                )
+
+            pending_message = PendingMessage(
+                received_size=0, content=bytearray(total_size)
+            )
+            self.pending_messages[id] = pending_message
+
+        # This should never happen, but still check it
+        if total_size != len(pending_message["content"]):
+            del self.pending_messages[id]
+            raise ValueError(
+                f"Total size in chunk header for message {id} does not match total size declared by previous chunk."
+            )
+
+        content_size = len(chunk_content)
+        content_view = memoryview(pending_message["content"])
+        content_view[offset : offset + content_size] = chunk_content
+        pending_message["received_size"] += content_size
+
+        if pending_message["received_size"] >= total_size:
+            full_message = pending_message["content"]
+            del self.pending_messages[id]
+            return msgpack.unpackb(bytes(full_message))
+
+        return None
+
+
+class StreamPendingMessage(TypedDict):
+    received_size: int
+    total_size: int
+    unpacker: msgpack.Unpacker
+
+
+# This un-chunker is more memory efficient
+# (each chunk is passed immediately to msgpack)
+# and it will only allocate memory when it receives content.
+# Chunks for a given message are expected to come sequentially
+# Chunks across messages can be interleaved.
+class StreamUnChunker:
+    pending_messages: Dict[bytes, StreamPendingMessage]
+
+    def __init__(self):
+        self.pending_messages = {}
+
+    def set_max_message_size(self, _size):
+        pass
+
+    def release_pending_messages(self):
+        self.pending_messages = {}
+
+    def process_chunk(self, chunk: bytes) -> Union[bytes, None]:
+        header, chunk_content = chunk[:HEADER_LENGTH], chunk[HEADER_LENGTH:]
+        id, offset, total_size = _decode_header(header)
+
+        pending_message = self.pending_messages.get(id, None)
+
+        if pending_message is None:
+            pending_message = StreamPendingMessage(
+                received_size=0,
+                total_size=total_size,
+                unpacker=msgpack.Unpacker(max_buffer_size=total_size),
+            )
+            self.pending_messages[id] = pending_message
+
+        # This should never happen, but still check it
+        if offset != pending_message["received_size"]:
+            del self.pending_messages[id]
+            raise ValueError(
+                f"""Received an unexpected chunk for message {id}.
+                Expected offset = {pending_message['received_size']},
+                Received offset = {offset}."""
+            )
+
+        # This should never happen, but still check it
+        if total_size != pending_message["total_size"]:
+            del self.pending_messages[id]
+            raise ValueError(
+                f"""Received an unexpected total size in chunk header for message {id}.
+                Expected size = {pending_message['total_size']},
+                Received size = {total_size}."""
+            )
+
+        content_size = len(chunk_content)
+        pending_message["received_size"] += content_size
+
+        unpacker = pending_message["unpacker"]
+        unpacker.feed(chunk_content)
+
+        full_message = None
+
+        try:
+            full_message = unpacker.unpack()
+        except msgpack.OutOfData:
+            pass # message is incomplete, keep ingesting chunks
+
+        if full_message is not None:
+            del self.pending_messages[id]
+
+            if pending_message["received_size"] < total_size:
+                # In principle feeding a stream to the unpacker could yield multiple outputs
+                # for example unpacker.feed(b'0123') would yield b'0', b'1', ect
+                # or concatenated packed payloads would yield two or more unpacked objects
+                # but in our use case we expect a full message to be mapped to a single object
+                raise ValueError(
+                    f"""Received a parsable payload shorter than expected for message {id}.
+                    Expected size = {total_size},
+                    Received size = {pending_message['received_size']}."""
+                )
+
+        return full_message
wslink/launcher.py
CHANGED
@@ -16,6 +16,8 @@ from random import choice
 
 from wslink import backends
 
+logger = logging.getLogger(__name__)
+
 STATUS_OK = 200
 STATUS_BAD_REQUEST = 400
 STATUS_NOT_FOUND = 404
@@ -205,7 +207,7 @@ def checkSanitize(key_pair, sanitize):
            value = key_pair[key]
            if checkItem["type"] == "inList":
                if not value in checkItem["list"]:
-
+                    logger.warning(
                        "key %s: sanitize %s with default" % (key, key_pair[key])
                    )
                    key_pair[key] = checkItem["default"]
@@ -214,7 +216,7 @@
                # User is responsible to add begin- and end- string symbols, to make sure entire string is matched.
                checkItem["compiled"] = re.compile(checkItem["regexp"])
                if checkItem["compiled"].match(value) == None:
-
+                    logger.warning(
                        "key %s: sanitize %s with default" % (key, key_pair[key])
                    )
                    key_pair[key] = checkItem["default"]
@@ -232,7 +234,7 @@ def replaceVariables(template_str, variable_list, sanitize):
        template_str = item_template.safe_substitute(key_pair)
 
    if "$" in template_str:
-
+        logger.error("Some properties could not be resolved: " + template_str)
 
    return template_str
 
@@ -441,8 +443,8 @@ class ProcessManager(object):
            )
            self.processes[session["id"]] = proc
        except:
-
-
+            logger.error("The command line failed")
+            logger.error(" ".join(map(str, session["cmd"])))
            return None
 
        return proc
wslink/protocol.py
CHANGED
@@ -1,13 +1,14 @@
 import asyncio
 import copy
 import inspect
-import json
 import logging
-import
+import msgpack
+import os
 import traceback
 
 from wslink import schedule_coroutine
 from wslink.publish import PublishManager
+from wslink.chunking import generate_chunks, UnChunker
 
 # from http://www.jsonrpc.org/specification, section 5.1
 METHOD_NOT_FOUND = -32601
@@ -17,6 +18,11 @@ RESULT_SERIALIZE_ERROR = -32002
 # used in client JS code:
 CLIENT_ERROR = -32099
 
+# 4MB is the default inside aiohttp
+MAX_MSG_SIZE = int(os.environ.get("WSLINK_MAX_MSG_SIZE", 4194304))
+
+logger = logging.getLogger(__name__)
+
 
 class AbstractWebApp:
     def __init__(self, server_config):
@@ -108,7 +114,7 @@
 
     def shutdown_cancel(self):
         if self._shutdown_task is not None:
-
+            logger.info("Canceling shutdown task")
             self._shutdown_task.cancel()
             self._shutdown_task = None
 
@@ -145,6 +151,7 @@ class WslinkHandler(object):
         self.authentified_client_ids = set()
         self.attachment_atomic = asyncio.Lock()
         self.pub_manager = PublishManager()
+        self.unchunkers = {}
 
         # Build the rpc method dictionary, assuming we were given a serverprotocol
         if self.getServerProtocol():
@@ -181,6 +188,8 @@
         return "reverse_connection_client_id"
 
     async def onConnect(self, request, client_id):
+        self.unchunkers[client_id] = UnChunker()
+
         if not self.serverProtocol:
             return
         if hasattr(self.serverProtocol, "onConnect"):
@@ -190,6 +199,8 @@
             linkProtocol.onConnect(request, client_id)
 
     async def onClose(self, client_id):
+        del self.unchunkers[client_id]
+
         if not self.serverProtocol:
             return
         if hasattr(self.serverProtocol, "onClose"):
@@ -210,9 +221,16 @@
             and await self.validateToken(args[0]["secret"], client_id)
         ):
             self.authentified_client_ids.add(client_id)
+            # Once a client is authenticated let the unchunker allocate memory unrestricted
+            self.unchunkers[client_id].set_max_message_size(
+                4 * 1024 * 1024 * 1024
+            ) # 4GB
             await self.sendWrappedMessage(
                 rpcid,
-                {
+                {
+                    "clientID": "c{0}".format(client_id),
+                    "maxMsgSize": MAX_MSG_SIZE,
+                },
                 client_id=client_id,
             )
         else:
@@ -233,34 +251,16 @@
         return False
 
     async def onMessage(self, is_binary, msg, client_id):
-
-
-        if is_binary:
-            if self.isClientAuthenticated(client_id):
-                # assume all binary messages are attachments
-                try:
-                    key = self.attachmentsRecvQueue.pop(0)
-                    self.attachmentsReceived[key] = payload
-                except:
-                    pass
-            return
+        if not is_binary:
+            return
 
-
-
-
-        if type(payload) is bytes:
-            payload = payload.decode("utf-8")
+        full_message = self.unchunkers[client_id].process_chunk(msg.data)
+        if full_message is not None:
+            await self.onCompleteMessage(full_message, client_id)
 
-
-
+    async def onCompleteMessage(self, rpc, client_id):
+        logger.debug("wslink incoming msg %s", self.payloadWithSecretStripped(rpc))
         if "id" not in rpc:
-            # should be a binary attachment header
-            if rpc.get("method") == "wslink.binary.attachment":
-                keys = rpc.get("args", [])
-                if isinstance(keys, list):
-                    for k in keys:
-                        # wait for an attachment by it's order
-                        self.attachmentsRecvQueue.append(k)
             return
 
         # TODO validate
@@ -301,73 +301,37 @@
             return
 
         obj, func = self.functionMap[methodName]
+        args.insert(0, obj)
+
         try:
-
-
-
-
-
-
-
-
-                del self.attachmentsReceived[o]
-                return attachment
-            elif isinstance(o, list):
-                for i, v in enumerate(o):
-                    o[i] = findAttachments(v)
-            elif isinstance(o, dict):
-                for k in o:
-                    o[k] = findAttachments(o[k])
-            return o
-
-        args = findAttachments(args)
-        kwargs = findAttachments(kwargs)
-
-        args.insert(0, obj)
-
-        try:
-            self.web_app.last_active_client_id = client_id
-            results = func(*args, **kwargs)
-            if inspect.isawaitable(results):
-                results = await results
-
-            if self.connections[client_id].closed:
-                # Connection was closed during RPC call.
-                return
-
-            await self.sendWrappedMessage(
-                rpcid, results, method=methodName, client_id=client_id
-            )
-        except Exception as e_inst:
-            captured_trace = traceback.format_exc()
-            logging.error("Exception raised")
-            logging.error(repr(e_inst))
-            logging.error(captured_trace)
-            await self.sendWrappedError(
-                rpcid,
-                EXCEPTION_ERROR,
-                "Exception raised",
-                {
-                    "method": methodName,
-                    "exception": repr(e_inst),
-                    "trace": captured_trace,
-                },
-                client_id=client_id,
-            )
+            self.web_app.last_active_client_id = client_id
+            results = func(*args, **kwargs)
+            if inspect.isawaitable(results):
+                results = await results
+
+            if self.connections[client_id].closed:
+                # Connection was closed during RPC call.
+                return
 
-
+            await self.sendWrappedMessage(
+                rpcid, results, method=methodName, client_id=client_id
+            )
+        except Exception as e_inst:
+            captured_trace = traceback.format_exc()
+            logger.error("Exception raised")
+            logger.error(repr(e_inst))
+            logger.error(captured_trace)
             await self.sendWrappedError(
                 rpcid,
                 EXCEPTION_ERROR,
                 "Exception raised",
                 {
                     "method": methodName,
-                    "exception": repr(
-                    "trace":
+                    "exception": repr(e_inst),
+                    "trace": captured_trace,
                 },
                 client_id=client_id,
             )
-            return
 
     def payloadWithSecretStripped(self, payload):
         payload = copy.deepcopy(payload)
@@ -426,9 +390,10 @@
             "id": rpcid,
             "result": content,
         }
+
         try:
-
-        except
+            packed_wrapper = msgpack.packb(wrapper)
+        except Exception:
             # the content which is not serializable might be arbitrarily large, don't include.
             # repr(content) would do that...
             await self.sendWrappedError(
@@ -442,47 +407,14 @@
 
         websockets = self.getAuthenticatedWebsockets(client_id, skip_last_active_client)
 
-        #
-
-
-
-        for
-
-
-
-                found_keys.append(key)
-                # increment for key
-                self.pub_manager.registerAttachment(key)
-
-        for key in found_keys:
-            # send header
-            header = {
-                "wslink": "1.0",
-                "method": "wslink.binary.attachment",
-                "args": [key],
-            }
-            json_header = json.dumps(header, ensure_ascii=False)
-
-            # aiohttp can not handle pending ws.send_bytes()
-            # tried with semaphore but got exception with >1
-            # https://github.com/aio-libs/aiohttp/issues/2934
-            async with self.attachment_atomic:
-                for ws in websockets:
-                    if ws is not None:
-                        # Send binary header
-                        await ws.send_str(json_header)
-                        # Send binary message
-                        await ws.send_bytes(attachments[key])
-
-            # decrement for key
-            self.pub_manager.unregisterAttachment(key)
-
-        for ws in websockets:
-            if ws is not None:
-                await ws.send_str(encMsg)
-
-        loop = asyncio.get_event_loop()
-        loop.call_soon(self.pub_manager.freeAttachments, found_keys)
+        # aiohttp can not handle pending ws.send_bytes()
+        # tried with semaphore but got exception with >1
+        # https://github.com/aio-libs/aiohttp/issues/2934
+        async with self.attachment_atomic:
+            for chunk in generate_chunks(packed_wrapper, MAX_MSG_SIZE):
+                for ws in websockets:
+                    if ws is not None:
+                        await ws.send_bytes(chunk)
 
     async def sendWrappedError(self, rpcid, code, message, data=None, client_id=None):
         wrapper = {
@@ -495,15 +427,26 @@
         }
         if data:
             wrapper["error"]["data"] = data
-
+
+        try:
+            packed_wrapper = msgpack.packb(wrapper)
+        except Exception:
+            del wrapper["error"]["data"]
+            packed_wrapper = msgpack.packb(wrapper)
+
         websockets = (
             [self.connections[client_id]]
             if client_id
             else [self.connections[c] for c in self.connections]
         )
-
-
-
+        # aiohttp can not handle pending ws.send_bytes()
+        # tried with semaphore but got exception with >1
+        # https://github.com/aio-libs/aiohttp/issues/2934
+        async with self.attachment_atomic:
+            for chunk in generate_chunks(packed_wrapper, MAX_MSG_SIZE):
+                for ws in websockets:
+                    if ws is not None:
+                        await ws.send_bytes(chunk)
 
     def publish(self, topic, data, client_id=None, skip_last_active_client=False):
         client_list = [client_id] if client_id else [c_id for c_id in self.connections]
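Taken together, these protocol changes drop the old text JSON envelope plus separate binary-attachment frames: every outgoing message is now a single msgpack document that generate_chunks splits into binary frames of at most MAX_MSG_SIZE bytes. A rough sketch of what sendWrappedMessage now puts on the wire (send_result, ws, rpcid and content are placeholder names, and the wrapper shows only the fields visible in this hunk, so treat it as an illustration rather than the exact wslink envelope):

import os
import msgpack
from wslink.chunking import generate_chunks

# Same default as protocol.py: 4MB, overridable via WSLINK_MAX_MSG_SIZE.
MAX_MSG_SIZE = int(os.environ.get("WSLINK_MAX_MSG_SIZE", 4194304))


async def send_result(ws, rpcid, content):
    # Binary data (e.g. an image buffer) can live directly inside "result";
    # msgpack serializes bytes natively, so no wslink.binary.attachment
    # header frames are needed anymore.
    wrapper = {"id": rpcid, "result": content}
    packed = msgpack.packb(wrapper)
    for chunk in generate_chunks(packed, MAX_MSG_SIZE):
        await ws.send_bytes(chunk)  # aiohttp-style websocket send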
wslink/publish.py
CHANGED
@@ -7,9 +7,6 @@ from . import schedule_coroutine
 class PublishManager(object):
     def __init__(self):
         self.protocols = []
-        self.attachmentMap = {}
-        self.attachmentRefCounts = {} # keyed same as attachment map
-        self.attachmentId = 0
         self.publishCount = 0
 
     def registerProtocol(self, protocol):
@@ -19,38 +16,12 @@
         if protocol in self.protocols:
             self.protocols.remove(protocol)
 
-    def getAttachmentMap(self):
-        return self.attachmentMap
-
-    def clearAttachmentMap(self):
-        self.attachmentMap.clear()
-
-    def registerAttachment(self, attachKey):
-        self.attachmentRefCounts[attachKey] += 1
-
-    def unregisterAttachment(self, attachKey):
-        self.attachmentRefCounts[attachKey] -= 1
-
-    def freeAttachments(self, keys=None):
-        keys_to_delete = []
-        keys_to_check = keys if keys is not None else [k for k in self.attachmentMap]
-
-        for key in keys_to_check:
-            if self.attachmentRefCounts.get(key) == 0:
-                keys_to_delete.append(key)
-
-        for key in keys_to_delete:
-            self.attachmentMap.pop(key)
-            self.attachmentRefCounts.pop(key)
-
     def addAttachment(self, payload):
-
-
-
-
-
-        self.attachmentId += 1
-        return binaryId
+        """Deprecated method, keeping it to avoid breaking compatibility
+        Now that we use msgpack to pack/unpack messages,
+        We can have binary data directly in the object itself,
+        without needing to transfer it separately from the rest."""
+        return payload
 
     def publish(self, topic, data, client_id=None, skip_last_active_client=False):
         for protocol in self.protocols:
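addAttachment is now a pass-through kept only for backward compatibility: because msgpack encodes bytes natively, binary buffers can sit directly inside the published object instead of being registered and reference-counted as attachments. A small illustration of the point (the topic name and payload bytes are made up for the example):

import msgpack

# With the msgpack envelope, bytes survive a pack/unpack round trip as part
# of the message itself, which is what makes the attachment registry removed
# above unnecessary.
image_bytes = b"\x89PNG\r\n\x1a\n"  # placeholder binary payload
packed = msgpack.packb({"topic": "image.push", "image": image_bytes})
restored = msgpack.unpackb(packed)
assert restored["image"] == image_bytes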
wslink/websocket.py
CHANGED
@@ -9,6 +9,8 @@ import logging
 from . import register as exportRpc
 from . import schedule_callback
 
+logger = logging.getLogger(__name__)
+
 
 # =============================================================================
 #
@@ -114,7 +116,7 @@ class ServerProtocol(object):
         try:
             self.linkProtocols.remove(protocol)
         except ValueError as e:
-
+            logger.error("Link protocol missing from registered list.")
 
     def getLinkProtocols(self):
         return self.linkProtocols
{wslink-1.12.3.dist-info → wslink-2.0.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: wslink
-Version: 1.12.3
+Version: 2.0.0
 Summary: Python/JavaScript library for communicating over WebSocket
 Home-page: https://github.com/kitware/wslink
 Author: Kitware, Inc.
@@ -22,6 +22,7 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Requires-Dist: aiohttp <4
+Requires-Dist: msgpack <2,>=1
 Provides-Extra: ssl
 Requires-Dist: cryptography ; extra == 'ssl'
 
wslink-2.0.0.dist-info/RECORD
ADDED
@@ -0,0 +1,25 @@
+wslink/LICENSE,sha256=I44UH7kDVqxDLnnlOWw_hFL2Fz7RjQ_4vPzZv9NYgTU,1483
+wslink/__init__.py,sha256=AbEm-sUSoGL-uLpnbK1rSSjHSvyW-bMsGHWie7FgMHw,2708
+wslink/chunking.py,sha256=BZZ0YAlh6PNI8rQe80NfdxU8pAvn_Klxew47AkvUJow,7130
+wslink/launcher.py,sha256=8VMs3juObLkyGYQFNLjMoo4qFpKIcxWz0kS-af-DKO4,21170
+wslink/protocol.py,sha256=zdf4QthFHpAgEw3hTUyyaOuN76jzHeOJBpvekPbk7aY,15886
+wslink/publish.py,sha256=9G5TXqyGr-LCo_LwHYhzif6lhG2iXDvEBmEgwR8fh1M,1437
+wslink/relay.py,sha256=E8Lzu2Ay7KbOheN1-ArAZawo8lLqdDgJXOZSBuMknYs,86
+wslink/server.py,sha256=FKSJAKHDyfkNVM45-M-y1Zn8hh2TTYto1hTCIJx1pp8,9440
+wslink/ssl_context.py,sha256=hNOJJCdrStws1Qf6vPvY4vTk9Bf8J5d90W3fS0cRv8o,2290
+wslink/uri.py,sha256=woCQ4yChUqTMg9IT6YYDtUYeKmCg7OUCEgeBGA-19DY,384
+wslink/websocket.py,sha256=pBiWqkL8Zn8LuSJ9nv3yA-KjEynbolOQ2gLHtQFJ2Ic,4611
+wslink/backends/__init__.py,sha256=cyJGjm-YyBSyOEX81owyTbJ3YnrA6dB7--B4LnsEtHI,1214
+wslink/backends/aiohttp/__init__.py,sha256=u2UxSnaMJPoiba1CL1fOeM8bTggyOw6znQ2iGd69qJI,8463
+wslink/backends/aiohttp/launcher.py,sha256=gHNMvtgNHEwBN_QBRDSCrTp2B4K1PsfV81rKaHi7Cxo,8897
+wslink/backends/aiohttp/relay.py,sha256=oZAzIQTpsQaObWXaa-_VtoTOUQALC_QLDd9UvWspYaU,13311
+wslink/backends/generic/__init__.py,sha256=Qu65gWsd2xCSsxybnDtEDI5vMjHN-F5jgPZOyNIxnGs,112
+wslink/backends/generic/core.py,sha256=Gwo7nksgph96i7J77nYm4hB5NwysybQ23rRs86f0d6U,4149
+wslink/backends/jupyter/__init__.py,sha256=Qu65gWsd2xCSsxybnDtEDI5vMjHN-F5jgPZOyNIxnGs,112
+wslink/backends/jupyter/core.py,sha256=H73IEEHyom3TsbhkyI5O88bFBbUIDzHVuvqbIF6PAIM,3858
+wslink/backends/tornado/__init__.py,sha256=Qu65gWsd2xCSsxybnDtEDI5vMjHN-F5jgPZOyNIxnGs,112
+wslink/backends/tornado/core.py,sha256=tPMkkhWuO_ovkisVim0zcegwZKEAG4IRUdd_O_0a_R0,2157
+wslink-2.0.0.dist-info/METADATA,sha256=XWnXD2kxZEtIuOy4Y-_8HZ8anq9hYFL7axDQhv96GoE,3045
+wslink-2.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+wslink-2.0.0.dist-info/top_level.txt,sha256=N0d8eqvhwhfW1p1yPTmvxlbzhjz7ZyhBfysNvaFqpQY,7
+wslink-2.0.0.dist-info/RECORD,,
wslink-1.12.3.dist-info/RECORD
DELETED
@@ -1,24 +0,0 @@
-wslink/LICENSE,sha256=I44UH7kDVqxDLnnlOWw_hFL2Fz7RjQ_4vPzZv9NYgTU,1483
-wslink/__init__.py,sha256=AbEm-sUSoGL-uLpnbK1rSSjHSvyW-bMsGHWie7FgMHw,2708
-wslink/launcher.py,sha256=4l_z3l2xkRSiH8hWbZzjtsIbBeujl-P8bTA0TBtJEEg,21137
-wslink/protocol.py,sha256=AXBlQ4BD2iB7e3WHPedIv0fzgd05eBuNLQpXjX74j5U,18132
-wslink/publish.py,sha256=6xLr7tGkvbq5LbXbAvPQ6OplEBbs192WR-uhiTJMPdg,2354
-wslink/relay.py,sha256=E8Lzu2Ay7KbOheN1-ArAZawo8lLqdDgJXOZSBuMknYs,86
-wslink/server.py,sha256=FKSJAKHDyfkNVM45-M-y1Zn8hh2TTYto1hTCIJx1pp8,9440
-wslink/ssl_context.py,sha256=hNOJJCdrStws1Qf6vPvY4vTk9Bf8J5d90W3fS0cRv8o,2290
-wslink/uri.py,sha256=woCQ4yChUqTMg9IT6YYDtUYeKmCg7OUCEgeBGA-19DY,384
-wslink/websocket.py,sha256=REYYyKB4WweMuDruvPBA0oPOfhddPF9G5WIGiK367KA,4574
-wslink/backends/__init__.py,sha256=cyJGjm-YyBSyOEX81owyTbJ3YnrA6dB7--B4LnsEtHI,1214
-wslink/backends/aiohttp/__init__.py,sha256=FUPMwi0Z1nuVVIJhY_sI0ZzRY5ne30tmUILi7JQTgP4,8433
-wslink/backends/aiohttp/launcher.py,sha256=Itblr2-lX_ZpAInvLlGkl1_XrNYjIn3w469BDZ5XyyQ,8737
-wslink/backends/aiohttp/relay.py,sha256=Nn1ELqGU1kwvst4DNwNqYWU4nYd08K3STAMQIhcangY,13338
-wslink/backends/generic/__init__.py,sha256=Qu65gWsd2xCSsxybnDtEDI5vMjHN-F5jgPZOyNIxnGs,112
-wslink/backends/generic/core.py,sha256=nphpXjwZ_U0-OyNDgLGg0IMpi6lfYb8tke_YDti4U4Y,4138
-wslink/backends/jupyter/__init__.py,sha256=Qu65gWsd2xCSsxybnDtEDI5vMjHN-F5jgPZOyNIxnGs,112
-wslink/backends/jupyter/core.py,sha256=H73IEEHyom3TsbhkyI5O88bFBbUIDzHVuvqbIF6PAIM,3858
-wslink/backends/tornado/__init__.py,sha256=Qu65gWsd2xCSsxybnDtEDI5vMjHN-F5jgPZOyNIxnGs,112
-wslink/backends/tornado/core.py,sha256=tPMkkhWuO_ovkisVim0zcegwZKEAG4IRUdd_O_0a_R0,2157
-wslink-1.12.3.dist-info/METADATA,sha256=DeacEUE7uOSFLAJHAf5tHdgPh2CkWDwxZKBl-xSoLpk,3016
-wslink-1.12.3.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
-wslink-1.12.3.dist-info/top_level.txt,sha256=N0d8eqvhwhfW1p1yPTmvxlbzhjz7ZyhBfysNvaFqpQY,7
-wslink-1.12.3.dist-info/RECORD,,
{wslink-1.12.3.dist-info → wslink-2.0.0.dist-info}/top_level.txt
File without changes