@kitware/wslink 2.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,179 @@
1
+ /* global window */
2
+ import CompositeClosureHelper from "../CompositeClosureHelper";
3
+
4
+ import ProcessLauncher from "../ProcessLauncher";
5
+ import WebsocketConnection from "../WebsocketConnection";
6
+
7
// Default decorator applied to every connection configuration: substitutes
// the USE_HOSTNAME / USE_HOST placeholders of sessionURL with the values
// taken from the current browser location.
// NOTE: USE_HOSTNAME must be replaced before USE_HOST because the former
// contains the latter as a prefix.
function DEFAULT_CONFIG_DECORATOR(config) {
  if (config.sessionURL) {
    const substitutions = [
      ["USE_HOSTNAME", window.location.hostname],
      ["USE_HOST", window.location.host],
    ];
    for (const [placeholder, value] of substitutions) {
      config.sessionURL = config.sessionURL.replaceAll(placeholder, value);
    }
  }
  return config;
}
20
+
21
// Build a websocket path from the current page location: strip a trailing
// "*.html"/"*.htm" document name and any trailing slashes, then append
// `addOn`. When nothing is left of the path, `addOn` alone is returned.
function extractPathName(addOn, pathName = window.location.pathname) {
  let base = pathName;
  if (base.endsWith(".html") || base.endsWith(".htm")) {
    // Keep only the directory portion of the document path.
    const cut = base.lastIndexOf("/");
    base = cut < 0 ? "" : base.slice(0, cut);
  }
  // Trim every trailing slash.
  base = base.replace(/\/+$/, "");
  return base.length === 0 ? addOn : `${base}${addOn}`;
}
35
+
36
// Default launcher endpoint, resolved relative to the current document.
export const DEFAULT_SESSION_MANAGER_URL = `${document.baseURI}paraview/`;

// Match the page's security model: pages served over https must use wss.
const WS_PROTOCOL = window.location.protocol === "https:" ? "wss:" : "ws:";

// Default direct-connection URL: same host/port as the page, "/ws" endpoint.
export const DEFAULT_SESSION_URL = `${WS_PROTOCOL}//${window.location.hostname}:${window.location.port}${extractPathName("/ws")}`;
43
+
44
// Open a websocket connection from the (already decorated) configuration,
// forward its lifecycle events through this SmartConnect instance, register
// it for cleanup, and return the result of connect().
function wsConnect(publicAPI, model) {
  const { sessionURL, secret, retry, iframe, wsProxy } = model.config;
  const wsConnection = WebsocketConnection.newInstance({
    urls: sessionURL,
    secret,
    retry,
    // An iframe-hosted client implies proxying through the parent window.
    wsProxy: iframe || wsProxy,
  });

  // Relay ready/error/close notifications to our own listeners.
  model.subscriptions.push(
    wsConnection.onConnectionReady(publicAPI.readyForwarder),
    wsConnection.onConnectionError(publicAPI.errorForwarder),
    wsConnection.onConnectionClose(publicAPI.closeForwarder)
  );

  // Add to the garbage collector so destroy() tears the connection down.
  model.gc.push(wsConnection);

  return wsConnection.connect();
}
66
+
67
// Composes the SmartConnect behavior onto publicAPI/model: decides how to
// reach the server (direct sessionURL, websocket proxy, or a ProcessLauncher
// that starts a session on demand) and forwards connection events.
// NOTE(review): model.subscriptions is read here but never initialized in
// this file — presumably set up by CompositeClosureHelper; confirm.
function smartConnect(publicAPI, model) {
  // Active session for RPC calls (null until a connection becomes ready).
  let session = null;
  // Resources destroyed by cleanUp (websocket connections, launcher).
  model.gc = [];

  // Event forwarders
  publicAPI.readyForwarder = (data) => {
    // Capture the live session before notifying ConnectionReady listeners.
    session = data.getSession();
    publicAPI.fireConnectionReady(data);
  };
  publicAPI.errorForwarder = (data, err) => {
    publicAPI.fireConnectionError(data, err);
  };
  publicAPI.closeForwarder = (data, err) => {
    publicAPI.fireConnectionClose(data, err);
  };

  // Establish the connection. Resolution order:
  //   1. config.sessionURL set -> connect directly;
  //   2. config.wsProxy set    -> connect through the proxy (fake URL if needed);
  //   3. otherwise             -> ask a ProcessLauncher to start a session.
  publicAPI.connect = () => {
    // User decorator first, then the default placeholder substitution.
    if (model.configDecorator) {
      model.config = model.configDecorator(model.config);
    }
    model.config = DEFAULT_CONFIG_DECORATOR(model.config);

    if (model.config.sessionURL) {
      // We have a direct connection URL
      // NOTE(review): wsConnect returns wsConnection.connect(); `session` is
      // later overwritten by readyForwarder with data.getSession() — confirm
      // both refer to an equivalent session object.
      session = wsConnect(publicAPI, model);
    } else if (model.config.wsProxy) {
      // Provide fake url if missing since we rely on a proxy
      model.config.sessionURL = model.config.sessionURL || "wss://proxy/";
      session = wsConnect(publicAPI, model);
    } else {
      // We need to use the Launcher
      const launcher = ProcessLauncher.newInstance({
        endPoint: model.config.sessionManagerURL || DEFAULT_SESSION_MANAGER_URL,
      });

      model.subscriptions.push(
        launcher.onProcessReady((data) => {
          // Merge the launcher response into the config, re-run decorators,
          // then connect to the freshly started session.
          if (model.configDecorator) {
            model.config = model.configDecorator(
              Object.assign({}, model.config, data)
            );
          } else {
            model.config = Object.assign({}, model.config, data);
          }
          model.config = DEFAULT_CONFIG_DECORATOR(model.config);

          session = wsConnect(publicAPI, model);
        })
      );
      model.subscriptions.push(
        launcher.onError((data) => {
          if (data && data.response && data.response.error) {
            publicAPI.errorForwarder(data, data.response.error);
          } else {
            // Try to use standard connection URL
            model.config.sessionURL = DEFAULT_SESSION_URL;
            model.config = DEFAULT_CONFIG_DECORATOR(model.config);
            session = wsConnect(publicAPI, model);
          }
        })
      );

      launcher.start(model.config);

      // Add to the garbage collector
      model.gc.push(launcher);
    }
  };

  // Current session, or null while not yet connected.
  publicAPI.getSession = () => {
    return session;
  };

  // Close the session and destroy every gc-tracked resource.
  // When timeout > 0, first ask the server to exit after `timeout`;
  // an undefined/zero/negative timeout closes without requesting shutdown.
  function cleanUp(timeout) {
    if (session) {
      if (timeout > 0) {
        session.call("application.exit.later", [timeout]);
      }
      session.close();
    }
    session = null;

    while (model.gc.length) {
      model.gc.pop().destroy();
    }
  }

  publicAPI.destroy = CompositeClosureHelper.chain(cleanUp, publicAPI.destroy);
}
156
+
157
// Default model values, applied by extend() before user-provided
// initialValues (which therefore win).
const DEFAULT_VALUES = {
  config: {},
  // configDecorator: null,
};
161
+
162
// Decorate publicAPI/model with the SmartConnect contract: destroy handling,
// the three connection events, accessors, and the connection logic itself.
export function extend(publicAPI, model, initialValues = {}) {
  Object.assign(model, DEFAULT_VALUES, initialValues);

  CompositeClosureHelper.destroy(publicAPI, model);
  ["ConnectionReady", "ConnectionClose", "ConnectionError"].forEach((name) =>
    CompositeClosureHelper.event(publicAPI, model, name)
  );
  CompositeClosureHelper.isA(publicAPI, model, "SmartConnect");
  CompositeClosureHelper.get(publicAPI, model, ["config", "configDecorator"]);
  CompositeClosureHelper.set(publicAPI, model, ["configDecorator"]);

  smartConnect(publicAPI, model);
}
175
+
176
// ----------------------------------------------------------------------------
// Factory producing fully wired SmartConnect instances via extend().
export const newInstance = CompositeClosureHelper.newInstance(extend);

export default { newInstance, extend };
@@ -0,0 +1,37 @@
1
+ # WebsocketConnection
2
+
3
+ ## WebsocketConnection.newInstance({ urls })
4
+
5
+ Create an instance of a websocket connection. The urls should be a single url
6
+ (string).
7
+
8
+ Usually with a ProcessLauncher we will set the **urls** to
9
+ **connection.sessionURL**.
10
+
11
+ The input can optionally include a string to authenticate the connection during
12
+ the handshake: `{ urls, secret:"wslink-secret" }`
13
+
14
+ ## connect()
15
+
16
+ Trigger the actual connection request with the server.
17
+
18
+ ## onConnectionReady(callback) : subscription
19
+
20
+ Register callback for when the connection becomes ready.
21
+
22
+ ## onConnectionClose(callback) : subscription
23
+
24
+ Register callback for when the connection closes.
25
+
26
+ ## getSession() : object
27
+
28
+ Return null if the connection is not yet established or the session for making
29
+ RPC calls.
30
+
31
+ ## destroy(timeout=10)
32
+
33
+ Close the connection and ask the server to automatically shutdown after the
34
+ given timeout while removing any listener.
35
+
36
+ If the provided timeout is negative, we will close the connection without asking
37
+ the server to shutdown.
@@ -0,0 +1,211 @@
1
// Project not setup for typescript, manually compiling this file to chunker.js
// npx tsc chunking.ts --target esnext

// Binary chunk header layout: three little-endian uint32 fields —
// [ message id | chunk offset | total message size ] — 12 bytes total.
const UINT32_LENGTH = 4;
const ID_LOCATION = 0;
const ID_LENGTH = UINT32_LENGTH;
const MESSAGE_OFFSET_LOCATION = ID_LOCATION + ID_LENGTH;
const MESSAGE_OFFSET_LENGTH = UINT32_LENGTH;
const MESSAGE_SIZE_LOCATION = MESSAGE_OFFSET_LOCATION + MESSAGE_OFFSET_LENGTH;
const MESSAGE_SIZE_LENGTH = UINT32_LENGTH;
const HEADER_LENGTH = ID_LENGTH + MESSAGE_OFFSET_LENGTH + MESSAGE_SIZE_LENGTH;
11
/**
 * Pack a chunk header: id, offset and size as little-endian uint32 values.
 * @param {number} id - random message identifier.
 * @param {number} offset - byte offset of this chunk within the message.
 * @param {number} size - total byte size of the whole message.
 * @returns {Uint8Array} the HEADER_LENGTH-byte header.
 */
function encodeHeader(id, offset, size) {
  const buffer = new ArrayBuffer(HEADER_LENGTH);
  const view = new DataView(buffer);
  view.setUint32(ID_LOCATION, id, true);
  view.setUint32(MESSAGE_OFFSET_LOCATION, offset, true);
  view.setUint32(MESSAGE_SIZE_LOCATION, size, true);
  return new Uint8Array(buffer);
}
20
/**
 * Decode the three little-endian uint32 fields written by encodeHeader.
 * @param {Uint8Array} header - header bytes; may be a subarray view.
 * @returns {{id: number, offset: number, size: number}}
 */
function decodeHeader(header) {
  // Honor the view's byteOffset/byteLength: a DataView built from
  // header.buffer alone would misread a header taken from a subarray
  // (it would start at the buffer's byte 0, not the view's).
  const view = new DataView(header.buffer, header.byteOffset, header.byteLength);
  const id = view.getUint32(ID_LOCATION, true);
  const offset = view.getUint32(MESSAGE_OFFSET_LOCATION, true);
  const size = view.getUint32(MESSAGE_SIZE_LOCATION, true);
  return { id, offset, size };
}
27
+ function* generateChunks(message, maxSize) {
28
+ const totalSize = message.byteLength;
29
+ let maxContentSize;
30
+ if (maxSize === 0) {
31
+ maxContentSize = totalSize;
32
+ } else {
33
+ maxContentSize = Math.max(maxSize - HEADER_LENGTH, 1);
34
+ }
35
+ const id = new Uint32Array(1);
36
+ crypto.getRandomValues(id);
37
+ let offset = 0;
38
+ while (offset < totalSize) {
39
+ const contentSize = Math.min(maxContentSize, totalSize - offset);
40
+ const chunk = new Uint8Array(new ArrayBuffer(HEADER_LENGTH + contentSize));
41
+ const header = encodeHeader(id[0], offset, totalSize);
42
+ chunk.set(new Uint8Array(header.buffer), 0);
43
+ chunk.set(message.subarray(offset, offset + contentSize), HEADER_LENGTH);
44
+ yield chunk;
45
+ offset += contentSize;
46
+ }
47
+ return;
48
+ }
49
/*
This un-chunker is vulnerable to DOS.
If it receives a message with a header claiming a large incoming message
it will allocate the memory blindly even without actually receiving the content
Chunks for a given message can come in any order
Chunks across messages can be interleaved.
*/
class UnChunker {
  // Map of message id -> { receivedSize, content, decoder } for messages
  // whose chunks have not all arrived yet.
  pendingMessages;
  constructor() {
    this.pendingMessages = {};
  }
  // Drop every partially received message (e.g. on connection reset).
  releasePendingMessages() {
    this.pendingMessages = {};
  }
  /**
   * Ingest one chunk (a Blob: HEADER_LENGTH header bytes + content).
   * Returns the decoded message once the final chunk has arrived,
   * otherwise undefined.
   * NOTE(review): decode() failures are logged and swallowed — the caller
   * receives undefined for a malformed message; confirm that is intended.
   */
  async processChunk(chunk, decoderFactory) {
    const headerBlob = chunk.slice(0, HEADER_LENGTH);
    const contentBlob = chunk.slice(HEADER_LENGTH);
    const header = new Uint8Array(await headerBlob.arrayBuffer());
    const { id, offset, size: totalSize } = decodeHeader(header);
    let pendingMessage = this.pendingMessages[id];
    if (!pendingMessage) {
      // First chunk for this id: allocate the full message buffer up front.
      pendingMessage = {
        receivedSize: 0,
        content: new Uint8Array(totalSize),
        decoder: decoderFactory(),
      };
      this.pendingMessages[id] = pendingMessage;
    }
    // This should never happen, but still check it
    if (totalSize !== pendingMessage.content.byteLength) {
      delete this.pendingMessages[id];
      throw new Error(
        `Total size in chunk header for message ${id} does not match total size declared by previous chunk.`
      );
    }
    const chunkContent = new Uint8Array(await contentBlob.arrayBuffer());
    const content = pendingMessage.content;
    // Chunks may arrive out of order: copy at the offset from the header.
    content.set(chunkContent, offset);
    pendingMessage.receivedSize += chunkContent.byteLength;
    if (pendingMessage.receivedSize >= totalSize) {
      delete this.pendingMessages[id];
      try {
        return pendingMessage["decoder"].decode(content);
      } catch (e) {
        console.error("Malformed message: ", content.slice(0, 100));
        // debugger;
      }
    }
    return undefined;
  }
}
101
// Runs enqueued async tasks strictly one at a time, in arrival order, so
// messages are processed sequentially even though their handlers are async.
export class SequentialTaskQueue {
  taskId;
  pendingTaskId;
  tasks;
  constructor() {
    this.taskId = 0;
    this.pendingTaskId = -1;
    this.tasks = {};
  }
  /**
   * Queue `fn(...args)` for execution; resolves/rejects with its outcome
   * once every previously enqueued task has settled.
   */
  enqueue(fn, ...args) {
    return new Promise((resolve, reject) => {
      const id = this.taskId++;
      this.tasks[id] = { fn, args, resolve, reject };
      this._maybeExecuteNext();
    });
  }
  // Start the next task unless one is already in flight.
  _maybeExecuteNext() {
    if (this.tasks[this.pendingTaskId]) {
      // A task is still running; it re-triggers the queue when it settles.
      return;
    }
    const nextId = this.pendingTaskId + 1;
    const task = this.tasks[nextId];
    if (!task) {
      return; // Nothing queued yet.
    }
    this.pendingTaskId = nextId;
    // Settle the caller's promise, drop the task, then advance the queue.
    const finish = (settle) => (value) => {
      settle(value);
      delete this.tasks[nextId];
      this._maybeExecuteNext();
    };
    task.fn(...task.args).then(finish(task.resolve), finish(task.reject));
  }
}
143
/*
This un-chunker is more memory efficient
(each chunk is passed immediately to msgpack)
and it will only allocate memory when it receives content.
Chunks for a given message are expected to come sequentially
Chunks across messages can be interleaved.
*/
class StreamUnChunker {
  // Map of message id -> { receivedSize, totalSize, decoder } for messages
  // whose chunks have not all arrived yet.
  pendingMessages;
  constructor() {
    this.pendingMessages = {};
  }
  /**
   * Ingest one chunk (a Blob: HEADER_LENGTH header bytes + content) by
   * streaming its content straight into the message's decoder.
   * Returns the decoded message once the final chunk has arrived,
   * otherwise undefined.
   * Defined as a class-field arrow so it can be passed around without
   * losing `this`.
   */
  processChunk = async (chunk, decoderFactory) => {
    const headerBlob = chunk.slice(0, HEADER_LENGTH);
    const header = new Uint8Array(await headerBlob.arrayBuffer());
    const { id, offset, size: totalSize } = decodeHeader(header);
    const contentBlob = chunk.slice(HEADER_LENGTH);
    let pendingMessage = this.pendingMessages[id];
    if (!pendingMessage) {
      // First chunk for this id: create a fresh decoder, no big allocation.
      pendingMessage = {
        receivedSize: 0,
        totalSize: totalSize,
        decoder: decoderFactory(),
      };
      this.pendingMessages[id] = pendingMessage;
    }
    // This should never happen, but still check it
    if (totalSize !== pendingMessage.totalSize) {
      delete this.pendingMessages[id];
      throw new Error(
        `Total size in chunk header for message ${id} does not match total size declared by previous chunk.`
      );
    }
    // This should never happen, but still check it
    if (offset !== pendingMessage.receivedSize) {
      delete this.pendingMessages[id];
      throw new Error(`Received an unexpected chunk for message ${id}.
Expected offset = ${pendingMessage.receivedSize},
Received offset = ${offset}.`);
    }
    let result;
    try {
      result = await pendingMessage.decoder.decodeAsync(contentBlob.stream());
    } catch (e) {
      if (e instanceof RangeError) {
        // More data is needed, it should come in the next chunk
        result = undefined;
      } else {
        // FIX: previously any non-RangeError decoder failure was silently
        // swallowed (result left undefined). Drop the pending message,
        // matching the other error paths, and surface the error.
        delete this.pendingMessages[id];
        throw e;
      }
    }
    pendingMessage.receivedSize += contentBlob.size;
    /*
    In principle feeding a stream to the unpacker could yield multiple outputs
    for example unpacker.feed(b'0123') would yield b'0', b'1', etc
    or concatenated packed payloads would yield two or more unpacked objects
    but in our use case we expect a full message to be mapped to a single object
    */
    if (result && pendingMessage.receivedSize < totalSize) {
      delete this.pendingMessages[id];
      throw new Error(`Received a parsable payload shorter than expected for message ${id}.
Expected size = ${totalSize},
Received size = ${pendingMessage.receivedSize}.`);
    }
    if (pendingMessage.receivedSize >= totalSize) {
      delete this.pendingMessages[id];
    }
    return result;
  };
}
211
+ export { UnChunker, StreamUnChunker, generateChunks };