@yz-social/webrtc 0.1.4 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -2
- package/index.js +63 -7
- package/package.json +1 -1
- package/spec/portal.js +15 -4
- package/spec/webrtcCapacitySpec.js +50 -10
- package/spec/webrtcSpec.js +69 -6
- package/stun.js +52 -0
package/README.md
CHANGED
|
@@ -1,10 +1,30 @@
|
|
|
1
|
+
# @yz-social/webrtc
|
|
1
2
|
|
|
2
|
-
|
|
3
|
+
A wrapper around either the browser's WebRTC, or around @roamhq/wrtc on NodeJS.
|
|
4
|
+
|
|
5
|
+
Installing this package in NodeJS - i.e., with `npm install` in either this package's directory or in some other module that imports this - will install the @roamhq/wrtc dependency. Successfully installing _that_ may require extra C++ tools on the host system.
|
|
6
|
+
|
|
7
|
+
For example, you probably need to have `git bash` or the like installed, and to execute the various `npm` commands within that. During the Windows installation of NodeJS (prior to installing this package), you may be asked whether to install the additional tools for VC++, including Chocolatey. You should check the box to install them, and follow those directions. Installing those tools will occur in a separate window, prompt for proceeding, and may take a very long time to execute.
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
## Some Tweaks
|
|
11
|
+
|
|
12
|
+
See the test cases and spec/portal.js for examples.
|
|
13
|
+
|
|
14
|
+
### Semi-trickled ice
|
|
15
|
+
|
|
16
|
+
RTCPeerConnection generates a bunch of ICE candidates right away, and then more over the next few seconds. It can be a while before it is finished. In this package, we have utilities to collect signals as they occur, and then gather them for sending while accumulating a new set of signals to be sent.
|
|
17
|
+
|
|
18
|
+
### Simultaneous outreach
|
|
19
|
+
|
|
20
|
+
Things can get confused if two nodes try to connect to each other at the same time. There is supposed to be some automatic rollback mechanism, but implementations vary. This code tries to sort that out, if the application can label one of the pair to be "polite" and the other not.
|
|
21
|
+
|
|
22
|
+
### Data channel name event
|
|
3
23
|
|
|
4
24
|
RTCPeerConnection defines a 'datachannel' event, and RTCDataChannel defines an 'open' event, but it is difficult to use them correctly:
|
|
5
25
|
- 'datachannel' fires only for one side of a connection, and only when negotiated:false.
|
|
6
26
|
- To listen for 'open', you must already have the data channel. Not all implementations fire a handler for this when assigned in a 'datachannel' handler, and it can fire multiple times for the same channel name when two sides initiate the channel simultaneously with negotiated:true.
|
|
7
27
|
|
|
8
|
-
|
|
28
|
+
### close event
|
|
9
29
|
|
|
10
30
|
RTCPeerConnection defines a 'signalingstatechange' event in which application handlers can fire code when aPeerConnection.readyState === 'closed', but this is not particularly convenient.
|
package/index.js
CHANGED
|
@@ -20,7 +20,7 @@ export class WebRTC {
|
|
|
20
20
|
];
|
|
21
21
|
cleanup() { // Attempt to allow everything to be garbage-collected.
|
|
22
22
|
if (!this.pc) return;
|
|
23
|
-
this.pc.onicecandidate = this.pc.ondatachannel = this.pc.onnegotiationneeded = this.pc.onconnectionstatechange = null;
|
|
23
|
+
this.pc.onicecandidate = this.pc.ondatachannel = this.pc.onnegotiationneeded = this.pc.onconnectionstatechange = this.pc.oniceconnectionstatechange = null;
|
|
24
24
|
delete this.pc;
|
|
25
25
|
delete this.dataChannelPromises;
|
|
26
26
|
delete this.dataChannelOursPromises;
|
|
@@ -56,11 +56,27 @@ export class WebRTC {
|
|
|
56
56
|
this.ignoreOffer = false;
|
|
57
57
|
|
|
58
58
|
this.pc.onicecandidate = e => {
|
|
59
|
-
if (
|
|
59
|
+
if (e.candidate === null) return;
|
|
60
60
|
//if (this.pc.connectionState === 'connected') return; // Don't waste messages. FIXME
|
|
61
|
+
// Including an empty-string end-of-candidates marker.
|
|
62
|
+
// https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/icecandidate_event#indicating_the_end_of_a_generation_of_candidates
|
|
61
63
|
this.signal({ candidate: e.candidate });
|
|
62
64
|
};
|
|
63
65
|
this.pc.ondatachannel = e => this.ondatachannel(e.channel);
|
|
66
|
+
this.pc.oniceconnectionstatechange = () => {
|
|
67
|
+
if (!this.pc) return;
|
|
68
|
+
//this.flog('iceConnectionState', this.pc.iceConnectionState);
|
|
69
|
+
switch (this.pc.iceConnectionState) {
|
|
70
|
+
case 'completed':
|
|
71
|
+
this._resolveIceCompleted?.(true);
|
|
72
|
+
break;
|
|
73
|
+
case 'failed':
|
|
74
|
+
this.pc.restartIce();
|
|
75
|
+
break;
|
|
76
|
+
default:
|
|
77
|
+
;
|
|
78
|
+
}
|
|
79
|
+
};
|
|
64
80
|
this.pc.onnegotiationneeded = async () => {
|
|
65
81
|
try {
|
|
66
82
|
this.makingOffer = true;
|
|
@@ -76,6 +92,23 @@ export class WebRTC {
|
|
|
76
92
|
}
|
|
77
93
|
};
|
|
78
94
|
}
|
|
95
|
+
async renegotiate() { // Trigger negotiationneeded and promise to resolve when completed. Used in testing.
|
|
96
|
+
this._resolveIceCompleted?.(false); // clearing old promise, if any.
|
|
97
|
+
const promise = this.iceConnected;
|
|
98
|
+
this.pc.restartIce();
|
|
99
|
+
return promise;
|
|
100
|
+
}
|
|
101
|
+
get iceConnected() { // Return a promise that resolves when iceConnectionState transitions to connected or completed. Used in testing.
|
|
102
|
+
// I haven't found a reliable way to detect when this happens. If our side was in iceConnectionState 'connected' (but not 'completed')
|
|
103
|
+
// before the renegotiation, then we might never go completed. For testing, what I've been doing is racing between this side resolving,
|
|
104
|
+
// the other side resolving, and a timeout of a few seconds. NodeJS almost always resolves with the first two, and browsers mostly use the last.
|
|
105
|
+
return this._iceConnected ||= new Promise(resolve => {
|
|
106
|
+
this._resolveIceCompleted = value => {
|
|
107
|
+
this._iceConnected = null;
|
|
108
|
+
resolve(value);
|
|
109
|
+
};
|
|
110
|
+
});
|
|
111
|
+
}
|
|
79
112
|
async close() {
|
|
80
113
|
// Do not try to close or wait for data channels. It confuses Safari.
|
|
81
114
|
const pc = this.pc;
|
|
@@ -129,7 +162,7 @@ export class WebRTC {
|
|
|
129
162
|
this.settingRemote = true;
|
|
130
163
|
try {
|
|
131
164
|
await this.pc.setRemoteDescription(description)
|
|
132
|
-
.catch(e => this
|
|
165
|
+
.catch(e => this.log(this.name, 'ignoring error in setRemoteDescription while in state', this.pc.signalingState, e));
|
|
133
166
|
if (offerCollision) this.rolledBack = true;
|
|
134
167
|
} finally {
|
|
135
168
|
this.settingRemote = false;
|
|
@@ -139,7 +172,7 @@ export class WebRTC {
|
|
|
139
172
|
if (description.type === "offer") {
|
|
140
173
|
const answer = await this.pc.createAnswer();
|
|
141
174
|
await this.pc.setLocalDescription(answer)
|
|
142
|
-
.catch(e => this.
|
|
175
|
+
.catch(e => this.log(this.name, 'ignoring error setLocalDescription of answer', e));
|
|
143
176
|
this.signal({ description: this.pc.localDescription });
|
|
144
177
|
}
|
|
145
178
|
|
|
@@ -186,7 +219,7 @@ export class WebRTC {
|
|
|
186
219
|
// If this peer is responding to the other side, we arrange our waiting respond() to continue with data for the other side.
|
|
187
220
|
//
|
|
188
221
|
// Otherwise, if this side is allowed to initiate an outbound network request, then this side must define transferSignals(signals)
|
|
189
|
-
// to promise otherSide.respond(signals). If so, we call it with all pending signals (including the new one) and handle the
|
|
222
|
+
// to promise otherSide.respond(signals). If so, we call it with all pending signals (including the new one) and handle the
|
|
190
223
|
// response. (Which latter may trigger more calls to signal() on our side.)
|
|
191
224
|
//
|
|
192
225
|
// Otherwise, we just remember the signal for some future respond() on our side.
|
|
@@ -238,8 +271,8 @@ export class WebRTC {
|
|
|
238
271
|
dc.onopen = () => { // Idempotent (except for logging), if we do not bash dataChannePromises[label] multiple times.
|
|
239
272
|
dc.onopen = null;
|
|
240
273
|
this.log('channel onopen:', label, dc.id, readyState, 'negotiated:', dc.negotiated);
|
|
241
|
-
this[this.restrictablePromiseKey()][label]?.resolve(dc);
|
|
242
|
-
this[this.restrictablePromiseKey(kind)][label]?.resolve(dc);
|
|
274
|
+
this[this.restrictablePromiseKey()]?.[label]?.resolve(dc);
|
|
275
|
+
this[this.restrictablePromiseKey(kind)]?.[label]?.resolve(dc);
|
|
243
276
|
};
|
|
244
277
|
if (isTheirs) dc.onopen();
|
|
245
278
|
return dc;
|
|
@@ -298,5 +331,28 @@ export class WebRTC {
|
|
|
298
331
|
Object.assign(this, {stats, transport, candidatePair, remote, protocol, candidateType, statsTime: now, statsElapsed});
|
|
299
332
|
if (doLogging) console.info(this.name, 'connected', protocol, candidateType, (statsElapsed/1e3).toFixed(1));
|
|
300
333
|
}
|
|
334
|
+
|
|
335
|
+
static getPublicIP(stunServer = "stun:stun.l.google.com:19302") { // Promise external/WAN/public IP addresses for this device.
|
|
336
|
+
// This is the equivalent of whatismyip.com and the like, but using the same stun protocol that
|
|
337
|
+
// webrtc is using. Alas, the stun protocol itself is UDP and so cannot be fetched from a browser,
|
|
338
|
+
// so we use WebRTC itself.
|
|
339
|
+
return new Promise((resolve, reject) => {
|
|
340
|
+
const pc = new wrtc.RTCPeerConnection({iceServers: [{ urls: stunServer }] });
|
|
341
|
+
pc.createDataChannel("");
|
|
342
|
+
pc.onicecandidate = ({candidate}) => {
|
|
343
|
+
if (!candidate) return;
|
|
344
|
+
if (candidate.type === 'host') return;
|
|
345
|
+
// IWBNI we could gather all such addresses and let the app pick an ipV6 if desired.
|
|
346
|
+
// However, we don't always get two addresses, and I haven't been able to get a reliable indication of end of candidates.
|
|
347
|
+
resolve(candidate.address);
|
|
348
|
+
pc.onicecandidate = pc.onicecandidateerror = null;
|
|
349
|
+
pc.close();
|
|
350
|
+
};
|
|
351
|
+
pc.onicecandidateerror = reject;
|
|
352
|
+
pc.createOffer()
|
|
353
|
+
.then((offer) => pc.setLocalDescription(offer))
|
|
354
|
+
.catch(reject);
|
|
355
|
+
});
|
|
356
|
+
}
|
|
301
357
|
}
|
|
302
358
|
|
package/package.json
CHANGED
package/spec/portal.js
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
1
2
|
import process from 'node:process';
|
|
2
3
|
import cluster from 'node:cluster';
|
|
3
4
|
import express from 'express';
|
|
@@ -13,6 +14,8 @@ import { WebRTC } from '../index.js';
|
|
|
13
14
|
// For a more complete example, see https://github.com/YZ-social/kdht/blob/main/spec/portal.js
|
|
14
15
|
|
|
15
16
|
const nPortals = parseInt(process.argv[2] || WebRTC.suggestedInstancesLimit);
|
|
17
|
+
const perPortalDelay = parseInt(process.argv[3] || 1e3);
|
|
18
|
+
const port = parseInt(process.argv[4] || 3000);
|
|
16
19
|
|
|
17
20
|
if (cluster.isPrimary) { // Parent process with portal webserver through which clients can bootstrap
|
|
18
21
|
process.title = 'webrtc-test-portal';
|
|
@@ -25,6 +28,7 @@ if (cluster.isPrimary) { // Parent process with portal webserver through which c
|
|
|
25
28
|
worker.on('message', signals => { // Message from a worker, in response to a POST.
|
|
26
29
|
worker.requestResolver?.(signals);
|
|
27
30
|
});
|
|
31
|
+
await new Promise(resolve => setTimeout(resolve, perPortalDelay));
|
|
28
32
|
}
|
|
29
33
|
const workers = Object.values(cluster.workers);
|
|
30
34
|
app.use(logger(':date[iso] :status :method :url :res[content-length] - :response-time ms'));
|
|
@@ -32,7 +36,6 @@ if (cluster.isPrimary) { // Parent process with portal webserver through which c
|
|
|
32
36
|
app.use(express.static(path.resolve(__dirname, '..'))); // Serve files needed for testing browsers.
|
|
33
37
|
app.post('/join/:to', async (req, res, next) => { // Handler for JSON POST requests that provide an array of signals and get signals back.
|
|
34
38
|
const {params, body} = req;
|
|
35
|
-
// Find the specifed worker, or pick one at random. TODO CLEANUP: Remove. We now use as separate /name/:label to pick one.
|
|
36
39
|
const worker = workers[params.to];
|
|
37
40
|
if (!worker) {
|
|
38
41
|
console.warn('no worker', params.to);
|
|
@@ -48,7 +51,8 @@ if (cluster.isPrimary) { // Parent process with portal webserver through which c
|
|
|
48
51
|
|
|
49
52
|
return res.send(response);
|
|
50
53
|
});
|
|
51
|
-
app.listen(
|
|
54
|
+
app.listen(port);
|
|
55
|
+
console.log(new Date(), 'listening on', port);
|
|
52
56
|
} else {
|
|
53
57
|
process.title = 'webrtc-test-bot-' + cluster.worker.id;
|
|
54
58
|
let portal;
|
|
@@ -59,8 +63,15 @@ if (cluster.isPrimary) { // Parent process with portal webserver through which c
|
|
|
59
63
|
function setup() {
|
|
60
64
|
console.log(new Date(), 'launched bot', cluster.worker.id);
|
|
61
65
|
portal = new WebRTC({name: 'portal'});
|
|
62
|
-
portal.getDataChannelPromise('data').then(dc =>
|
|
63
|
-
|
|
66
|
+
portal.getDataChannelPromise('data').then(dc => {
|
|
67
|
+
console.log(new Date(), 'connected bot', cluster.worker.id);
|
|
68
|
+
dc.send('Welcome!');
|
|
69
|
+
});
|
|
70
|
+
portal.closed.then(() => { // Without any explicit message, this is 15 seconds after the other end goes away.
|
|
71
|
+
console.log('disconnected', cluster.worker.id);
|
|
72
|
+
// Not needed for this test, but for other purposes:
|
|
73
|
+
// setup());
|
|
74
|
+
});
|
|
64
75
|
}
|
|
65
76
|
setup();
|
|
66
77
|
}
|
|
@@ -1,18 +1,31 @@
|
|
|
1
1
|
const { describe, it, expect, beforeAll, afterAll, beforeEach, afterEach} = globalThis; // For linters.
|
|
2
2
|
import { WebRTC } from '../index.js';
|
|
3
3
|
|
|
4
|
+
function delay(ms) {
|
|
5
|
+
return new Promise(resolve => setTimeout(resolve, ms));
|
|
6
|
+
}
|
|
7
|
+
|
|
4
8
|
describe("WebRTC capacity", function () {
|
|
5
|
-
let nNodes =
|
|
9
|
+
let nNodes = 75; // When running all webrtc tests at once, it is important to keep this low. (Memory leak?)
|
|
10
|
+
let perPortalDelay = 1e3;
|
|
11
|
+
let portalSlopDelay = 2e3;
|
|
12
|
+
let perConnectionDelay = 100;
|
|
13
|
+
let connectionSlopDelay = 2e3;
|
|
14
|
+
let port = 3000;
|
|
15
|
+
let baseURL = `http://localhost:${port}`;
|
|
16
|
+
// Alas, I can't seem to get more than about 150-160 nodes through ngrok, even on a machine that can handle 200 directly.
|
|
17
|
+
//let baseURL = 'https://dorado.ngrok.dev'; // if E.g., node spec/portal.js 200 100; ngrok http 3000 --url https://dorado.ngrok.dev
|
|
6
18
|
|
|
7
19
|
// Uncomment this line if running a stand-alone capacity test.
|
|
8
20
|
// (And also likely comment out the import './webrtcSpec.js' in test.html.)
|
|
9
21
|
// nNodes = WebRTC.suggestedInstancesLimit;
|
|
10
22
|
|
|
11
23
|
const isNodeJS = typeof(globalThis.process) !== 'undefined';
|
|
24
|
+
const portalIsLocal = isNodeJS && baseURL.startsWith('http://localhost');
|
|
12
25
|
let nodes = [];
|
|
13
26
|
beforeAll(async function () {
|
|
14
27
|
|
|
15
|
-
if (
|
|
28
|
+
if (portalIsLocal) {
|
|
16
29
|
const { spawn } = await import('node:child_process');
|
|
17
30
|
const path = await import('path');
|
|
18
31
|
const { fileURLToPath } = await import('url');
|
|
@@ -20,29 +33,45 @@ describe("WebRTC capacity", function () {
|
|
|
20
33
|
const __filename = fileURLToPath(import.meta.url);
|
|
21
34
|
const __dirname = path.dirname(__filename);
|
|
22
35
|
function echo(data) { data = data.slice(0, -1); console.log(data.toString()); }
|
|
23
|
-
const portalProcess = spawn('node', [path.resolve(__dirname, 'portal.js'), nNodes]);
|
|
36
|
+
const portalProcess = spawn('node', [path.resolve(__dirname, 'portal.js'), nNodes, perPortalDelay, port]);
|
|
24
37
|
portalProcess.stdout.on('data', echo);
|
|
25
38
|
portalProcess.stderr.on('data', echo);
|
|
26
|
-
await
|
|
39
|
+
await delay(perPortalDelay * nNodes + portalSlopDelay);
|
|
27
40
|
}
|
|
28
41
|
|
|
29
42
|
console.log(new Date(), 'creating', nNodes, 'nodes');
|
|
30
43
|
for (let index = 0; index < nNodes; index++) {
|
|
31
|
-
const node = nodes[index] = new WebRTC({name: 'node'});
|
|
32
|
-
|
|
44
|
+
const node = nodes[index] = new WebRTC({name: 'node' + index});
|
|
45
|
+
console.log('connecting', index);
|
|
46
|
+
node.nFetches = 0;
|
|
47
|
+
node.transferSignals = messages => fetch(`${baseURL}/join/${index}`, {
|
|
33
48
|
method: 'POST',
|
|
34
49
|
headers: { 'Content-Type': 'application/json', 'Connection': 'close' },
|
|
35
50
|
body: JSON.stringify(messages)
|
|
36
|
-
}).then(response =>
|
|
51
|
+
}).then(response => {
|
|
52
|
+
if (!response.ok) {
|
|
53
|
+
console.log('fetch', index, 'failed', response.status, response.statusText);
|
|
54
|
+
return null;
|
|
55
|
+
}
|
|
56
|
+
node.nFetches++;
|
|
57
|
+
return response.json();
|
|
58
|
+
});
|
|
37
59
|
node.closed.then(() => console.log('closed', index)); // Just for debugging.
|
|
38
60
|
const dataOpened = node.getDataChannelPromise('data')
|
|
39
61
|
.then(dc => node.dataReceived = new Promise(resolve => dc.onmessage = event => resolve(event.data)));
|
|
40
|
-
node.createChannel('data', {negotiated: false});
|
|
62
|
+
node.createChannel('data', {negotiated: false});
|
|
41
63
|
await dataOpened;
|
|
42
64
|
console.log('opened', index);
|
|
65
|
+
// if (!portalIsLocal) {
|
|
66
|
+
// const maxConnectionsPerNode = 3;
|
|
67
|
+
// const maxNgrokConnectionsPerSecond = 120 / 60;
|
|
68
|
+
// const secondsPerNode = maxConnectionsPerNode / maxNgrokConnectionsPerSecond;
|
|
69
|
+
// await delay(secondsPerNode * 1.5e3); // fudge factor milliseconds/second
|
|
70
|
+
// }
|
|
43
71
|
}
|
|
72
|
+
await delay(connectionSlopDelay);
|
|
44
73
|
console.log(new Date(), 'finished setup');
|
|
45
|
-
},
|
|
74
|
+
}, nNodes * perPortalDelay + portalSlopDelay + nNodes * perConnectionDelay + connectionSlopDelay + 1e3);
|
|
46
75
|
for (let index = 0; index < nNodes; index++) {
|
|
47
76
|
it('opened connection ' + index, function () {
|
|
48
77
|
expect(nodes[index].pc.connectionState).toBe('connected');
|
|
@@ -50,6 +79,17 @@ describe("WebRTC capacity", function () {
|
|
|
50
79
|
it('got data ' + index, async function () {
|
|
51
80
|
expect(await nodes[index].dataReceived).toBe('Welcome!');
|
|
52
81
|
});
|
|
82
|
+
it('resignals on restartIce ' + index, async function () {
|
|
83
|
+
const node = nodes[index];
|
|
84
|
+
expect(node.pc.iceConnectionState).toBe('completed');
|
|
85
|
+
node.nFetches = 0;
|
|
86
|
+
node.pc.restartIce();
|
|
87
|
+
await delay(100); // timing will vary
|
|
88
|
+
expect(node.pc.iceConnectionState).not.toBe('completed');
|
|
89
|
+
await delay(2e3); // timing will vary
|
|
90
|
+
expect(node.pc.iceConnectionState).toBe('completed');
|
|
91
|
+
expect(node.nFetches).toBeGreaterThan(0); // We will have re-signalled.
|
|
92
|
+
});
|
|
53
93
|
}
|
|
54
94
|
afterAll(async function () {
|
|
55
95
|
console.log(new Date(), 'starting teardown');
|
|
@@ -60,7 +100,7 @@ describe("WebRTC capacity", function () {
|
|
|
60
100
|
expect(pc.connectionState).toBe('closed'));
|
|
61
101
|
delete nodes[index];
|
|
62
102
|
}
|
|
63
|
-
if (
|
|
103
|
+
if (portalIsLocal) {
|
|
64
104
|
const { exec } = await import('node:child_process');
|
|
65
105
|
exec('pkill webrtc-test-');
|
|
66
106
|
}
|
package/spec/webrtcSpec.js
CHANGED
|
@@ -6,7 +6,16 @@ describe("WebRTC", function () {
|
|
|
6
6
|
let connections = [];
|
|
7
7
|
describe("direct in-process signaling", function () {
|
|
8
8
|
async function makePair({debug = false, delay = 0, index = 0} = {}) {
|
|
9
|
-
//
|
|
9
|
+
// Make a pair of WebRTC objects (in the same Javascript process) that transfer signals to each other by calling
|
|
10
|
+
// respond(signals) on the other of the pair. In a real application, the WebRTC instances would be in different
|
|
11
|
+
// processes (likely on different devices) and transferSignals would instead involve some sort of InterProcess
|
|
12
|
+
// Communication or network call, ultimately resulting in the same respond(signals) being called on the other
|
|
13
|
+
// end, and the resulting signals being transferred back.
|
|
14
|
+
//
|
|
15
|
+
// connections[index] will contain {A, B, bothOpen}, where A and B are the two WebRTC, and bothOpen resolves
|
|
16
|
+
// when A and B are both open (regardless of how they were triggered, which is different for each test).
|
|
17
|
+
// We also annotate the WebRTC with various flags used in the test, and arranges to set those when the channel
|
|
18
|
+
// named 'data' is opened.
|
|
10
19
|
const configuration = { iceServers: WebRTC.iceServers };
|
|
11
20
|
const A = new WebRTC({name: `A (impolite) ${index}`, polite: false, debug, configuration});
|
|
12
21
|
const B = new WebRTC({name: `B (polite) ${index}`, polite: true, debug, configuration});
|
|
@@ -75,7 +84,11 @@ describe("WebRTC", function () {
|
|
|
75
84
|
await WebRTC.delay(1); // TODO: This is crazy, but without it, the FIRST connection in chrome hangs!
|
|
76
85
|
return connections[index] = {A, B, bothOpen: Promise.all(promises)};
|
|
77
86
|
}
|
|
78
|
-
function standardBehavior(setup, {includeConflictCheck = isBrowser, includeSecondChannel = false} = {}) {
|
|
87
|
+
function standardBehavior(setup, {includeConflictCheck = isBrowser, includeSecondChannel = false, reneg = true} = {}) {
|
|
88
|
+
// Defines a set of tests, intended to be within a suite.
|
|
89
|
+
// A beforeAll is created which calls the given setup({index}) nPairs times. setup() is expected to makePair and
|
|
90
|
+
// open the data channel in some suite-specific way.
|
|
91
|
+
|
|
79
92
|
// The nPairs does NOT seem to be a reliable way to determine how many webrtc peers can be active in the same Javascript.
|
|
80
93
|
// I have had numbers that work for every one of the cases DESCRIBEd below, and even in combinations,
|
|
81
94
|
// but it seems to get upset when all are run together, and it seems to depend on the state of the machine or phases of the moon.
|
|
@@ -91,6 +104,7 @@ describe("WebRTC", function () {
|
|
|
91
104
|
//
|
|
92
105
|
// webrtcCapacitySpec.js may be a better test for capacity.
|
|
93
106
|
const nPairs = 10;
|
|
107
|
+
|
|
94
108
|
beforeAll(async function () {
|
|
95
109
|
const start = Date.now();
|
|
96
110
|
console.log(new Date(), 'start setup', nPairs, 'pairs');
|
|
@@ -110,7 +124,7 @@ describe("WebRTC", function () {
|
|
|
110
124
|
expect(B.theirs.readyState).toBe('open');
|
|
111
125
|
});
|
|
112
126
|
it(`receives ${index}.`, async function () {
|
|
113
|
-
const {A, B} = connections[index];
|
|
127
|
+
const {A, B} = connections[index];
|
|
114
128
|
await B.gotData;
|
|
115
129
|
expect(B.receivedMessageCount).toBe(A.sentMessageCount);
|
|
116
130
|
await A.gotData;
|
|
@@ -121,6 +135,34 @@ describe("WebRTC", function () {
|
|
|
121
135
|
expect(A.sentMessageCount).toBe(1);
|
|
122
136
|
expect(B.sentMessageCount).toBe(1);
|
|
123
137
|
});
|
|
138
|
+
let waitBefore = Math.random() < 0.5;
|
|
139
|
+
it(`re-negotiates ${index} waiting to settle ${waitBefore ? 'before' : 'after'} sending.`, async function () {
|
|
140
|
+
const {A, B} = connections[index];
|
|
141
|
+
await A.gotData; // if receive test hasn't fired yet, the setup might not yet have completed capturing the send count.
|
|
142
|
+
await B.gotData;
|
|
143
|
+
|
|
144
|
+
// Capture counts expected by the other tests.
|
|
145
|
+
const {sentMessageCount:aSend, receivedMessageCount:aReceive} = A;
|
|
146
|
+
const {sentMessageCount:bSend, receivedMessageCount:bReceive} = B;
|
|
147
|
+
|
|
148
|
+
async function reneg(A, B) {
|
|
149
|
+
let aIce = A.renegotiate();
|
|
150
|
+
// We're supposed to be able to send and receive during renegotiation, so we flip a coin
|
|
151
|
+
// as to whether the test will wait before sending or after.
|
|
152
|
+
const timeout = 1e3; // NodeJS usually resolves with our side or sometimes the other going to completed, but not browsers.
|
|
153
|
+
if (waitBefore) await Promise.race([aIce, B.iceConnected, WebRTC.delay(timeout)]);
|
|
154
|
+
const gotData = new Promise(resolve => B.ours.addEventListener('message', e => resolve(e.data)));
|
|
155
|
+
A.theirs.send('after');
|
|
156
|
+
expect(await gotData).toBe('after');
|
|
157
|
+
if (!waitBefore) await Promise.race([aIce, B.iceConnected, WebRTC.delay(timeout)]);
|
|
158
|
+
}
|
|
159
|
+
await reneg(A, B);
|
|
160
|
+
await reneg(B, A);
|
|
161
|
+
|
|
162
|
+
// Restore counts expected by the other tests.
|
|
163
|
+
Object.assign(A, {sentMessageCount:aSend, receivedMessageCount:aReceive});
|
|
164
|
+
Object.assign(B, {sentMessageCount:bSend, receivedMessageCount:bReceive});
|
|
165
|
+
}, 10e3);
|
|
124
166
|
if (includeSecondChannel) {
|
|
125
167
|
it(`handles second channel ${index}.`, async function () {
|
|
126
168
|
const {A, B} = connections[index];
|
|
@@ -155,6 +197,9 @@ describe("WebRTC", function () {
|
|
|
155
197
|
let promise = A.close().then(async apc => {
|
|
156
198
|
expect(apc.connectionState).toBe('closed'); // Only on the side that explicitly closed.
|
|
157
199
|
expect(apc.signalingState).toBe('closed');
|
|
200
|
+
|
|
201
|
+
await B.close(); // fixme: B will try to reconnect unless we tell it to stop.
|
|
202
|
+
|
|
158
203
|
const bpc = await B.closed; // Waiting for B to notice.
|
|
159
204
|
await B.close(); // Resources are not necessarilly freed when the other side closes. An explicit close() is needed.
|
|
160
205
|
expect(['closed', 'disconnected', 'failed']).toContain(bpc.connectionState);
|
|
@@ -168,6 +213,12 @@ describe("WebRTC", function () {
|
|
|
168
213
|
}, Math.max(30e3, 1e3 * nPairs));
|
|
169
214
|
}
|
|
170
215
|
describe("one side opens", function () {
|
|
216
|
+
// One of each pair definitively initiates the connection that the other side was waiting for. This can happen
|
|
217
|
+
// when one peer contacts another out of the blue, or when a client contacts a server or portal.
|
|
218
|
+
// We test the usual negotiated=true case (where the initiator names the bidirectional channel and the app
|
|
219
|
+
// arranges for the receiver to actively create a channel with the same app-specific name,
|
|
220
|
+
// and the negotiated=false case.
|
|
221
|
+
|
|
171
222
|
describe('non-negotiated', function () {
|
|
172
223
|
beforeAll(function () {console.log('one-sided non-negotiated'); });
|
|
173
224
|
standardBehavior(async function ({index}) {
|
|
@@ -199,6 +250,18 @@ describe("WebRTC", function () {
|
|
|
199
250
|
});
|
|
200
251
|
|
|
201
252
|
describe("simultaneous two-sided", function () {
|
|
253
|
+
// The two sides both attempt to initiate a connection at the same time. This can happen between homogeneous peers.
|
|
254
|
+
// There is a matrix of four possibilities:
|
|
255
|
+
//
|
|
256
|
+
// negotiated=true - the usual case, in which the application expects both sides to be created with the same
|
|
257
|
+
// app-specific bidirectional channel name...
|
|
258
|
+
// negotiated=false - meaning that each side is going to create its own sending channel which will automatically
|
|
259
|
+
// have a distinct index (even if the channel name is the same).
|
|
260
|
+
//
|
|
261
|
+
// Within these, we test with the "polite" side starting first, or starting second. On the network, we cannot
|
|
262
|
+
// coordinate which app instance will attempt to initiate connection, but we can arrange for any pair to agree
|
|
263
|
+
// on which of the two is the "polite" one (e.g., by sort order of their names or some such).
|
|
264
|
+
|
|
202
265
|
describe("negotiated single full-duplex-channel", function () {
|
|
203
266
|
describe("impolite first", function () {
|
|
204
267
|
beforeAll(function () {console.log('two-sided negotiated impolite-first'); });
|
|
@@ -207,7 +270,7 @@ describe("WebRTC", function () {
|
|
|
207
270
|
A.createChannel("data", {negotiated: true});
|
|
208
271
|
B.createChannel("data", {negotiated: true});
|
|
209
272
|
await bothOpen;
|
|
210
|
-
});
|
|
273
|
+
}, {reneg: true});
|
|
211
274
|
});
|
|
212
275
|
describe("polite first", function () {
|
|
213
276
|
beforeAll(function () {console.log('two-sided negotiated polite-first');});
|
|
@@ -217,10 +280,10 @@ describe("WebRTC", function () {
|
|
|
217
280
|
B.createChannel("data", {negotiated: true});
|
|
218
281
|
A.createChannel("data", {negotiated: true});
|
|
219
282
|
await bothOpen;
|
|
220
|
-
});
|
|
283
|
+
}, {reneg: true});
|
|
221
284
|
});
|
|
222
285
|
});
|
|
223
|
-
describe("non-negotiated dual half-duplex channels", function () {
|
|
286
|
+
describe("non-negotiated dual half-duplex channels", function () { // fixme: sometimes fail to renegotiate
|
|
224
287
|
const delay = 200;
|
|
225
288
|
const debug = false;
|
|
226
289
|
describe("impolite first", function () {
|
package/stun.js
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
const wrtc = (typeof(process) === 'undefined') ? globalThis : (await import('#wrtc')).default;
|
|
2
|
+
|
|
3
|
+
// function getIPs(stunServer = 'stun:stun.l.google.com:19302') { // Promise external/WAN/public IP addresses for this device.
|
|
4
|
+
// // This is the equivalent of whatismyip.com and the like, but using the same stun protocol that
|
|
5
|
+
// // webrtc is using. Alas, the stun protocol itself is UDP and so cannot be fetched from a browser,
|
|
6
|
+
// // so we use WebRTC itself.
|
|
7
|
+
// return new Promise((resolve, reject) => {
|
|
8
|
+
// const addresses = [];
|
|
9
|
+
// const pc = new wrtc.RTCPeerConnection({ iceServers: [ {urls: stunServer} ] });
|
|
10
|
+
// const done = () => {
|
|
11
|
+
// console.log('done');
|
|
12
|
+
// pc.onicecandidateerror = pc.onicegatheringstatechange = pc.onicecandidate = null;
|
|
13
|
+
// pc.close();
|
|
14
|
+
// resolve(addresses);
|
|
15
|
+
// };
|
|
16
|
+
// pc.createDataChannel('');
|
|
17
|
+
// pc.createOffer()
|
|
18
|
+
// .then(offer => pc.setLocalDescription(offer))
|
|
19
|
+
// .catch(reject);
|
|
20
|
+
// pc.onicecandidateerror = e => reject(e);
|
|
21
|
+
// pc.onicegatheringstatechange = e => (pc.iceGatheringState === 'complete') && done();
|
|
22
|
+
// pc.onicecandidate = (ice) => {
|
|
23
|
+
// console.log(ice.candidate.type);
|
|
24
|
+
// if (!ice || !ice.candidate) return done();
|
|
25
|
+
// if (ice.candidate.type === 'host') return null;
|
|
26
|
+
// return addresses.push(ice.candidate.address);
|
|
27
|
+
// };
|
|
28
|
+
// });
|
|
29
|
+
// }
|
|
30
|
+
|
|
31
|
+
export function getPublicIP(stunServer = "stun:stun.l.google.com:19302") {
|
|
32
|
+
return new Promise((resolve, reject) => {
|
|
33
|
+
const pc = new wrtc.RTCPeerConnection({iceServers: [{ urls: stunServer }] });
|
|
34
|
+
pc.createDataChannel("");
|
|
35
|
+
pc.onicecandidate = ({candidate}) => {
|
|
36
|
+
if (!candidate) return;
|
|
37
|
+
if (candidate.type !== "srflx") return;
|
|
38
|
+
resolve(candidate.address);
|
|
39
|
+
pc.onicecandidate = pc.onicecandidateerror = null;
|
|
40
|
+
pc.close();
|
|
41
|
+
};
|
|
42
|
+
pc.onicecandidateerror = reject;
|
|
43
|
+
pc.createOffer()
|
|
44
|
+
.then((offer) => pc.setLocalDescription(offer))
|
|
45
|
+
.catch(reject);
|
|
46
|
+
});
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
// Usage
|
|
50
|
+
//await getPublicIP().then(ip => console.log("Public IP:", ip));
|
|
51
|
+
|
|
52
|
+
|