@hashtree/worker 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +38 -0
- package/dist/capabilities/blossomBandwidthTracker.d.ts +26 -0
- package/dist/capabilities/blossomBandwidthTracker.d.ts.map +1 -0
- package/dist/capabilities/blossomBandwidthTracker.js +53 -0
- package/dist/capabilities/blossomBandwidthTracker.js.map +1 -0
- package/dist/capabilities/blossomTransport.d.ts +22 -0
- package/dist/capabilities/blossomTransport.d.ts.map +1 -0
- package/dist/capabilities/blossomTransport.js +124 -0
- package/dist/capabilities/blossomTransport.js.map +1 -0
- package/dist/capabilities/connectivity.d.ts +3 -0
- package/dist/capabilities/connectivity.d.ts.map +1 -0
- package/dist/capabilities/connectivity.js +49 -0
- package/dist/capabilities/connectivity.js.map +1 -0
- package/dist/capabilities/idbStorage.d.ts +25 -0
- package/dist/capabilities/idbStorage.d.ts.map +1 -0
- package/dist/capabilities/idbStorage.js +73 -0
- package/dist/capabilities/idbStorage.js.map +1 -0
- package/dist/client.d.ts +54 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/client.js +336 -0
- package/dist/client.js.map +1 -0
- package/dist/entry.d.ts +2 -0
- package/dist/entry.d.ts.map +1 -0
- package/dist/entry.js +2 -0
- package/dist/entry.js.map +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +3 -0
- package/dist/index.js.map +1 -0
- package/dist/iris/identity.d.ts +36 -0
- package/dist/iris/identity.d.ts.map +1 -0
- package/dist/iris/identity.js +78 -0
- package/dist/iris/identity.js.map +1 -0
- package/dist/iris/mediaHandler.d.ts +16 -0
- package/dist/iris/mediaHandler.d.ts.map +1 -0
- package/dist/iris/mediaHandler.js +529 -0
- package/dist/iris/mediaHandler.js.map +1 -0
- package/dist/iris/ndk.d.ts +95 -0
- package/dist/iris/ndk.d.ts.map +1 -0
- package/dist/iris/ndk.js +496 -0
- package/dist/iris/ndk.js.map +1 -0
- package/dist/iris/nostr-wasm.d.ts +14 -0
- package/dist/iris/nostr-wasm.d.ts.map +1 -0
- package/dist/iris/nostr-wasm.js +246 -0
- package/dist/iris/nostr-wasm.js.map +1 -0
- package/dist/iris/nostr.d.ts +60 -0
- package/dist/iris/nostr.d.ts.map +1 -0
- package/dist/iris/nostr.js +207 -0
- package/dist/iris/nostr.js.map +1 -0
- package/dist/iris/protocol.d.ts +574 -0
- package/dist/iris/protocol.d.ts.map +1 -0
- package/dist/iris/protocol.js +16 -0
- package/dist/iris/protocol.js.map +1 -0
- package/dist/iris/signing.d.ts +50 -0
- package/dist/iris/signing.d.ts.map +1 -0
- package/dist/iris/signing.js +299 -0
- package/dist/iris/signing.js.map +1 -0
- package/dist/iris/treeRootCache.d.ts +73 -0
- package/dist/iris/treeRootCache.d.ts.map +1 -0
- package/dist/iris/treeRootCache.js +191 -0
- package/dist/iris/treeRootCache.js.map +1 -0
- package/dist/iris/treeRootSubscription.d.ts +49 -0
- package/dist/iris/treeRootSubscription.d.ts.map +1 -0
- package/dist/iris/treeRootSubscription.js +185 -0
- package/dist/iris/treeRootSubscription.js.map +1 -0
- package/dist/iris/utils/constants.d.ts +76 -0
- package/dist/iris/utils/constants.d.ts.map +1 -0
- package/dist/iris/utils/constants.js +113 -0
- package/dist/iris/utils/constants.js.map +1 -0
- package/dist/iris/utils/errorMessage.d.ts +5 -0
- package/dist/iris/utils/errorMessage.d.ts.map +1 -0
- package/dist/iris/utils/errorMessage.js +8 -0
- package/dist/iris/utils/errorMessage.js.map +1 -0
- package/dist/iris/utils/lruCache.d.ts +26 -0
- package/dist/iris/utils/lruCache.d.ts.map +1 -0
- package/dist/iris/utils/lruCache.js +66 -0
- package/dist/iris/utils/lruCache.js.map +1 -0
- package/dist/iris/webrtc.d.ts +2 -0
- package/dist/iris/webrtc.d.ts.map +1 -0
- package/dist/iris/webrtc.js +3 -0
- package/dist/iris/webrtc.js.map +1 -0
- package/dist/iris/webrtcSignaling.d.ts +37 -0
- package/dist/iris/webrtcSignaling.d.ts.map +1 -0
- package/dist/iris/webrtcSignaling.js +86 -0
- package/dist/iris/webrtcSignaling.js.map +1 -0
- package/dist/iris/worker.d.ts +12 -0
- package/dist/iris/worker.d.ts.map +1 -0
- package/dist/iris/worker.js +1582 -0
- package/dist/iris/worker.js.map +1 -0
- package/dist/iris-entry.d.ts +2 -0
- package/dist/iris-entry.d.ts.map +1 -0
- package/dist/iris-entry.js +2 -0
- package/dist/iris-entry.js.map +1 -0
- package/dist/mediaStreaming.d.ts +7 -0
- package/dist/mediaStreaming.d.ts.map +1 -0
- package/dist/mediaStreaming.js +48 -0
- package/dist/mediaStreaming.js.map +1 -0
- package/dist/p2p/boundedQueue.d.ts +74 -0
- package/dist/p2p/boundedQueue.d.ts.map +1 -0
- package/dist/p2p/boundedQueue.js +112 -0
- package/dist/p2p/boundedQueue.js.map +1 -0
- package/dist/p2p/errorMessage.d.ts +5 -0
- package/dist/p2p/errorMessage.d.ts.map +1 -0
- package/dist/p2p/errorMessage.js +7 -0
- package/dist/p2p/errorMessage.js.map +1 -0
- package/dist/p2p/index.d.ts +7 -0
- package/dist/p2p/index.d.ts.map +1 -0
- package/dist/p2p/index.js +5 -0
- package/dist/p2p/index.js.map +1 -0
- package/dist/p2p/lruCache.d.ts +26 -0
- package/dist/p2p/lruCache.d.ts.map +1 -0
- package/dist/p2p/lruCache.js +65 -0
- package/dist/p2p/lruCache.js.map +1 -0
- package/dist/p2p/protocol.d.ts +10 -0
- package/dist/p2p/protocol.d.ts.map +1 -0
- package/dist/p2p/protocol.js +2 -0
- package/dist/p2p/protocol.js.map +1 -0
- package/dist/p2p/queryForwardingMachine.d.ts +46 -0
- package/dist/p2p/queryForwardingMachine.d.ts.map +1 -0
- package/dist/p2p/queryForwardingMachine.js +144 -0
- package/dist/p2p/queryForwardingMachine.js.map +1 -0
- package/dist/p2p/signaling.d.ts +63 -0
- package/dist/p2p/signaling.d.ts.map +1 -0
- package/dist/p2p/signaling.js +165 -0
- package/dist/p2p/signaling.js.map +1 -0
- package/dist/p2p/webrtcController.d.ts +152 -0
- package/dist/p2p/webrtcController.d.ts.map +1 -0
- package/dist/p2p/webrtcController.js +813 -0
- package/dist/p2p/webrtcController.js.map +1 -0
- package/dist/p2p/webrtcProxy.d.ts +55 -0
- package/dist/p2p/webrtcProxy.d.ts.map +1 -0
- package/dist/p2p/webrtcProxy.js +386 -0
- package/dist/p2p/webrtcProxy.js.map +1 -0
- package/dist/privacyGuards.d.ts +14 -0
- package/dist/privacyGuards.d.ts.map +1 -0
- package/dist/privacyGuards.js +27 -0
- package/dist/privacyGuards.js.map +1 -0
- package/dist/protocol.d.ts +171 -0
- package/dist/protocol.d.ts.map +1 -0
- package/dist/protocol.js +2 -0
- package/dist/protocol.js.map +1 -0
- package/dist/types.d.ts +2 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/dist/worker.d.ts +2 -0
- package/dist/worker.d.ts.map +1 -0
- package/dist/worker.js +616 -0
- package/dist/worker.js.map +1 -0
- package/package.json +64 -0
|
@@ -0,0 +1,813 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Worker WebRTC Controller
|
|
3
|
+
*
|
|
4
|
+
* Controls WebRTC connections from the worker thread.
|
|
5
|
+
* Main thread proxy executes RTCPeerConnection operations.
|
|
6
|
+
*
|
|
7
|
+
* Worker owns:
|
|
8
|
+
* - Peer state tracking
|
|
9
|
+
* - Connection lifecycle decisions
|
|
10
|
+
* - Data protocol (request/response)
|
|
11
|
+
* - Signaling message handling
|
|
12
|
+
*
|
|
13
|
+
* Main thread proxy owns:
|
|
14
|
+
* - RTCPeerConnection instances (not available in workers)
|
|
15
|
+
* - Data channel I/O
|
|
16
|
+
*/
|
|
17
|
+
import { MAX_HTL, MSG_TYPE_REQUEST, MSG_TYPE_RESPONSE, FRAGMENT_SIZE, PeerId, generateUuid, encodeRequest, encodeResponse, parseMessage, createRequest, createResponse, createFragmentResponse, hashToKey, verifyHash, generatePeerHTLConfig, decrementHTL, shouldForward, } from '@hashtree/nostr';
|
|
18
|
+
import { LRUCache } from './lruCache.js';
|
|
19
|
+
import { QueryForwardingMachine } from './queryForwardingMachine.js';
|
|
20
|
+
// ============================================================================
|
|
21
|
+
// Controller
|
|
22
|
+
// ============================================================================
|
|
23
|
+
export class WebRTCController {
|
|
24
|
+
myPeerId;
|
|
25
|
+
peers = new Map();
|
|
26
|
+
localStore;
|
|
27
|
+
sendCommand;
|
|
28
|
+
sendSignaling;
|
|
29
|
+
classifyPeer;
|
|
30
|
+
requestTimeout;
|
|
31
|
+
debug;
|
|
32
|
+
recentRequests = new LRUCache(1000);
|
|
33
|
+
forwardingMachine;
|
|
34
|
+
// Pool configuration - reasonable defaults, settings sync will override
|
|
35
|
+
poolConfig = {
|
|
36
|
+
follows: { maxConnections: 20, satisfiedConnections: 10 },
|
|
37
|
+
other: { maxConnections: 16, satisfiedConnections: 8 },
|
|
38
|
+
};
|
|
39
|
+
// Hello interval - 5s for faster peer discovery
|
|
40
|
+
helloInterval;
|
|
41
|
+
HELLO_INTERVAL = 5000;
|
|
42
|
+
/**
 * @param {object} config
 *   - pubkey: our Nostr pubkey; combined with a fresh UUID to form this session's PeerId.
 *   - localStore: content-addressed store with get(hash)/put(hash, data).
 *   - sendCommand: sends rtc:* commands to the main-thread proxy.
 *   - sendSignaling: async, publishes signaling messages (returns a Promise).
 *   - requestTimeout: ms before a pending request resolves null (default 1000).
 *   - debug: enables this.log output (default false).
 *   - forwardRateLimit: optional { maxForwardsPerPeerWindow, windowMs }.
 *   - getFollows: optional () => Set of followed pubkeys, used for pool classification.
 */
constructor(config) {
  // A new UUID per session: the same pubkey can run several concurrent peers.
  this.myPeerId = new PeerId(config.pubkey, generateUuid());
  this.localStore = config.localStore;
  this.sendCommand = config.sendCommand;
  this.sendSignaling = config.sendSignaling;
  this.requestTimeout = config.requestTimeout ?? 1000;
  this.debug = config.debug ?? false;
  this.forwardingMachine = new QueryForwardingMachine({
    requestTimeoutMs: this.requestTimeout,
    maxForwardsPerPeerWindow: config.forwardRateLimit?.maxForwardsPerPeerWindow,
    forwardRateLimitWindowMs: config.forwardRateLimit?.windowMs,
    // When a forward times out, un-mark the peers that were waiting on it.
    onForwardTimeout: ({ hashKey, requesterIds }) => {
      this.clearRequesterMarkers(hashKey, requesterIds);
    },
  });
  // Default classifier: check if pubkey is in follows
  const getFollows = config.getFollows ?? (() => new Set());
  this.classifyPeer = (pubkey) => {
    const follows = getFollows();
    const isFollow = follows.has(pubkey);
    return isFollow ? 'follows' : 'other';
  };
}
|
|
65
|
+
// ============================================================================
|
|
66
|
+
// Lifecycle
|
|
67
|
+
// ============================================================================
|
|
68
|
+
start() {
|
|
69
|
+
this.log('Starting WebRTC controller');
|
|
70
|
+
// Send hello periodically
|
|
71
|
+
this.helloInterval = setInterval(() => {
|
|
72
|
+
this.sendHello();
|
|
73
|
+
}, this.HELLO_INTERVAL);
|
|
74
|
+
// Send initial hello
|
|
75
|
+
this.sendHello();
|
|
76
|
+
}
|
|
77
|
+
stop() {
|
|
78
|
+
this.log('Stopping WebRTC controller');
|
|
79
|
+
if (this.helloInterval) {
|
|
80
|
+
clearInterval(this.helloInterval);
|
|
81
|
+
this.helloInterval = undefined;
|
|
82
|
+
}
|
|
83
|
+
// Close all peers
|
|
84
|
+
for (const peerId of this.peers.keys()) {
|
|
85
|
+
this.closePeer(peerId);
|
|
86
|
+
}
|
|
87
|
+
this.forwardingMachine.stop();
|
|
88
|
+
}
|
|
89
|
+
// ============================================================================
|
|
90
|
+
// Signaling
|
|
91
|
+
// ============================================================================
|
|
92
|
+
sendHello() {
|
|
93
|
+
const msg = {
|
|
94
|
+
type: 'hello',
|
|
95
|
+
peerId: this.myPeerId.uuid,
|
|
96
|
+
};
|
|
97
|
+
this.sendSignaling(msg).catch(err => {
|
|
98
|
+
console.error('[WebRTC] sendSignaling error:', err);
|
|
99
|
+
});
|
|
100
|
+
}
|
|
101
|
+
/**
|
|
102
|
+
* Public method to trigger a hello broadcast.
|
|
103
|
+
* Used for testing to force peer discovery after follows are set up.
|
|
104
|
+
*/
|
|
105
|
+
broadcastHello() {
|
|
106
|
+
this.sendHello();
|
|
107
|
+
}
|
|
108
|
+
/**
 * Handle incoming signaling message (from Nostr kind 25050).
 *
 * Note: For hello messages, msg.peerId is just the UUID (from Nostr tag).
 * For directed messages (offer/answer/candidate), msg.peerId is already the full
 * pubkey:uuid format from Rust's SignalingMessage.
 *
 * @param {object} msg - parsed signaling message ('hello' | 'offer' | 'answer' | 'candidate' | 'candidates')
 * @param {string} senderPubkey - pubkey the message was signed by
 */
async handleSignalingMessage(msg, senderPubkey) {
  this.log(`Signaling from ${senderPubkey.slice(0, 8)}:`, msg.type);
  switch (msg.type) {
    case 'hello':
      // For hello, msg.peerId is just UUID - handleHello constructs the full peer ID.
      // Tolerate full pubkey:uuid form too by taking the last ':'-separated part.
      {
        const senderUuid = msg.peerId.includes(':') ? msg.peerId.split(':').slice(-1)[0] : msg.peerId;
        await this.handleHello(senderPubkey, senderUuid);
      }
      break;
    case 'offer':
      // For directed messages, msg.peerId is already full pubkey:uuid format
      if (msg.peerId === this.myPeerId.toString()) {
        return; // Skip messages from ourselves
      }
      if (this.isMessageForUs(msg)) {
        // Construct RTCSessionDescriptionInit from flat sdp field
        await this.handleOffer(msg.peerId, senderPubkey, { type: 'offer', sdp: msg.sdp });
      }
      break;
    case 'answer':
      if (msg.peerId === this.myPeerId.toString()) {
        return;
      }
      if (this.isMessageForUs(msg)) {
        // Construct RTCSessionDescriptionInit from flat sdp field
        await this.handleAnswer(msg.peerId, { type: 'answer', sdp: msg.sdp });
      }
      break;
    case 'candidate':
      if (msg.peerId === this.myPeerId.toString()) {
        return;
      }
      if (this.isMessageForUs(msg)) {
        // Construct RTCIceCandidateInit from flat fields
        await this.handleIceCandidate(msg.peerId, {
          candidate: msg.candidate,
          sdpMLineIndex: msg.sdpMLineIndex,
          sdpMid: msg.sdpMid,
        });
      }
      break;
    case 'candidates':
      // Batched form: same handling, one candidate at a time.
      if (msg.peerId === this.myPeerId.toString()) {
        return;
      }
      if (this.isMessageForUs(msg)) {
        for (const c of msg.candidates) {
          await this.handleIceCandidate(msg.peerId, {
            candidate: c.candidate,
            sdpMLineIndex: c.sdpMLineIndex,
            sdpMid: c.sdpMid,
          });
        }
      }
      break;
  }
}
|
|
173
|
+
isMessageForUs(msg) {
|
|
174
|
+
if ('targetPeerId' in msg && msg.targetPeerId) {
|
|
175
|
+
return msg.targetPeerId === this.myPeerId.toString();
|
|
176
|
+
}
|
|
177
|
+
return true;
|
|
178
|
+
}
|
|
179
|
+
async handleHello(senderPubkey, senderUuid) {
|
|
180
|
+
const peerId = `${senderPubkey}:${senderUuid}`;
|
|
181
|
+
// Already connected?
|
|
182
|
+
if (this.peers.has(peerId)) {
|
|
183
|
+
return;
|
|
184
|
+
}
|
|
185
|
+
// Check pool limits
|
|
186
|
+
const pool = this.classifyPeer(senderPubkey);
|
|
187
|
+
if (!this.shouldConnect(pool)) {
|
|
188
|
+
this.log(`Pool ${pool} at capacity, ignoring hello`);
|
|
189
|
+
return;
|
|
190
|
+
}
|
|
191
|
+
// In 'other' pool, only allow 1 connection per pubkey
|
|
192
|
+
if (pool === 'other' && this.hasOtherPoolPubkey(senderPubkey)) {
|
|
193
|
+
this.log(`Already have connection from ${senderPubkey.slice(0, 8)} in other pool`);
|
|
194
|
+
return;
|
|
195
|
+
}
|
|
196
|
+
// Tie-breaking: lower UUID initiates
|
|
197
|
+
const shouldInitiate = this.myPeerId.uuid < senderUuid;
|
|
198
|
+
if (shouldInitiate) {
|
|
199
|
+
this.log(`Initiating connection to ${peerId.slice(0, 20)}`);
|
|
200
|
+
await this.createOutboundPeer(peerId, senderPubkey, pool);
|
|
201
|
+
}
|
|
202
|
+
else {
|
|
203
|
+
this.log(`Waiting for offer from ${peerId.slice(0, 20)}`);
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
/**
 * Handle a remote SDP offer. Creates an inbound peer record if we don't
 * already have one (subject to pool capacity), then forwards the offer
 * to the proxy as the remote description.
 *
 * @param {string} peerId - full pubkey:uuid of the offering peer
 * @param {string} pubkey - sender's pubkey (used for pool classification)
 * @param {object} offer - RTCSessionDescriptionInit-shaped { type: 'offer', sdp }
 */
async handleOffer(peerId, pubkey, offer) {
  this.log(`handleOffer from ${pubkey.slice(0, 8)}, peerId: ${peerId.slice(0, 20)}`);
  let peer = this.peers.get(peerId);
  if (!peer) {
    // Unknown peer: apply the same admission rules as handleHello.
    const pool = this.classifyPeer(pubkey);
    if (!this.shouldConnect(pool)) {
      this.log(`Pool ${pool} at capacity, rejecting offer`);
      return;
    }
    if (pool === 'other' && this.hasOtherPoolPubkey(pubkey)) {
      this.log(`Already have connection from ${pubkey.slice(0, 8)} in other pool, rejecting offer`);
      return;
    }
    this.log(`Creating inbound peer for ${pubkey.slice(0, 8)}`);
    peer = this.createPeer(peerId, pubkey, pool, 'inbound');
  }
  this.log(`Setting remote description for ${peerId.slice(0, 20)}`);
  // The answer is created later, once the proxy confirms via rtc:descriptionSet.
  this.sendCommand({ type: 'rtc:setRemoteDescription', peerId, sdp: offer });
}
|
|
225
|
+
async handleAnswer(peerId, answer) {
|
|
226
|
+
const peer = this.peers.get(peerId);
|
|
227
|
+
if (!peer) {
|
|
228
|
+
this.log(`Answer for unknown peer: ${peerId}`);
|
|
229
|
+
return;
|
|
230
|
+
}
|
|
231
|
+
this.sendCommand({ type: 'rtc:setRemoteDescription', peerId, sdp: answer });
|
|
232
|
+
}
|
|
233
|
+
async handleIceCandidate(peerId, candidate) {
|
|
234
|
+
const peer = this.peers.get(peerId);
|
|
235
|
+
if (!peer) {
|
|
236
|
+
return;
|
|
237
|
+
}
|
|
238
|
+
this.sendCommand({ type: 'rtc:addIceCandidate', peerId, candidate });
|
|
239
|
+
}
|
|
240
|
+
// ============================================================================
|
|
241
|
+
// Peer Management
|
|
242
|
+
// ============================================================================
|
|
243
|
+
shouldConnect(pool) {
|
|
244
|
+
const config = this.poolConfig[pool];
|
|
245
|
+
const count = this.getPoolCount(pool);
|
|
246
|
+
return count < config.maxConnections;
|
|
247
|
+
}
|
|
248
|
+
getPoolCount(pool) {
|
|
249
|
+
let count = 0;
|
|
250
|
+
for (const peer of this.peers.values()) {
|
|
251
|
+
if (peer.pool === pool && peer.state !== 'disconnected') {
|
|
252
|
+
count++;
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
return count;
|
|
256
|
+
}
|
|
257
|
+
/**
|
|
258
|
+
* Check if we already have a connection from this pubkey in the 'other' pool.
|
|
259
|
+
* In the 'other' pool, we only allow 1 connection per pubkey to prevent spam.
|
|
260
|
+
*/
|
|
261
|
+
hasOtherPoolPubkey(pubkey) {
|
|
262
|
+
for (const peer of this.peers.values()) {
|
|
263
|
+
if (peer.pool === 'other' && peer.pubkey === pubkey && peer.state !== 'disconnected') {
|
|
264
|
+
return true;
|
|
265
|
+
}
|
|
266
|
+
}
|
|
267
|
+
return false;
|
|
268
|
+
}
|
|
269
|
+
/**
 * Create and register a peer record, and ask the proxy to build the
 * underlying RTCPeerConnection.
 *
 * @param {string} peerId - full pubkey:uuid
 * @param {string} pubkey - peer's Nostr pubkey
 * @param {string} pool - 'follows' | 'other'
 * @param {string} direction - 'outbound' (we dial) | 'inbound' (they dialed)
 * @returns the newly created peer record
 */
createPeer(peerId, pubkey, pool, direction) {
  const peer = {
    peerId,
    pubkey,
    pool,
    direction,
    state: 'connecting',
    dataChannelReady: false,
    // Guards against creating more than one answer per inbound connection.
    answerCreated: false,
    // Per-peer HTL decrement behavior (randomized per connection).
    htlConfig: generatePeerHTLConfig(),
    // hashKey -> { resolve, timeout } for requests we sent to this peer.
    pendingRequests: new Map(),
    // hashKeys this peer asked us for; used to push data back later.
    theirRequests: new LRUCache(200),
    stats: {
      requestsSent: 0,
      requestsReceived: 0,
      responsesSent: 0,
      responsesReceived: 0,
      bytesSent: 0,
      bytesReceived: 0,
      forwardedRequests: 0,
      forwardedResolved: 0,
      forwardedSuppressed: 0,
    },
    createdAt: Date.now(),
    // Backpressure flag mirrored from rtc:bufferHigh / rtc:bufferLow events.
    bufferPaused: false,
    // Requests parked while bufferPaused (bounded in handleRequest).
    deferredRequests: [],
  };
  this.peers.set(peerId, peer);
  this.sendCommand({ type: 'rtc:createPeer', peerId, pubkey });
  return peer;
}
|
|
300
|
+
async createOutboundPeer(peerId, pubkey, pool) {
|
|
301
|
+
this.createPeer(peerId, pubkey, pool, 'outbound');
|
|
302
|
+
// Proxy will create peer and we'll get rtc:peerCreated, then request offer
|
|
303
|
+
}
|
|
304
|
+
/**
 * Tear down a peer: fail its in-flight requests, tell the proxy to close
 * the RTCPeerConnection, and remove it from local tracking.
 */
closePeer(peerId) {
  const peer = this.peers.get(peerId);
  if (!peer)
    return;
  // Clear pending requests: resolve null so callers unblock promptly.
  for (const pending of peer.pendingRequests.values()) {
    clearTimeout(pending.timeout);
    pending.resolve(null);
  }
  peer.state = 'disconnected';
  this.sendCommand({ type: 'rtc:closePeer', peerId });
  this.peers.delete(peerId);
  // Drop any forwarding bookkeeping that referenced this peer.
  this.forwardingMachine.removePeer(peerId);
  this.log(`Closed peer: ${peerId.slice(0, 20)}`);
}
|
|
319
|
+
// ============================================================================
|
|
320
|
+
// Proxy Events
|
|
321
|
+
// ============================================================================
|
|
322
|
+
/**
|
|
323
|
+
* Handle event from main thread proxy
|
|
324
|
+
*/
|
|
325
|
+
handleProxyEvent(event) {
|
|
326
|
+
switch (event.type) {
|
|
327
|
+
case 'rtc:peerCreated':
|
|
328
|
+
this.onPeerCreated(event.peerId);
|
|
329
|
+
break;
|
|
330
|
+
case 'rtc:peerStateChange':
|
|
331
|
+
this.onPeerStateChange(event.peerId, event.state);
|
|
332
|
+
break;
|
|
333
|
+
case 'rtc:peerClosed':
|
|
334
|
+
this.onPeerClosed(event.peerId);
|
|
335
|
+
break;
|
|
336
|
+
case 'rtc:offerCreated':
|
|
337
|
+
this.onOfferCreated(event.peerId, event.sdp);
|
|
338
|
+
break;
|
|
339
|
+
case 'rtc:answerCreated':
|
|
340
|
+
this.onAnswerCreated(event.peerId, event.sdp);
|
|
341
|
+
break;
|
|
342
|
+
case 'rtc:descriptionSet':
|
|
343
|
+
this.onDescriptionSet(event.peerId, event.error);
|
|
344
|
+
break;
|
|
345
|
+
case 'rtc:iceCandidate':
|
|
346
|
+
this.onIceCandidate(event.peerId, event.candidate);
|
|
347
|
+
break;
|
|
348
|
+
case 'rtc:dataChannelOpen':
|
|
349
|
+
this.onDataChannelOpen(event.peerId);
|
|
350
|
+
break;
|
|
351
|
+
case 'rtc:dataChannelMessage':
|
|
352
|
+
this.onDataChannelMessage(event.peerId, event.data);
|
|
353
|
+
break;
|
|
354
|
+
case 'rtc:dataChannelClose':
|
|
355
|
+
this.onDataChannelClose(event.peerId);
|
|
356
|
+
break;
|
|
357
|
+
case 'rtc:dataChannelError':
|
|
358
|
+
this.onDataChannelError(event.peerId, event.error);
|
|
359
|
+
break;
|
|
360
|
+
case 'rtc:bufferHigh':
|
|
361
|
+
this.onBufferHigh(event.peerId);
|
|
362
|
+
break;
|
|
363
|
+
case 'rtc:bufferLow':
|
|
364
|
+
this.onBufferLow(event.peerId);
|
|
365
|
+
break;
|
|
366
|
+
}
|
|
367
|
+
}
|
|
368
|
+
onPeerCreated(peerId) {
|
|
369
|
+
const peer = this.peers.get(peerId);
|
|
370
|
+
if (!peer)
|
|
371
|
+
return;
|
|
372
|
+
// If outbound, create offer
|
|
373
|
+
if (peer.direction === 'outbound') {
|
|
374
|
+
this.sendCommand({ type: 'rtc:createOffer', peerId });
|
|
375
|
+
}
|
|
376
|
+
}
|
|
377
|
+
onPeerStateChange(peerId, state) {
|
|
378
|
+
const peer = this.peers.get(peerId);
|
|
379
|
+
if (!peer)
|
|
380
|
+
return;
|
|
381
|
+
this.log(`Peer ${peerId.slice(0, 20)} state: ${state}`);
|
|
382
|
+
if (state === 'connected') {
|
|
383
|
+
peer.state = 'connected';
|
|
384
|
+
peer.connectedAt = Date.now();
|
|
385
|
+
}
|
|
386
|
+
else if (state === 'failed' || state === 'closed') {
|
|
387
|
+
this.closePeer(peerId);
|
|
388
|
+
}
|
|
389
|
+
}
|
|
390
|
+
onPeerClosed(peerId) {
|
|
391
|
+
this.peers.delete(peerId);
|
|
392
|
+
}
|
|
393
|
+
onOfferCreated(peerId, sdp) {
|
|
394
|
+
const peer = this.peers.get(peerId);
|
|
395
|
+
if (!peer)
|
|
396
|
+
return;
|
|
397
|
+
// Set local description
|
|
398
|
+
this.sendCommand({ type: 'rtc:setLocalDescription', peerId, sdp });
|
|
399
|
+
// Send offer via signaling (use Rust-compatible format: full pubkey:uuid for peerId)
|
|
400
|
+
const msg = {
|
|
401
|
+
type: 'offer',
|
|
402
|
+
sdp: sdp.sdp,
|
|
403
|
+
targetPeerId: peerId,
|
|
404
|
+
peerId: this.myPeerId.toString(), // Full pubkey:uuid
|
|
405
|
+
};
|
|
406
|
+
this.sendSignaling(msg, peer.pubkey);
|
|
407
|
+
}
|
|
408
|
+
onAnswerCreated(peerId, sdp) {
|
|
409
|
+
this.log(`onAnswerCreated for ${peerId.slice(0, 20)}`);
|
|
410
|
+
const peer = this.peers.get(peerId);
|
|
411
|
+
if (!peer) {
|
|
412
|
+
this.log(`onAnswerCreated: peer not found for ${peerId.slice(0, 20)}`);
|
|
413
|
+
return;
|
|
414
|
+
}
|
|
415
|
+
this.sendCommand({ type: 'rtc:setLocalDescription', peerId, sdp });
|
|
416
|
+
this.log(`Sending answer to ${peer.pubkey.slice(0, 8)}`);
|
|
417
|
+
const msg = {
|
|
418
|
+
type: 'answer',
|
|
419
|
+
sdp: sdp.sdp,
|
|
420
|
+
targetPeerId: peerId,
|
|
421
|
+
peerId: this.myPeerId.toString(),
|
|
422
|
+
};
|
|
423
|
+
this.sendSignaling(msg, peer.pubkey);
|
|
424
|
+
}
|
|
425
|
+
/**
 * Proxy confirms a local/remote description was applied.
 * For an inbound peer this is the trigger to create the answer
 * (exactly once, guarded by answerCreated).
 *
 * @param {string} peerId - full pubkey:uuid
 * @param {string|undefined} error - set when the proxy failed to apply the description
 */
onDescriptionSet(peerId, error) {
  if (error) {
    this.log(`Description set error for ${peerId.slice(0, 20)}: ${error}`);
    return;
  }
  const peer = this.peers.get(peerId);
  if (!peer) {
    this.log(`onDescriptionSet: peer not found for ${peerId.slice(0, 20)}`);
    return;
  }
  this.log(`onDescriptionSet for ${peerId.slice(0, 20)}: direction=${peer.direction}, state=${peer.state}, answerCreated=${peer.answerCreated}`);
  // Only the inbound side answers, and only once per connection.
  if (peer.direction === 'inbound' && peer.state === 'connecting' && !peer.answerCreated) {
    peer.answerCreated = true;
    this.log(`Creating answer for ${peerId.slice(0, 20)}`);
    this.sendCommand({ type: 'rtc:createAnswer', peerId });
  }
}
|
|
442
|
+
onIceCandidate(peerId, candidate) {
|
|
443
|
+
if (!candidate || !candidate.candidate)
|
|
444
|
+
return;
|
|
445
|
+
const peer = this.peers.get(peerId);
|
|
446
|
+
if (!peer)
|
|
447
|
+
return;
|
|
448
|
+
// Send candidate via signaling (use Rust-compatible format: full pubkey:uuid for peerId)
|
|
449
|
+
const msg = {
|
|
450
|
+
type: 'candidate',
|
|
451
|
+
candidate: candidate.candidate,
|
|
452
|
+
sdpMLineIndex: candidate.sdpMLineIndex ?? undefined,
|
|
453
|
+
sdpMid: candidate.sdpMid ?? undefined,
|
|
454
|
+
targetPeerId: peerId,
|
|
455
|
+
peerId: this.myPeerId.toString(), // Full pubkey:uuid
|
|
456
|
+
};
|
|
457
|
+
this.sendSignaling(msg, peer.pubkey);
|
|
458
|
+
}
|
|
459
|
+
onDataChannelOpen(peerId) {
|
|
460
|
+
const peer = this.peers.get(peerId);
|
|
461
|
+
if (!peer)
|
|
462
|
+
return;
|
|
463
|
+
peer.dataChannelReady = true;
|
|
464
|
+
this.log(`Data channel open: ${peerId.slice(0, 20)}`);
|
|
465
|
+
}
|
|
466
|
+
onDataChannelClose(peerId) {
|
|
467
|
+
const peer = this.peers.get(peerId);
|
|
468
|
+
if (!peer)
|
|
469
|
+
return;
|
|
470
|
+
peer.dataChannelReady = false;
|
|
471
|
+
this.closePeer(peerId);
|
|
472
|
+
}
|
|
473
|
+
/** Channel errors are logged only; teardown is driven by close/state events. */
onDataChannelError(peerId, error) {
  this.log(`Data channel error for ${peerId}: ${error}`);
}
|
|
476
|
+
onBufferHigh(peerId) {
|
|
477
|
+
const peer = this.peers.get(peerId);
|
|
478
|
+
if (!peer)
|
|
479
|
+
return;
|
|
480
|
+
peer.bufferPaused = true;
|
|
481
|
+
this.log(`Buffer high for ${peerId.slice(0, 20)}, pausing responses`);
|
|
482
|
+
}
|
|
483
|
+
onBufferLow(peerId) {
|
|
484
|
+
const peer = this.peers.get(peerId);
|
|
485
|
+
if (!peer)
|
|
486
|
+
return;
|
|
487
|
+
peer.bufferPaused = false;
|
|
488
|
+
this.log(`Buffer low for ${peerId.slice(0, 20)}, resuming responses`);
|
|
489
|
+
// Process deferred requests
|
|
490
|
+
this.processDeferredRequests(peer);
|
|
491
|
+
}
|
|
492
|
+
async processDeferredRequests(peer) {
|
|
493
|
+
while (!peer.bufferPaused && peer.deferredRequests.length > 0) {
|
|
494
|
+
const req = peer.deferredRequests.shift();
|
|
495
|
+
await this.processRequest(peer, req);
|
|
496
|
+
}
|
|
497
|
+
}
|
|
498
|
+
// ============================================================================
|
|
499
|
+
// Data Protocol
|
|
500
|
+
// ============================================================================
|
|
501
|
+
/**
 * Send raw bytes to a peer through the proxy-owned data channel,
 * accounting for every outbound byte in the peer's stats first.
 */
sendDataToPeer(peer, data) {
  peer.stats.bytesSent += data.byteLength;
  this.sendCommand({ type: 'rtc:sendData', peerId: peer.peerId, data });
}
|
|
505
|
+
/**
 * Handle raw bytes arriving on a peer's data channel: account for them,
 * parse the wire message, and route to the request/response handler.
 * Unparseable messages are logged and dropped.
 */
async onDataChannelMessage(peerId, data) {
  const peer = this.peers.get(peerId);
  if (!peer)
    return;
  // Count all inbound DataChannel bytes (requests + responses + protocol overhead).
  peer.stats.bytesReceived += data.byteLength;
  const msg = parseMessage(data);
  if (!msg) {
    this.log(`Failed to parse message from ${peerId}`);
    return;
  }
  if (msg.type === MSG_TYPE_REQUEST) {
    await this.handleRequest(peer, msg.body);
  }
  else if (msg.type === MSG_TYPE_RESPONSE) {
    await this.handleResponse(peer, msg.body);
  }
}
|
|
523
|
+
async handleRequest(peer, req) {
|
|
524
|
+
peer.stats.requestsReceived++;
|
|
525
|
+
// If buffer is full, defer the request for later processing
|
|
526
|
+
if (peer.bufferPaused) {
|
|
527
|
+
// Limit deferred requests to prevent memory issues
|
|
528
|
+
if (peer.deferredRequests.length < 100) {
|
|
529
|
+
peer.deferredRequests.push(req);
|
|
530
|
+
}
|
|
531
|
+
return;
|
|
532
|
+
}
|
|
533
|
+
await this.processRequest(peer, req);
|
|
534
|
+
}
|
|
535
|
+
/**
 * Serve a data request: answer from the local store when possible,
 * otherwise remember the requester and (HTL permitting) forward the
 * request to other connected peers via the forwarding machine.
 *
 * @param peer - the requesting peer's record
 * @param req - decoded request; req.h is the content hash, req.htl the hops-to-live
 */
async processRequest(peer, req) {
  const hashKey = hashToKey(req.h);
  // Try to get from local store
  const data = await this.localStore.get(req.h);
  if (data) {
    // Send response
    await this.sendResponse(peer, req.h, data);
  }
  else {
    // Track their request for later push
    peer.theirRequests.set(hashKey, {
      hash: req.h,
      requestedAt: Date.now(),
    });
    // Forward if HTL allows
    const htl = req.htl ?? MAX_HTL;
    if (shouldForward(htl)) {
      const newHtl = decrementHTL(htl, peer.htlConfig);
      const decision = this.forwardingMachine.beginForward(hashKey, peer.peerId, this.getForwardTargets(peer.peerId));
      if (decision.kind === 'suppressed') {
        // A forward for this hash is already in flight; this requester
        // was recorded and will be served when it resolves.
        peer.stats.forwardedSuppressed++;
        return;
      }
      if (decision.kind === 'rate_limited') {
        peer.theirRequests.delete(hashKey);
        this.log(`Forward rate-limited for ${peer.peerId.slice(0, 20)} hash ${hashKey.slice(0, 16)}`);
        return;
      }
      if (decision.kind === 'no_targets') {
        // Nobody else to ask; drop the requester marker.
        peer.theirRequests.delete(hashKey);
        return;
      }
      const forwarded = this.forwardRequest(req.h, decision.targets, newHtl);
      if (forwarded <= 0) {
        // All targets turned out unusable: roll back the forward state.
        const requesterIds = this.forwardingMachine.cancelForward(hashKey);
        this.clearRequesterMarkers(hashKey, requesterIds);
        return;
      }
      peer.stats.forwardedRequests++;
    }
  }
}
|
|
577
|
+
/**
 * Handle an inbound data response.
 *
 * Two paths:
 *  - No matching pending request (late or unsolicited): still verify,
 *    cache, and push to peers that asked us for this hash, provided we
 *    recently requested it ourselves or someone is waiting on it.
 *  - Matching pending request: verify the hash, cache, resolve the
 *    waiter, and push to any other requesters.
 */
async handleResponse(peer, res) {
  peer.stats.responsesReceived++;
  const hashKey = hashToKey(res.h);
  const pending = peer.pendingRequests.get(hashKey);
  if (!pending) {
    const hasRequesters = Array.from(this.peers.values()).some(p => p.theirRequests.has(hashKey));
    // Late response: cache if we requested this hash recently
    const requestedAt = this.recentRequests.get(hashKey);
    if (!requestedAt && !hasRequesters)
      return;
    // Our own interest expires after 60s; keep going only if others wait.
    if (requestedAt && Date.now() - requestedAt > 60000) {
      this.recentRequests.delete(hashKey);
      if (!hasRequesters)
        return;
    }
    const valid = await verifyHash(res.d, res.h);
    if (valid) {
      await this.localStore.put(res.h, res.d);
      if (requestedAt) {
        this.recentRequests.delete(hashKey);
      }
      if (hasRequesters) {
        await this.pushToRequesters(res.h, res.d, peer.peerId);
        this.forwardingMachine.resolveForward(hashKey);
      }
    }
    return;
  }
  clearTimeout(pending.timeout);
  peer.pendingRequests.delete(hashKey);
  // Verify hash
  const valid = await verifyHash(res.d, res.h);
  if (valid) {
    // Store locally
    await this.localStore.put(res.h, res.d);
    pending.resolve(res.d);
    // Push to peers who requested this
    await this.pushToRequesters(res.h, res.d, peer.peerId);
    this.forwardingMachine.resolveForward(hashKey);
  }
  else {
    // Data does not hash to the requested key: reject it.
    this.log(`Hash mismatch from ${peer.peerId}`);
    pending.resolve(null);
  }
}
|
|
622
|
+
async sendResponse(peer, hash, data) {
|
|
623
|
+
if (!peer.dataChannelReady)
|
|
624
|
+
return;
|
|
625
|
+
peer.stats.responsesSent++;
|
|
626
|
+
// Fragment if needed
|
|
627
|
+
if (data.length > FRAGMENT_SIZE) {
|
|
628
|
+
const totalFragments = Math.ceil(data.length / FRAGMENT_SIZE);
|
|
629
|
+
for (let i = 0; i < totalFragments; i++) {
|
|
630
|
+
const start = i * FRAGMENT_SIZE;
|
|
631
|
+
const end = Math.min(start + FRAGMENT_SIZE, data.length);
|
|
632
|
+
const fragment = data.slice(start, end);
|
|
633
|
+
const res = createFragmentResponse(hash, fragment, i, totalFragments);
|
|
634
|
+
const encoded = new Uint8Array(encodeResponse(res));
|
|
635
|
+
this.sendDataToPeer(peer, encoded);
|
|
636
|
+
}
|
|
637
|
+
}
|
|
638
|
+
else {
|
|
639
|
+
const res = createResponse(hash, data);
|
|
640
|
+
const encoded = new Uint8Array(encodeResponse(res));
|
|
641
|
+
this.sendDataToPeer(peer, encoded);
|
|
642
|
+
}
|
|
643
|
+
}
|
|
644
|
+
getForwardTargets(excludePeerId) {
|
|
645
|
+
const targets = [];
|
|
646
|
+
for (const [peerId, peer] of this.peers) {
|
|
647
|
+
if (peerId === excludePeerId)
|
|
648
|
+
continue;
|
|
649
|
+
if (!peer.dataChannelReady)
|
|
650
|
+
continue;
|
|
651
|
+
targets.push(peerId);
|
|
652
|
+
}
|
|
653
|
+
return targets;
|
|
654
|
+
}
|
|
655
|
+
forwardRequest(hash, targetPeerIds, htl) {
|
|
656
|
+
const hashKey = hashToKey(hash);
|
|
657
|
+
let forwarded = 0;
|
|
658
|
+
for (const peerId of targetPeerIds) {
|
|
659
|
+
const peer = this.peers.get(peerId);
|
|
660
|
+
if (!peer || !peer.dataChannelReady)
|
|
661
|
+
continue;
|
|
662
|
+
// Set up pending request so we can process the response
|
|
663
|
+
const timeout = setTimeout(() => {
|
|
664
|
+
peer.pendingRequests.delete(hashKey);
|
|
665
|
+
}, this.requestTimeout);
|
|
666
|
+
peer.pendingRequests.set(hashKey, {
|
|
667
|
+
hash,
|
|
668
|
+
resolve: () => {
|
|
669
|
+
// Response will be pushed to original requester via pushToRequesters
|
|
670
|
+
},
|
|
671
|
+
timeout,
|
|
672
|
+
});
|
|
673
|
+
const req = createRequest(hash, htl);
|
|
674
|
+
const encoded = new Uint8Array(encodeRequest(req));
|
|
675
|
+
this.sendDataToPeer(peer, encoded);
|
|
676
|
+
forwarded++;
|
|
677
|
+
}
|
|
678
|
+
return forwarded;
|
|
679
|
+
}
|
|
680
|
+
async pushToRequesters(hash, data, excludePeerId) {
|
|
681
|
+
const hashKey = hashToKey(hash);
|
|
682
|
+
for (const [peerId, peer] of this.peers) {
|
|
683
|
+
if (peerId === excludePeerId)
|
|
684
|
+
continue;
|
|
685
|
+
const theirReq = peer.theirRequests.get(hashKey);
|
|
686
|
+
if (theirReq) {
|
|
687
|
+
peer.theirRequests.delete(hashKey);
|
|
688
|
+
peer.stats.forwardedResolved++;
|
|
689
|
+
await this.sendResponse(peer, hash, data);
|
|
690
|
+
}
|
|
691
|
+
}
|
|
692
|
+
}
|
|
693
|
+
clearRequesterMarkers(hashKey, requesterIds) {
|
|
694
|
+
for (const requesterId of requesterIds) {
|
|
695
|
+
this.peers.get(requesterId)?.theirRequests.delete(hashKey);
|
|
696
|
+
}
|
|
697
|
+
}
|
|
698
|
+
// ============================================================================
|
|
699
|
+
// Public API
|
|
700
|
+
// ============================================================================
|
|
701
|
+
/**
|
|
702
|
+
* Request data from peers
|
|
703
|
+
*/
|
|
704
|
+
async get(hash) {
|
|
705
|
+
// Try connected peers
|
|
706
|
+
const connectedPeers = Array.from(this.peers.values())
|
|
707
|
+
.filter(p => p.dataChannelReady);
|
|
708
|
+
if (connectedPeers.length === 0) {
|
|
709
|
+
return null;
|
|
710
|
+
}
|
|
711
|
+
// Send request to all peers, first response wins
|
|
712
|
+
return new Promise((resolve) => {
|
|
713
|
+
let resolved = false;
|
|
714
|
+
const hashKey = hashToKey(hash);
|
|
715
|
+
this.recentRequests.set(hashKey, Date.now());
|
|
716
|
+
for (const peer of connectedPeers) {
|
|
717
|
+
const timeout = setTimeout(() => {
|
|
718
|
+
peer.pendingRequests.delete(hashKey);
|
|
719
|
+
checkDone();
|
|
720
|
+
}, this.requestTimeout);
|
|
721
|
+
peer.pendingRequests.set(hashKey, {
|
|
722
|
+
hash,
|
|
723
|
+
resolve: (data) => {
|
|
724
|
+
if (!resolved && data) {
|
|
725
|
+
resolved = true;
|
|
726
|
+
resolve(data);
|
|
727
|
+
}
|
|
728
|
+
checkDone();
|
|
729
|
+
},
|
|
730
|
+
timeout,
|
|
731
|
+
});
|
|
732
|
+
peer.stats.requestsSent++;
|
|
733
|
+
const req = createRequest(hash, MAX_HTL);
|
|
734
|
+
const encoded = new Uint8Array(encodeRequest(req));
|
|
735
|
+
this.sendDataToPeer(peer, encoded);
|
|
736
|
+
}
|
|
737
|
+
let pending = connectedPeers.length;
|
|
738
|
+
const checkDone = () => {
|
|
739
|
+
pending--;
|
|
740
|
+
if (pending === 0 && !resolved) {
|
|
741
|
+
resolve(null);
|
|
742
|
+
}
|
|
743
|
+
};
|
|
744
|
+
});
|
|
745
|
+
}
|
|
746
|
+
/**
|
|
747
|
+
* Get peer stats for UI
|
|
748
|
+
*/
|
|
749
|
+
getPeerStats() {
|
|
750
|
+
return Array.from(this.peers.values()).map(peer => ({
|
|
751
|
+
peerId: peer.peerId,
|
|
752
|
+
pubkey: peer.pubkey,
|
|
753
|
+
connected: peer.state === 'connected' && peer.dataChannelReady,
|
|
754
|
+
pool: peer.pool,
|
|
755
|
+
requestsSent: peer.stats.requestsSent,
|
|
756
|
+
requestsReceived: peer.stats.requestsReceived,
|
|
757
|
+
responsesSent: peer.stats.responsesSent,
|
|
758
|
+
responsesReceived: peer.stats.responsesReceived,
|
|
759
|
+
bytesSent: peer.stats.bytesSent,
|
|
760
|
+
bytesReceived: peer.stats.bytesReceived,
|
|
761
|
+
forwardedRequests: peer.stats.forwardedRequests,
|
|
762
|
+
forwardedResolved: peer.stats.forwardedResolved,
|
|
763
|
+
forwardedSuppressed: peer.stats.forwardedSuppressed,
|
|
764
|
+
}));
|
|
765
|
+
}
|
|
766
|
+
/**
|
|
767
|
+
* Get connected peer count
|
|
768
|
+
*/
|
|
769
|
+
getConnectedCount() {
|
|
770
|
+
let count = 0;
|
|
771
|
+
for (const peer of this.peers.values()) {
|
|
772
|
+
if (peer.state === 'connected' && peer.dataChannelReady) {
|
|
773
|
+
count++;
|
|
774
|
+
}
|
|
775
|
+
}
|
|
776
|
+
return count;
|
|
777
|
+
}
|
|
778
|
+
/**
|
|
779
|
+
* Set pool configuration
|
|
780
|
+
*/
|
|
781
|
+
setPoolConfig(config) {
|
|
782
|
+
this.poolConfig = {
|
|
783
|
+
follows: { maxConnections: config.follows.max, satisfiedConnections: config.follows.satisfied },
|
|
784
|
+
other: { maxConnections: config.other.max, satisfiedConnections: config.other.satisfied },
|
|
785
|
+
};
|
|
786
|
+
this.log('Pool config updated:', this.poolConfig);
|
|
787
|
+
// Re-broadcast hello to trigger peer discovery with new limits
|
|
788
|
+
this.sendHello();
|
|
789
|
+
}
|
|
790
|
+
/**
|
|
791
|
+
* Update identity (pubkey) and restart signaling if already running.
|
|
792
|
+
* This keeps peerId consistent with the current account.
|
|
793
|
+
*/
|
|
794
|
+
setIdentity(pubkey) {
|
|
795
|
+
if (this.myPeerId.pubkey === pubkey)
|
|
796
|
+
return;
|
|
797
|
+
const wasStarted = !!this.helloInterval;
|
|
798
|
+
this.stop();
|
|
799
|
+
this.myPeerId = new PeerId(pubkey, generateUuid());
|
|
800
|
+
if (wasStarted) {
|
|
801
|
+
this.start();
|
|
802
|
+
}
|
|
803
|
+
}
|
|
804
|
+
// ============================================================================
|
|
805
|
+
// Helpers
|
|
806
|
+
// ============================================================================
|
|
807
|
+
log(...args) {
|
|
808
|
+
if (this.debug) {
|
|
809
|
+
console.log('[WebRTC]', ...args);
|
|
810
|
+
}
|
|
811
|
+
}
|
|
812
|
+
}
|
|
813
|
+
//# sourceMappingURL=webrtcController.js.map
|