@hashtree/worker 0.2.1 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/app-runtime.d.ts +60 -0
- package/dist/app-runtime.d.ts.map +1 -0
- package/dist/app-runtime.js +271 -0
- package/dist/app-runtime.js.map +1 -0
- package/dist/capabilities/blossomBandwidthTracker.d.ts +26 -0
- package/dist/capabilities/blossomBandwidthTracker.d.ts.map +1 -0
- package/dist/capabilities/blossomBandwidthTracker.js +53 -0
- package/dist/capabilities/blossomBandwidthTracker.js.map +1 -0
- package/dist/capabilities/blossomTransport.d.ts +22 -0
- package/dist/capabilities/blossomTransport.d.ts.map +1 -0
- package/dist/capabilities/blossomTransport.js +139 -0
- package/dist/capabilities/blossomTransport.js.map +1 -0
- package/dist/capabilities/connectivity.d.ts +3 -0
- package/dist/capabilities/connectivity.d.ts.map +1 -0
- package/dist/capabilities/connectivity.js +49 -0
- package/dist/capabilities/connectivity.js.map +1 -0
- package/dist/capabilities/idbStorage.d.ts +25 -0
- package/dist/capabilities/idbStorage.d.ts.map +1 -0
- package/dist/capabilities/idbStorage.js +73 -0
- package/dist/capabilities/idbStorage.js.map +1 -0
- package/dist/capabilities/meshRouterStore.d.ts +71 -0
- package/dist/capabilities/meshRouterStore.d.ts.map +1 -0
- package/dist/capabilities/meshRouterStore.js +316 -0
- package/dist/capabilities/meshRouterStore.js.map +1 -0
- package/dist/capabilities/rootResolver.d.ts +10 -0
- package/dist/capabilities/rootResolver.d.ts.map +1 -0
- package/dist/capabilities/rootResolver.js +392 -0
- package/dist/capabilities/rootResolver.js.map +1 -0
- package/dist/client-id.d.ts +18 -0
- package/dist/client-id.d.ts.map +1 -0
- package/dist/client-id.js +98 -0
- package/dist/client-id.js.map +1 -0
- package/dist/client.d.ts +61 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/client.js +417 -0
- package/dist/client.js.map +1 -0
- package/dist/entry.d.ts +2 -0
- package/dist/entry.d.ts.map +1 -0
- package/dist/entry.js +3 -0
- package/dist/entry.js.map +1 -0
- package/dist/htree-path.d.ts +13 -0
- package/dist/htree-path.d.ts.map +1 -0
- package/dist/htree-path.js +38 -0
- package/dist/htree-path.js.map +1 -0
- package/dist/htree-url.d.ts +22 -0
- package/dist/htree-url.d.ts.map +1 -0
- package/dist/htree-url.js +118 -0
- package/dist/htree-url.js.map +1 -0
- package/dist/index.d.ts +17 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +8 -0
- package/dist/index.js.map +1 -0
- package/dist/mediaStreaming.d.ts +7 -0
- package/dist/mediaStreaming.d.ts.map +1 -0
- package/dist/mediaStreaming.js +48 -0
- package/dist/mediaStreaming.js.map +1 -0
- package/dist/p2p/boundedQueue.d.ts +79 -0
- package/dist/p2p/boundedQueue.d.ts.map +1 -0
- package/dist/p2p/boundedQueue.js +134 -0
- package/dist/p2p/boundedQueue.js.map +1 -0
- package/dist/p2p/errorMessage.d.ts +5 -0
- package/dist/p2p/errorMessage.d.ts.map +1 -0
- package/dist/p2p/errorMessage.js +7 -0
- package/dist/p2p/errorMessage.js.map +1 -0
- package/dist/p2p/index.d.ts +8 -0
- package/dist/p2p/index.d.ts.map +1 -0
- package/dist/p2p/index.js +6 -0
- package/dist/p2p/index.js.map +1 -0
- package/dist/p2p/lruCache.d.ts +26 -0
- package/dist/p2p/lruCache.d.ts.map +1 -0
- package/dist/p2p/lruCache.js +65 -0
- package/dist/p2p/lruCache.js.map +1 -0
- package/dist/p2p/meshQueryRouter.d.ts +57 -0
- package/dist/p2p/meshQueryRouter.d.ts.map +1 -0
- package/dist/p2p/meshQueryRouter.js +264 -0
- package/dist/p2p/meshQueryRouter.js.map +1 -0
- package/dist/p2p/protocol.d.ts +10 -0
- package/dist/p2p/protocol.d.ts.map +1 -0
- package/dist/p2p/protocol.js +2 -0
- package/dist/p2p/protocol.js.map +1 -0
- package/dist/p2p/queryForwardingMachine.d.ts +46 -0
- package/dist/p2p/queryForwardingMachine.d.ts.map +1 -0
- package/dist/p2p/queryForwardingMachine.js +144 -0
- package/dist/p2p/queryForwardingMachine.js.map +1 -0
- package/dist/p2p/signaling.d.ts +63 -0
- package/dist/p2p/signaling.d.ts.map +1 -0
- package/dist/p2p/signaling.js +185 -0
- package/dist/p2p/signaling.js.map +1 -0
- package/dist/p2p/uploadRateLimiter.d.ts +21 -0
- package/dist/p2p/uploadRateLimiter.d.ts.map +1 -0
- package/dist/p2p/uploadRateLimiter.js +62 -0
- package/dist/p2p/uploadRateLimiter.js.map +1 -0
- package/dist/p2p/webrtcController.d.ts +176 -0
- package/dist/p2p/webrtcController.d.ts.map +1 -0
- package/dist/p2p/webrtcController.js +938 -0
- package/dist/p2p/webrtcController.js.map +1 -0
- package/dist/p2p/webrtcProxy.d.ts +62 -0
- package/dist/p2p/webrtcProxy.d.ts.map +1 -0
- package/dist/p2p/webrtcProxy.js +447 -0
- package/dist/p2p/webrtcProxy.js.map +1 -0
- package/dist/privacyGuards.d.ts +14 -0
- package/dist/privacyGuards.d.ts.map +1 -0
- package/dist/privacyGuards.js +27 -0
- package/dist/privacyGuards.js.map +1 -0
- package/dist/protocol.d.ts +225 -0
- package/dist/protocol.d.ts.map +1 -0
- package/dist/protocol.js +2 -0
- package/dist/protocol.js.map +1 -0
- package/dist/relay/identity.d.ts +36 -0
- package/dist/relay/identity.d.ts.map +1 -0
- package/dist/relay/identity.js +78 -0
- package/dist/relay/identity.js.map +1 -0
- package/dist/relay/mediaHandler.d.ts +64 -0
- package/dist/relay/mediaHandler.d.ts.map +1 -0
- package/dist/relay/mediaHandler.js +1285 -0
- package/dist/relay/mediaHandler.js.map +1 -0
- package/dist/relay/ndk.d.ts +96 -0
- package/dist/relay/ndk.d.ts.map +1 -0
- package/dist/relay/ndk.js +502 -0
- package/dist/relay/ndk.js.map +1 -0
- package/dist/relay/nostr-wasm.d.ts +14 -0
- package/dist/relay/nostr-wasm.d.ts.map +1 -0
- package/dist/relay/nostr-wasm.js +246 -0
- package/dist/relay/nostr-wasm.js.map +1 -0
- package/dist/relay/nostr.d.ts +60 -0
- package/dist/relay/nostr.d.ts.map +1 -0
- package/dist/relay/nostr.js +207 -0
- package/dist/relay/nostr.js.map +1 -0
- package/dist/relay/protocol.d.ts +592 -0
- package/dist/relay/protocol.d.ts.map +1 -0
- package/dist/relay/protocol.js +16 -0
- package/dist/relay/protocol.js.map +1 -0
- package/dist/relay/publicAssetUrl.d.ts +6 -0
- package/dist/relay/publicAssetUrl.d.ts.map +1 -0
- package/dist/relay/publicAssetUrl.js +14 -0
- package/dist/relay/publicAssetUrl.js.map +1 -0
- package/dist/relay/rootPathResolver.d.ts +9 -0
- package/dist/relay/rootPathResolver.d.ts.map +1 -0
- package/dist/relay/rootPathResolver.js +32 -0
- package/dist/relay/rootPathResolver.js.map +1 -0
- package/dist/relay/signing.d.ts +50 -0
- package/dist/relay/signing.d.ts.map +1 -0
- package/dist/relay/signing.js +299 -0
- package/dist/relay/signing.js.map +1 -0
- package/dist/relay/treeRootCache.d.ts +86 -0
- package/dist/relay/treeRootCache.d.ts.map +1 -0
- package/dist/relay/treeRootCache.js +269 -0
- package/dist/relay/treeRootCache.js.map +1 -0
- package/dist/relay/treeRootSubscription.d.ts +55 -0
- package/dist/relay/treeRootSubscription.d.ts.map +1 -0
- package/dist/relay/treeRootSubscription.js +478 -0
- package/dist/relay/treeRootSubscription.js.map +1 -0
- package/dist/relay/utils/constants.d.ts +76 -0
- package/dist/relay/utils/constants.d.ts.map +1 -0
- package/dist/relay/utils/constants.js +113 -0
- package/dist/relay/utils/constants.js.map +1 -0
- package/dist/relay/utils/errorMessage.d.ts +5 -0
- package/dist/relay/utils/errorMessage.d.ts.map +1 -0
- package/dist/relay/utils/errorMessage.js +8 -0
- package/dist/relay/utils/errorMessage.js.map +1 -0
- package/dist/relay/utils/lruCache.d.ts +26 -0
- package/dist/relay/utils/lruCache.d.ts.map +1 -0
- package/dist/relay/utils/lruCache.js +66 -0
- package/dist/relay/utils/lruCache.js.map +1 -0
- package/dist/relay/webrtc.d.ts +2 -0
- package/dist/relay/webrtc.d.ts.map +1 -0
- package/dist/relay/webrtc.js +3 -0
- package/dist/relay/webrtc.js.map +1 -0
- package/dist/relay/webrtcSignaling.d.ts +37 -0
- package/dist/relay/webrtcSignaling.d.ts.map +1 -0
- package/dist/relay/webrtcSignaling.js +86 -0
- package/dist/relay/webrtcSignaling.js.map +1 -0
- package/dist/relay/worker.d.ts +12 -0
- package/dist/relay/worker.d.ts.map +1 -0
- package/dist/relay/worker.js +1540 -0
- package/dist/relay/worker.js.map +1 -0
- package/dist/relay-client.d.ts +31 -0
- package/dist/relay-client.d.ts.map +1 -0
- package/dist/relay-client.js +197 -0
- package/dist/relay-client.js.map +1 -0
- package/dist/relay-entry.d.ts +2 -0
- package/dist/relay-entry.d.ts.map +1 -0
- package/dist/relay-entry.js +2 -0
- package/dist/relay-entry.js.map +1 -0
- package/dist/runtime-network.d.ts +23 -0
- package/dist/runtime-network.d.ts.map +1 -0
- package/dist/runtime-network.js +105 -0
- package/dist/runtime-network.js.map +1 -0
- package/dist/runtime.d.ts +24 -0
- package/dist/runtime.d.ts.map +1 -0
- package/dist/runtime.js +126 -0
- package/dist/runtime.js.map +1 -0
- package/dist/transferableBytes.d.ts +2 -0
- package/dist/transferableBytes.d.ts.map +1 -0
- package/dist/transferableBytes.js +6 -0
- package/dist/transferableBytes.js.map +1 -0
- package/dist/tree-root.d.ts +201 -0
- package/dist/tree-root.d.ts.map +1 -0
- package/dist/tree-root.js +632 -0
- package/dist/tree-root.js.map +1 -0
- package/dist/types.d.ts +2 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/dist/worker.d.ts +9 -0
- package/dist/worker.d.ts.map +1 -0
- package/dist/worker.js +793 -0
- package/dist/worker.js.map +1 -0
- package/package.json +14 -14
- package/src/capabilities/blossomTransport.ts +1 -1
- package/src/worker.ts +2 -1
|
@@ -0,0 +1,938 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Worker WebRTC Controller
|
|
3
|
+
*
|
|
4
|
+
* Controls WebRTC connections from the worker thread.
|
|
5
|
+
* Main thread proxy executes RTCPeerConnection operations.
|
|
6
|
+
*
|
|
7
|
+
* Worker owns:
|
|
8
|
+
* - Peer state tracking
|
|
9
|
+
* - Connection lifecycle decisions
|
|
10
|
+
* - Data protocol (request/response)
|
|
11
|
+
* - Signaling message handling
|
|
12
|
+
*
|
|
13
|
+
* Main thread proxy owns:
|
|
14
|
+
* - RTCPeerConnection instances (not available in workers)
|
|
15
|
+
* - Data channel I/O
|
|
16
|
+
*/
|
|
17
|
+
import { fromHex, sha256, toHex } from '@hashtree/core';
|
|
18
|
+
import { MAX_HTL, MSG_TYPE_REQUEST, MSG_TYPE_RESPONSE, FRAGMENT_SIZE, PeerId, encodeRequest, encodeResponse, parseMessage, createRequest, createResponse, createFragmentResponse, hashToKey, verifyHash, generatePeerHTLConfig, PeerSelector, buildHedgedWavePlan, normalizeDispatchConfig, syncSelectorPeers, } from '@hashtree/nostr';
|
|
19
|
+
import { LRUCache } from './lruCache.js';
|
|
20
|
+
import { MeshQueryRouter, encodeForwardRequest } from './meshQueryRouter.js';
|
|
21
|
+
// Storage slot key under which the latest peer-metadata pointer record is kept.
const PEER_METADATA_POINTER_SLOT_KEY = 'hashtree-webrtc/peer-metadata/latest/v1';
// Default hedged-dispatch settings used when config.requestDispatch is absent:
// start with 2 parallel queries, add 1 more every 120ms, never exceed 8.
const DEFAULT_REQUEST_DISPATCH = {
    initialFanout: 2,
    hedgeFanout: 1,
    maxFanout: 8,
    hedgeIntervalMs: 120,
};
// Rank penalty added per in-flight request when ordering candidate peers,
// so loaded peers sort after idle ones (see orderedConnectedPeers).
const ACTIVE_REQUEST_RANK_PENALTY = 3;
// ============================================================================
// Controller
// ============================================================================
export class WebRTCController {
    // Our own endpoint identity (PeerId built from the configured pubkey).
    myPeerId;
    // peerId -> peer record for every tracked connection.
    peers = new Map();
    // peerId -> ICE candidates that arrived before the peer record existed;
    // flushed in onPeerCreated.
    pendingRemoteCandidates = new Map();
    // Local block store consulted/updated by the mesh router.
    localStore;
    // Posts rtc:* commands to the main-thread proxy.
    sendCommand;
    // Async sender for signaling messages (returns a promise).
    sendSignaling;
    // (pubkey) => 'follows' | 'other'; set up in the constructor.
    classifyPeer;
    // Per-request timeout in ms (default 1000 — see constructor).
    requestTimeout;
    // When true, log() output is enabled.
    debug;
    // Dedupe cache of recently seen request keys.
    recentRequests = new LRUCache(1000);
    // peerId -> count of in-flight requests, used for load-aware ranking.
    activePeerRequests = new Map();
    meshRouter;
    peerSelector;
    // Selection strategy / fairness / dispatch settings (see constructor).
    routing;
    // Pool configuration - reasonable defaults, settings sync will override
    poolConfig = {
        follows: { maxConnections: 20, satisfiedConnections: 10 },
        other: { maxConnections: 16, satisfiedConnections: 8 },
    };
    // Hello interval - 5s for faster peer discovery
    helloInterval;
    HELLO_INTERVAL = 5000;
    /**
     * Build the controller from a config object.
     *
     * Reads: pubkey, localStore, sendCommand, sendSignaling, requestTimeout,
     * debug, requestSelectionStrategy, requestFairnessEnabled, requestDispatch,
     * upstreamFetch, forwardRateLimit, getFollows.
     */
    constructor(config) {
        this.myPeerId = new PeerId(config.pubkey);
        this.localStore = config.localStore;
        this.sendCommand = config.sendCommand;
        this.sendSignaling = config.sendSignaling;
        this.requestTimeout = config.requestTimeout ?? 1000;
        this.debug = config.debug ?? false;
        // Routing knobs fall back to module defaults when unset.
        this.routing = {
            selectionStrategy: config.requestSelectionStrategy ?? 'titForTat',
            fairnessEnabled: config.requestFairnessEnabled ?? true,
            dispatch: config.requestDispatch ?? DEFAULT_REQUEST_DISPATCH,
        };
        this.peerSelector = PeerSelector.withStrategy(this.routing.selectionStrategy);
        this.peerSelector.setFairness(this.routing.fairnessEnabled);
        // Mesh router delegates actual peer querying back to this controller.
        this.meshRouter = new MeshQueryRouter({
            localStore: this.localStore,
            requestTimeoutMs: this.requestTimeout,
            upstreamFetch: config.upstreamFetch,
            queryPeers: (hash, options) => this.queryPeersWithDispatch(hash, options),
            maxForwardsPerPeerWindow: config.forwardRateLimit?.maxForwardsPerPeerWindow,
            forwardRateLimitWindowMs: config.forwardRateLimit?.windowMs,
        });
        // Default classifier: check if pubkey is in follows
        const getFollows = config.getFollows ?? (() => new Set());
        this.classifyPeer = (pubkey) => {
            const follows = getFollows();
            const isFollow = follows.has(pubkey);
            return isFollow ? 'follows' : 'other';
        };
    }
// ============================================================================
|
|
86
|
+
// Lifecycle
|
|
87
|
+
// ============================================================================
|
|
88
|
+
start() {
|
|
89
|
+
this.log('Starting WebRTC controller');
|
|
90
|
+
// Send hello periodically
|
|
91
|
+
this.helloInterval = setInterval(() => {
|
|
92
|
+
this.sendHello();
|
|
93
|
+
}, this.HELLO_INTERVAL);
|
|
94
|
+
// Send initial hello
|
|
95
|
+
this.sendHello();
|
|
96
|
+
}
|
|
97
|
+
stop() {
|
|
98
|
+
this.log('Stopping WebRTC controller');
|
|
99
|
+
if (this.helloInterval) {
|
|
100
|
+
clearInterval(this.helloInterval);
|
|
101
|
+
this.helloInterval = undefined;
|
|
102
|
+
}
|
|
103
|
+
// Close all peers
|
|
104
|
+
for (const peerId of this.peers.keys()) {
|
|
105
|
+
this.closePeer(peerId);
|
|
106
|
+
}
|
|
107
|
+
this.meshRouter.stop();
|
|
108
|
+
}
|
|
109
|
+
// ============================================================================
|
|
110
|
+
// Signaling
|
|
111
|
+
// ============================================================================
|
|
112
|
+
sendHello() {
|
|
113
|
+
const msg = {
|
|
114
|
+
type: 'hello',
|
|
115
|
+
peerId: this.myPeerId.toString(),
|
|
116
|
+
};
|
|
117
|
+
this.sendSignaling(msg).catch(err => {
|
|
118
|
+
console.error('[WebRTC] sendSignaling error:', err);
|
|
119
|
+
});
|
|
120
|
+
}
|
|
121
|
+
/**
|
|
122
|
+
* Public method to trigger a hello broadcast.
|
|
123
|
+
* Used for testing to force peer discovery after follows are set up.
|
|
124
|
+
*/
|
|
125
|
+
broadcastHello() {
|
|
126
|
+
this.sendHello();
|
|
127
|
+
}
|
|
128
|
+
    /**
     * Handle incoming signaling message (from Nostr kind 25050)
     *
     * `peerId` is the remote endpoint identity.
     *
     * Each SDP/ICE case drops self-originated messages and then checks
     * targeting via isMessageForUs before dispatching.
     */
    async handleSignalingMessage(msg, senderPubkey) {
        this.log(`Signaling from ${senderPubkey.slice(0, 8)}:`, msg.type);
        switch (msg.type) {
            case 'hello':
                await this.handleHello(senderPubkey);
                break;
            case 'offer':
                if (msg.peerId === this.myPeerId.toString()) {
                    return; // Skip messages from ourselves
                }
                if (this.isMessageForUs(msg)) {
                    // Construct RTCSessionDescriptionInit from flat sdp field
                    await this.handleOffer(msg.peerId, senderPubkey, { type: 'offer', sdp: msg.sdp });
                }
                break;
            case 'answer':
                if (msg.peerId === this.myPeerId.toString()) {
                    return;
                }
                if (this.isMessageForUs(msg)) {
                    // Construct RTCSessionDescriptionInit from flat sdp field
                    await this.handleAnswer(msg.peerId, { type: 'answer', sdp: msg.sdp });
                }
                break;
            case 'candidate':
                if (msg.peerId === this.myPeerId.toString()) {
                    return;
                }
                if (this.isMessageForUs(msg)) {
                    // Construct RTCIceCandidateInit from flat fields
                    await this.handleIceCandidate(msg.peerId, {
                        candidate: msg.candidate,
                        sdpMLineIndex: msg.sdpMLineIndex,
                        sdpMid: msg.sdpMid,
                    });
                }
                break;
            case 'candidates':
                if (msg.peerId === this.myPeerId.toString()) {
                    return;
                }
                if (this.isMessageForUs(msg)) {
                    // Batched form: apply candidates sequentially, in order.
                    for (const c of msg.candidates) {
                        await this.handleIceCandidate(msg.peerId, {
                            candidate: c.candidate,
                            sdpMLineIndex: c.sdpMLineIndex,
                            sdpMid: c.sdpMid,
                        });
                    }
                }
                break;
        }
    }
isMessageForUs(msg) {
|
|
187
|
+
if ('targetPeerId' in msg && msg.targetPeerId) {
|
|
188
|
+
return msg.targetPeerId === this.myPeerId.toString();
|
|
189
|
+
}
|
|
190
|
+
return true;
|
|
191
|
+
}
|
|
192
|
+
async handleHello(senderPubkey) {
|
|
193
|
+
const peerId = new PeerId(senderPubkey).toString();
|
|
194
|
+
// Already connected?
|
|
195
|
+
if (this.peers.has(peerId)) {
|
|
196
|
+
return;
|
|
197
|
+
}
|
|
198
|
+
// Check pool limits
|
|
199
|
+
const pool = this.classifyPeer(senderPubkey);
|
|
200
|
+
if (!this.shouldConnect(pool)) {
|
|
201
|
+
this.log(`Pool ${pool} at capacity, ignoring hello`);
|
|
202
|
+
return;
|
|
203
|
+
}
|
|
204
|
+
// In 'other' pool, only allow 1 connection per pubkey
|
|
205
|
+
if (pool === 'other' && this.hasOtherPoolPubkey(senderPubkey)) {
|
|
206
|
+
this.log(`Already have connection from ${senderPubkey.slice(0, 8)} in other pool`);
|
|
207
|
+
return;
|
|
208
|
+
}
|
|
209
|
+
// Tie-breaking: lower endpoint ID initiates
|
|
210
|
+
const shouldInitiate = this.myPeerId.toString() < peerId;
|
|
211
|
+
if (shouldInitiate) {
|
|
212
|
+
this.log(`Initiating connection to ${peerId.slice(0, 20)}`);
|
|
213
|
+
await this.createOutboundPeer(peerId, senderPubkey, pool);
|
|
214
|
+
}
|
|
215
|
+
else {
|
|
216
|
+
this.log(`Waiting for offer from ${peerId.slice(0, 20)}`);
|
|
217
|
+
}
|
|
218
|
+
}
|
|
219
|
+
    /**
     * Handle a remote SDP offer.
     *
     * Creates an inbound peer record if none exists (subject to pool limits),
     * resolves offer/offer glare via the perfect-negotiation rule, and then
     * asks the proxy to apply the remote description.
     */
    async handleOffer(peerId, pubkey, offer) {
        this.log(`handleOffer from ${pubkey.slice(0, 8)}, peerId: ${peerId.slice(0, 20)}`);
        let peer = this.peers.get(peerId);
        if (!peer) {
            const pool = this.classifyPeer(pubkey);
            if (!this.shouldConnect(pool)) {
                this.log(`Pool ${pool} at capacity, rejecting offer`);
                return;
            }
            if (pool === 'other' && this.hasOtherPoolPubkey(pubkey)) {
                this.log(`Already have connection from ${pubkey.slice(0, 8)} in other pool, rejecting offer`);
                return;
            }
            this.log(`Creating inbound peer for ${pubkey.slice(0, 8)}`);
            peer = this.createPeer(peerId, pubkey, pool, 'inbound');
        }
        else if (peer.direction === 'outbound' && peer.state === 'connecting') {
            // Glare: we sent an offer and received one too. The peer with the
            // lower endpoint id is "polite" and yields.
            const isPolite = this.myPeerId.toString() < peerId;
            if (!isPolite) {
                this.log(`Ignoring offer collision from ${pubkey.slice(0, 8)} as impolite peer`);
                return;
            }
            // Perfect negotiation: the polite peer abandons its local offer and
            // switches into answerer mode for the remote offer.
            peer.direction = 'inbound';
            peer.answerCreated = false;
        }
        this.log(`Setting remote description for ${peerId.slice(0, 20)}`);
        this.sendCommand({ type: 'rtc:setRemoteDescription', peerId, sdp: offer });
    }
async handleAnswer(peerId, answer) {
|
|
250
|
+
const peer = this.peers.get(peerId);
|
|
251
|
+
if (!peer) {
|
|
252
|
+
this.log(`Answer for unknown peer: ${peerId}`);
|
|
253
|
+
return;
|
|
254
|
+
}
|
|
255
|
+
this.sendCommand({ type: 'rtc:setRemoteDescription', peerId, sdp: answer });
|
|
256
|
+
}
|
|
257
|
+
async handleIceCandidate(peerId, candidate) {
|
|
258
|
+
const peer = this.peers.get(peerId);
|
|
259
|
+
if (!peer) {
|
|
260
|
+
const queued = this.pendingRemoteCandidates.get(peerId) ?? [];
|
|
261
|
+
queued.push(candidate);
|
|
262
|
+
this.pendingRemoteCandidates.set(peerId, queued);
|
|
263
|
+
return;
|
|
264
|
+
}
|
|
265
|
+
this.sendCommand({ type: 'rtc:addIceCandidate', peerId, candidate });
|
|
266
|
+
}
|
|
267
|
+
// ============================================================================
|
|
268
|
+
// Peer Management
|
|
269
|
+
// ============================================================================
|
|
270
|
+
shouldConnect(pool) {
|
|
271
|
+
const config = this.poolConfig[pool];
|
|
272
|
+
const count = this.getPoolCount(pool);
|
|
273
|
+
return count < config.maxConnections;
|
|
274
|
+
}
|
|
275
|
+
getPoolCount(pool) {
|
|
276
|
+
let count = 0;
|
|
277
|
+
for (const peer of this.peers.values()) {
|
|
278
|
+
if (peer.pool === pool && peer.state !== 'disconnected') {
|
|
279
|
+
count++;
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
return count;
|
|
283
|
+
}
|
|
284
|
+
/**
|
|
285
|
+
* Check if we already have a connection from this pubkey in the 'other' pool.
|
|
286
|
+
* In the 'other' pool, we only allow 1 connection per pubkey to prevent spam.
|
|
287
|
+
*/
|
|
288
|
+
hasOtherPoolPubkey(pubkey) {
|
|
289
|
+
for (const peer of this.peers.values()) {
|
|
290
|
+
if (peer.pool === 'other' && peer.pubkey === pubkey && peer.state !== 'disconnected') {
|
|
291
|
+
return true;
|
|
292
|
+
}
|
|
293
|
+
}
|
|
294
|
+
return false;
|
|
295
|
+
}
|
|
296
|
+
    /**
     * Build and register a peer record, wire it into the selector and mesh
     * router, and ask the proxy to create the RTCPeerConnection.
     *
     * Returns the newly created peer record.
     */
    createPeer(peerId, pubkey, pool, direction) {
        const peer = {
            peerId,
            pubkey,
            pool,
            direction,
            state: 'connecting',
            dataChannelReady: false,
            answerCreated: false,
            htlConfig: generatePeerHTLConfig(),
            pendingRequests: new Map(),
            // Traffic counters; forwarded* are bumped by mesh-router callbacks below.
            stats: {
                requestsSent: 0,
                requestsReceived: 0,
                responsesSent: 0,
                responsesReceived: 0,
                bytesSent: 0,
                bytesReceived: 0,
                forwardedRequests: 0,
                forwardedResolved: 0,
                forwardedSuppressed: 0,
            },
            createdAt: Date.now(),
            bufferPaused: false,
            deferredRequests: [],
        };
        this.peers.set(peerId, peer);
        this.peerSelector.addPeer(peerId);
        // Mesh router closes over `peer`, so it always sees current state.
        this.meshRouter.registerPeer({
            peerId,
            canSend: () => peer.dataChannelReady,
            getHtlConfig: () => peer.htlConfig,
            sendRequest: (hash, htl) => this.sendRequestToPeer(peer, hash, htl),
            sendResponse: async (hash, data) => this.sendResponse(peer, hash, data),
            onForwardedRequest: () => {
                peer.stats.forwardedRequests++;
            },
            onForwardedResolved: () => {
                peer.stats.forwardedResolved++;
            },
            onForwardedSuppressed: () => {
                peer.stats.forwardedSuppressed++;
            },
        });
        // Proxy replies with rtc:peerCreated once the connection exists.
        this.sendCommand({ type: 'rtc:createPeer', peerId, pubkey });
        return peer;
    }
    /**
     * Start an outbound connection: register the peer record now; the offer
     * is requested later, when the proxy confirms creation.
     */
    async createOutboundPeer(peerId, pubkey, pool) {
        this.createPeer(peerId, pubkey, pool, 'outbound');
        // Proxy will create peer and we'll get rtc:peerCreated, then request offer
    }
closePeer(peerId) {
|
|
348
|
+
const peer = this.peers.get(peerId);
|
|
349
|
+
if (!peer)
|
|
350
|
+
return;
|
|
351
|
+
// Clear pending requests
|
|
352
|
+
for (const [hashKey, pending] of peer.pendingRequests.entries()) {
|
|
353
|
+
clearTimeout(pending.timeout);
|
|
354
|
+
peer.pendingRequests.delete(hashKey);
|
|
355
|
+
this.releasePeerRequest(peer.peerId);
|
|
356
|
+
pending.resolve(null);
|
|
357
|
+
}
|
|
358
|
+
peer.state = 'disconnected';
|
|
359
|
+
this.sendCommand({ type: 'rtc:closePeer', peerId });
|
|
360
|
+
this.peers.delete(peerId);
|
|
361
|
+
this.pendingRemoteCandidates.delete(peerId);
|
|
362
|
+
this.peerSelector.removePeer(peerId);
|
|
363
|
+
this.meshRouter.removePeer(peerId);
|
|
364
|
+
this.log(`Closed peer: ${peerId.slice(0, 20)}`);
|
|
365
|
+
}
|
|
366
|
+
// ============================================================================
|
|
367
|
+
// Proxy Events
|
|
368
|
+
// ============================================================================
|
|
369
|
+
/**
|
|
370
|
+
* Handle event from main thread proxy
|
|
371
|
+
*/
|
|
372
|
+
handleProxyEvent(event) {
|
|
373
|
+
switch (event.type) {
|
|
374
|
+
case 'rtc:peerCreated':
|
|
375
|
+
this.onPeerCreated(event.peerId);
|
|
376
|
+
break;
|
|
377
|
+
case 'rtc:peerStateChange':
|
|
378
|
+
this.onPeerStateChange(event.peerId, event.state);
|
|
379
|
+
break;
|
|
380
|
+
case 'rtc:peerClosed':
|
|
381
|
+
this.onPeerClosed(event.peerId);
|
|
382
|
+
break;
|
|
383
|
+
case 'rtc:offerCreated':
|
|
384
|
+
this.onOfferCreated(event.peerId, event.sdp);
|
|
385
|
+
break;
|
|
386
|
+
case 'rtc:answerCreated':
|
|
387
|
+
this.onAnswerCreated(event.peerId, event.sdp);
|
|
388
|
+
break;
|
|
389
|
+
case 'rtc:descriptionSet':
|
|
390
|
+
this.onDescriptionSet(event.peerId, event.error);
|
|
391
|
+
break;
|
|
392
|
+
case 'rtc:iceCandidate':
|
|
393
|
+
this.onIceCandidate(event.peerId, event.candidate);
|
|
394
|
+
break;
|
|
395
|
+
case 'rtc:dataChannelOpen':
|
|
396
|
+
this.onDataChannelOpen(event.peerId);
|
|
397
|
+
break;
|
|
398
|
+
case 'rtc:dataChannelMessage':
|
|
399
|
+
this.onDataChannelMessage(event.peerId, event.data);
|
|
400
|
+
break;
|
|
401
|
+
case 'rtc:dataChannelClose':
|
|
402
|
+
this.onDataChannelClose(event.peerId);
|
|
403
|
+
break;
|
|
404
|
+
case 'rtc:dataChannelError':
|
|
405
|
+
this.onDataChannelError(event.peerId, event.error);
|
|
406
|
+
break;
|
|
407
|
+
case 'rtc:bufferHigh':
|
|
408
|
+
this.onBufferHigh(event.peerId);
|
|
409
|
+
break;
|
|
410
|
+
case 'rtc:bufferLow':
|
|
411
|
+
this.onBufferLow(event.peerId);
|
|
412
|
+
break;
|
|
413
|
+
}
|
|
414
|
+
}
|
|
415
|
+
onPeerCreated(peerId) {
|
|
416
|
+
const peer = this.peers.get(peerId);
|
|
417
|
+
if (!peer)
|
|
418
|
+
return;
|
|
419
|
+
const queuedCandidates = this.pendingRemoteCandidates.get(peerId);
|
|
420
|
+
if (queuedCandidates?.length) {
|
|
421
|
+
for (const candidate of queuedCandidates) {
|
|
422
|
+
this.sendCommand({ type: 'rtc:addIceCandidate', peerId, candidate });
|
|
423
|
+
}
|
|
424
|
+
this.pendingRemoteCandidates.delete(peerId);
|
|
425
|
+
}
|
|
426
|
+
// If outbound, create offer
|
|
427
|
+
if (peer.direction === 'outbound') {
|
|
428
|
+
this.sendCommand({ type: 'rtc:createOffer', peerId });
|
|
429
|
+
}
|
|
430
|
+
}
|
|
431
|
+
onPeerStateChange(peerId, state) {
|
|
432
|
+
const peer = this.peers.get(peerId);
|
|
433
|
+
if (!peer)
|
|
434
|
+
return;
|
|
435
|
+
this.log(`Peer ${peerId.slice(0, 20)} state: ${state}`);
|
|
436
|
+
if (state === 'connected') {
|
|
437
|
+
peer.state = 'connected';
|
|
438
|
+
peer.connectedAt = Date.now();
|
|
439
|
+
}
|
|
440
|
+
else if (state === 'failed' || state === 'closed') {
|
|
441
|
+
this.closePeer(peerId);
|
|
442
|
+
}
|
|
443
|
+
}
|
|
444
|
+
onPeerClosed(peerId) {
|
|
445
|
+
this.peers.delete(peerId);
|
|
446
|
+
this.peerSelector.removePeer(peerId);
|
|
447
|
+
}
|
|
448
|
+
onOfferCreated(peerId, sdp) {
|
|
449
|
+
const peer = this.peers.get(peerId);
|
|
450
|
+
if (!peer)
|
|
451
|
+
return;
|
|
452
|
+
// Set local description
|
|
453
|
+
this.sendCommand({ type: 'rtc:setLocalDescription', peerId, sdp });
|
|
454
|
+
// Send offer via signaling using endpoint identities.
|
|
455
|
+
const msg = {
|
|
456
|
+
type: 'offer',
|
|
457
|
+
sdp: sdp.sdp,
|
|
458
|
+
targetPeerId: peerId,
|
|
459
|
+
peerId: this.myPeerId.toString(),
|
|
460
|
+
};
|
|
461
|
+
this.sendSignaling(msg, peer.pubkey);
|
|
462
|
+
}
|
|
463
|
+
onAnswerCreated(peerId, sdp) {
|
|
464
|
+
this.log(`onAnswerCreated for ${peerId.slice(0, 20)}`);
|
|
465
|
+
const peer = this.peers.get(peerId);
|
|
466
|
+
if (!peer) {
|
|
467
|
+
this.log(`onAnswerCreated: peer not found for ${peerId.slice(0, 20)}`);
|
|
468
|
+
return;
|
|
469
|
+
}
|
|
470
|
+
this.sendCommand({ type: 'rtc:setLocalDescription', peerId, sdp });
|
|
471
|
+
this.log(`Sending answer to ${peer.pubkey.slice(0, 8)}`);
|
|
472
|
+
const msg = {
|
|
473
|
+
type: 'answer',
|
|
474
|
+
sdp: sdp.sdp,
|
|
475
|
+
targetPeerId: peerId,
|
|
476
|
+
peerId: this.myPeerId.toString(),
|
|
477
|
+
};
|
|
478
|
+
this.sendSignaling(msg, peer.pubkey);
|
|
479
|
+
}
|
|
480
|
+
onDescriptionSet(peerId, error) {
|
|
481
|
+
if (error) {
|
|
482
|
+
this.log(`Description set error for ${peerId.slice(0, 20)}: ${error}`);
|
|
483
|
+
return;
|
|
484
|
+
}
|
|
485
|
+
const peer = this.peers.get(peerId);
|
|
486
|
+
if (!peer) {
|
|
487
|
+
this.log(`onDescriptionSet: peer not found for ${peerId.slice(0, 20)}`);
|
|
488
|
+
return;
|
|
489
|
+
}
|
|
490
|
+
this.log(`onDescriptionSet for ${peerId.slice(0, 20)}: direction=${peer.direction}, state=${peer.state}, answerCreated=${peer.answerCreated}`);
|
|
491
|
+
if (peer.direction === 'inbound' && peer.state === 'connecting' && !peer.answerCreated) {
|
|
492
|
+
peer.answerCreated = true;
|
|
493
|
+
this.log(`Creating answer for ${peerId.slice(0, 20)}`);
|
|
494
|
+
this.sendCommand({ type: 'rtc:createAnswer', peerId });
|
|
495
|
+
}
|
|
496
|
+
}
|
|
497
|
+
onIceCandidate(peerId, candidate) {
|
|
498
|
+
if (!candidate || !candidate.candidate)
|
|
499
|
+
return;
|
|
500
|
+
const peer = this.peers.get(peerId);
|
|
501
|
+
if (!peer)
|
|
502
|
+
return;
|
|
503
|
+
// Send candidate via signaling using endpoint identities.
|
|
504
|
+
const msg = {
|
|
505
|
+
type: 'candidate',
|
|
506
|
+
candidate: candidate.candidate,
|
|
507
|
+
sdpMLineIndex: candidate.sdpMLineIndex ?? undefined,
|
|
508
|
+
sdpMid: candidate.sdpMid ?? undefined,
|
|
509
|
+
targetPeerId: peerId,
|
|
510
|
+
peerId: this.myPeerId.toString(),
|
|
511
|
+
};
|
|
512
|
+
this.sendSignaling(msg, peer.pubkey);
|
|
513
|
+
}
|
|
514
|
+
onDataChannelOpen(peerId) {
|
|
515
|
+
const peer = this.peers.get(peerId);
|
|
516
|
+
if (!peer)
|
|
517
|
+
return;
|
|
518
|
+
peer.dataChannelReady = true;
|
|
519
|
+
this.log(`Data channel open: ${peerId.slice(0, 20)}`);
|
|
520
|
+
}
|
|
521
|
+
onDataChannelClose(peerId) {
|
|
522
|
+
const peer = this.peers.get(peerId);
|
|
523
|
+
if (!peer)
|
|
524
|
+
return;
|
|
525
|
+
peer.dataChannelReady = false;
|
|
526
|
+
this.closePeer(peerId);
|
|
527
|
+
}
|
|
528
|
+
onDataChannelError(peerId, error) {
|
|
529
|
+
this.log(`Data channel error for ${peerId}: ${error}`);
|
|
530
|
+
}
|
|
531
|
+
onBufferHigh(peerId) {
|
|
532
|
+
const peer = this.peers.get(peerId);
|
|
533
|
+
if (!peer)
|
|
534
|
+
return;
|
|
535
|
+
peer.bufferPaused = true;
|
|
536
|
+
this.log(`Buffer high for ${peerId.slice(0, 20)}, pausing responses`);
|
|
537
|
+
}
|
|
538
|
+
onBufferLow(peerId) {
|
|
539
|
+
const peer = this.peers.get(peerId);
|
|
540
|
+
if (!peer)
|
|
541
|
+
return;
|
|
542
|
+
peer.bufferPaused = false;
|
|
543
|
+
this.log(`Buffer low for ${peerId.slice(0, 20)}, resuming responses`);
|
|
544
|
+
// Process deferred requests
|
|
545
|
+
this.processDeferredRequests(peer);
|
|
546
|
+
}
|
|
547
|
+
async processDeferredRequests(peer) {
|
|
548
|
+
while (!peer.bufferPaused && peer.deferredRequests.length > 0) {
|
|
549
|
+
const req = peer.deferredRequests.shift();
|
|
550
|
+
await this.processRequest(peer, req);
|
|
551
|
+
}
|
|
552
|
+
}
|
|
553
|
+
/**
 * Return all peers with an open data channel, ordered by routing
 * preference: non-backed-off peers first, then 'follows'-pool peers,
 * then by selector rank penalized by current in-flight load.
 * @param excludePeerId - optional peer to omit from the result
 *   (e.g. the peer a request originally came from).
 */
orderedConnectedPeers(excludePeerId) {
    const connectedAll = Array.from(this.peers.values())
        .filter((peer) => peer.dataChannelReady);
    if (connectedAll.length === 0)
        return [];
    // Sync the selector's peer set before asking it to rank anything.
    const peerIds = connectedAll.map((peer) => peer.peerId);
    syncSelectorPeers(this.peerSelector, peerIds);
    const connectedPeers = connectedAll
        .filter((peer) => !excludePeerId || peer.peerId !== excludePeerId);
    const selectorOrder = this.peerSelector.selectPeers();
    const rank = new Map(selectorOrder.map((peerId, idx) => [peerId, idx]));
    connectedPeers.sort((a, b) => {
        // Key 1: backed-off peers always sort last.
        const leftBackedOff = this.peerSelector.isPeerBackedOff(a.peerId);
        const rightBackedOff = this.peerSelector.isPeerBackedOff(b.peerId);
        if (leftBackedOff !== rightBackedOff)
            return leftBackedOff ? 1 : -1;
        // Key 2: 'follows'-pool peers are preferred over any other pool.
        if (a.pool === 'follows' && b.pool !== 'follows')
            return -1;
        if (a.pool !== 'follows' && b.pool === 'follows')
            return 1;
        // Key 3: selector rank (unranked peers sort last), with each
        // in-flight request adding a fixed rank penalty so load spreads
        // across similarly-ranked peers.
        const leftRank = rank.get(a.peerId) ?? Number.MAX_SAFE_INTEGER;
        const rightRank = rank.get(b.peerId) ?? Number.MAX_SAFE_INTEGER;
        const leftLoad = this.activePeerRequests.get(a.peerId) ?? 0;
        const rightLoad = this.activePeerRequests.get(b.peerId) ?? 0;
        return (leftRank + leftLoad * ACTIVE_REQUEST_RANK_PENALTY) -
            (rightRank + rightLoad * ACTIVE_REQUEST_RANK_PENALTY);
    });
    return connectedPeers;
}
async peerMetadataPointerHash() {
|
|
583
|
+
return sha256(new TextEncoder().encode(PEER_METADATA_POINTER_SLOT_KEY));
|
|
584
|
+
}
|
|
585
|
+
/**
 * Send a request for `hash` to a single peer and return a handle whose
 * `promise` settles with { peerId, data, elapsedMs }; `data` is null on
 * timeout or cancellation. The handle starts with `settled: false` so
 * the hedged dispatcher can race it across waves.
 * Side effects: reserves an in-flight slot, charges the selector, and
 * registers a per-hash pending entry whose timeout undoes all of that.
 */
createInFlightRequest(peer, hash, htl) {
    const hashKey = hashToKey(hash);
    const startedAt = Date.now();
    this.reservePeerRequest(peer.peerId);
    // 40 is the selector's per-request cost estimate — units unclear
    // from here; TODO confirm against the selector implementation.
    this.peerSelector.recordRequest(peer.peerId, 40);
    const promise = new Promise((resolve) => {
        const timeout = setTimeout(() => {
            // Timed out: drop the pending entry, free the slot, record
            // the timeout, and resolve with null so the caller moves on.
            peer.pendingRequests.delete(hashKey);
            this.releasePeerRequest(peer.peerId);
            this.peerSelector.recordTimeout(peer.peerId);
            resolve({ peerId: peer.peerId, data: null, elapsedMs: Math.max(1, Date.now() - startedAt) });
        }, this.requestTimeout);
        // The pending entry is how handleResponse / clearPendingHashFromPeers
        // settle or cancel this request from the outside.
        peer.pendingRequests.set(hashKey, {
            hash,
            startedAt,
            resolve: (data) => {
                resolve({ peerId: peer.peerId, data, elapsedMs: Math.max(1, Date.now() - startedAt) });
            },
            timeout,
        });
        peer.stats.requestsSent++;
        const req = createRequest(hash, htl);
        const encoded = new Uint8Array(encodeRequest(req));
        this.sendDataToPeer(peer, encoded);
    });
    return {
        peerId: peer.peerId,
        settled: false,
        promise,
    };
}
async waitForInFlightResult(inFlight, waitMs) {
|
|
617
|
+
const active = inFlight.filter((task) => !task.settled);
|
|
618
|
+
if (active.length === 0 || waitMs <= 0)
|
|
619
|
+
return null;
|
|
620
|
+
const timeout = new Promise((resolve) => {
|
|
621
|
+
setTimeout(() => resolve(null), waitMs);
|
|
622
|
+
});
|
|
623
|
+
const outcome = await Promise.race([
|
|
624
|
+
timeout,
|
|
625
|
+
...active.map((task) => task.promise.then((result) => ({
|
|
626
|
+
task,
|
|
627
|
+
data: result.data,
|
|
628
|
+
elapsedMs: result.elapsedMs,
|
|
629
|
+
}))),
|
|
630
|
+
]);
|
|
631
|
+
if (!outcome)
|
|
632
|
+
return null;
|
|
633
|
+
outcome.task.settled = true;
|
|
634
|
+
return outcome;
|
|
635
|
+
}
|
|
636
|
+
clearPendingHashFromPeers(hashKey, keepPeerId) {
|
|
637
|
+
for (const peer of this.peers.values()) {
|
|
638
|
+
if (keepPeerId && peer.peerId === keepPeerId)
|
|
639
|
+
continue;
|
|
640
|
+
const pending = peer.pendingRequests.get(hashKey);
|
|
641
|
+
if (!pending)
|
|
642
|
+
continue;
|
|
643
|
+
clearTimeout(pending.timeout);
|
|
644
|
+
peer.pendingRequests.delete(hashKey);
|
|
645
|
+
this.releasePeerRequest(peer.peerId);
|
|
646
|
+
pending.resolve(null);
|
|
647
|
+
}
|
|
648
|
+
}
|
|
649
|
+
reservePeerRequest(peerId) {
|
|
650
|
+
this.activePeerRequests.set(peerId, (this.activePeerRequests.get(peerId) ?? 0) + 1);
|
|
651
|
+
}
|
|
652
|
+
releasePeerRequest(peerId) {
|
|
653
|
+
const next = (this.activePeerRequests.get(peerId) ?? 0) - 1;
|
|
654
|
+
if (next <= 0) {
|
|
655
|
+
this.activePeerRequests.delete(peerId);
|
|
656
|
+
return;
|
|
657
|
+
}
|
|
658
|
+
this.activePeerRequests.set(peerId, next);
|
|
659
|
+
}
|
|
660
|
+
/**
|
|
661
|
+
* Persist selector metadata snapshot to local store.
|
|
662
|
+
* Returns the snapshot hash.
|
|
663
|
+
*/
|
|
664
|
+
async persistPeerMetadata() {
|
|
665
|
+
const snapshot = this.peerSelector.exportPeerMetadataSnapshot();
|
|
666
|
+
const bytes = new TextEncoder().encode(JSON.stringify(snapshot));
|
|
667
|
+
const snapshotHash = await sha256(bytes);
|
|
668
|
+
await this.localStore.put(snapshotHash, bytes);
|
|
669
|
+
const pointerHash = await this.peerMetadataPointerHash();
|
|
670
|
+
await this.localStore.delete(pointerHash);
|
|
671
|
+
await this.localStore.put(pointerHash, new TextEncoder().encode(toHex(snapshotHash)));
|
|
672
|
+
return snapshotHash;
|
|
673
|
+
}
|
|
674
|
+
/**
|
|
675
|
+
* Load selector metadata snapshot from local store.
|
|
676
|
+
*/
|
|
677
|
+
async loadPeerMetadata() {
|
|
678
|
+
const pointerHash = await this.peerMetadataPointerHash();
|
|
679
|
+
const pointerBytes = await this.localStore.get(pointerHash);
|
|
680
|
+
if (!pointerBytes)
|
|
681
|
+
return false;
|
|
682
|
+
const pointerHex = new TextDecoder().decode(pointerBytes).trim();
|
|
683
|
+
if (pointerHex.length !== 64)
|
|
684
|
+
return false;
|
|
685
|
+
const snapshotHash = fromHex(pointerHex);
|
|
686
|
+
if (snapshotHash.length !== 32)
|
|
687
|
+
return false;
|
|
688
|
+
const snapshotBytes = await this.localStore.get(snapshotHash);
|
|
689
|
+
if (!snapshotBytes)
|
|
690
|
+
return false;
|
|
691
|
+
let snapshot;
|
|
692
|
+
try {
|
|
693
|
+
snapshot = JSON.parse(new TextDecoder().decode(snapshotBytes));
|
|
694
|
+
}
|
|
695
|
+
catch {
|
|
696
|
+
return false;
|
|
697
|
+
}
|
|
698
|
+
this.peerSelector.importPeerMetadataSnapshot(snapshot);
|
|
699
|
+
syncSelectorPeers(this.peerSelector, Array.from(this.peers.keys()));
|
|
700
|
+
return true;
|
|
701
|
+
}
|
|
702
|
+
// ============================================================================
// Data Protocol
// ============================================================================
/**
 * Low-level send: account outbound bytes against the peer's stats and
 * hand the payload to the RTC side via a command message.
 */
sendDataToPeer(peer, data) {
    peer.stats.bytesSent += data.byteLength;
    this.sendCommand({ type: 'rtc:sendData', peerId: peer.peerId, data });
}
async onDataChannelMessage(peerId, data) {
|
|
710
|
+
const peer = this.peers.get(peerId);
|
|
711
|
+
if (!peer)
|
|
712
|
+
return;
|
|
713
|
+
// Count all inbound DataChannel bytes (requests + responses + protocol overhead).
|
|
714
|
+
peer.stats.bytesReceived += data.byteLength;
|
|
715
|
+
const msg = parseMessage(data);
|
|
716
|
+
if (!msg) {
|
|
717
|
+
this.log(`Failed to parse message from ${peerId}`);
|
|
718
|
+
return;
|
|
719
|
+
}
|
|
720
|
+
if (msg.type === MSG_TYPE_REQUEST) {
|
|
721
|
+
await this.handleRequest(peer, msg.body);
|
|
722
|
+
}
|
|
723
|
+
else if (msg.type === MSG_TYPE_RESPONSE) {
|
|
724
|
+
await this.handleResponse(peer, msg.body);
|
|
725
|
+
}
|
|
726
|
+
}
|
|
727
|
+
async handleRequest(peer, req) {
|
|
728
|
+
peer.stats.requestsReceived++;
|
|
729
|
+
// If buffer is full, defer the request for later processing
|
|
730
|
+
if (peer.bufferPaused) {
|
|
731
|
+
// Limit deferred requests to prevent memory issues
|
|
732
|
+
if (peer.deferredRequests.length < 100) {
|
|
733
|
+
peer.deferredRequests.push(req);
|
|
734
|
+
}
|
|
735
|
+
return;
|
|
736
|
+
}
|
|
737
|
+
await this.processRequest(peer, req);
|
|
738
|
+
}
|
|
739
|
+
/**
 * Process one inbound request by delegating to the mesh router,
 * identified by the requesting peer's id.
 */
async processRequest(peer, req) {
    await this.meshRouter.handleRequest(peer.peerId, req);
}
/**
 * Handle a response message from a peer. Two paths:
 *  - Unmatched (no pending entry on this peer): treat as a late or
 *    relayed response — still verify and cache it when we asked for the
 *    hash recently or the mesh router has waiting requesters.
 *  - Matched: settle the pending entry, record selector success or
 *    failure, and fan verified data out to mesh-router requesters.
 */
async handleResponse(peer, res) {
    peer.stats.responsesReceived++;
    const hashKey = hashToKey(res.h);
    const pending = peer.pendingRequests.get(hashKey);
    if (!pending) {
        const hasRequesters = this.meshRouter.hasInFlight(hashKey);
        // Late response: cache if we requested this hash recently
        const requestedAt = this.recentRequests.get(hashKey);
        if (!requestedAt && !hasRequesters)
            return;
        // Recent-request entries older than 60s are stale: evict, and
        // keep going only if someone downstream is still waiting.
        if (requestedAt && Date.now() - requestedAt > 60000) {
            this.recentRequests.delete(hashKey);
            if (!hasRequesters)
                return;
        }
        // Never cache or forward data that fails hash verification.
        const valid = await verifyHash(res.d, res.h);
        if (valid) {
            await this.localStore.put(res.h, res.d);
            if (requestedAt) {
                this.recentRequests.delete(hashKey);
            }
            if (hasRequesters) {
                await this.meshRouter.resolve(res.h, res.d);
            }
        }
        return;
    }
    // Matched a pending request: stop its timeout and free the slot
    // before any awaits, so the timeout can't fire concurrently.
    clearTimeout(pending.timeout);
    peer.pendingRequests.delete(hashKey);
    this.releasePeerRequest(peer.peerId);
    // Verify hash
    const valid = await verifyHash(res.d, res.h);
    const elapsedMs = pending.startedAt ? Math.max(1, Date.now() - pending.startedAt) : this.requestTimeout;
    if (valid) {
        // Store locally
        await this.localStore.put(res.h, res.d);
        this.peerSelector.recordSuccess(peer.peerId, elapsedMs, res.d.length);
        pending.resolve(res.d);
        await this.meshRouter.resolve(res.h, res.d);
    }
    else {
        this.log(`Hash mismatch from ${peer.peerId}`);
        this.peerSelector.recordFailure(peer.peerId);
        pending.resolve(null);
    }
}
async sendResponse(peer, hash, data) {
|
|
789
|
+
if (!peer.dataChannelReady)
|
|
790
|
+
return;
|
|
791
|
+
peer.stats.responsesSent++;
|
|
792
|
+
// Fragment if needed
|
|
793
|
+
if (data.length > FRAGMENT_SIZE) {
|
|
794
|
+
const totalFragments = Math.ceil(data.length / FRAGMENT_SIZE);
|
|
795
|
+
for (let i = 0; i < totalFragments; i++) {
|
|
796
|
+
const start = i * FRAGMENT_SIZE;
|
|
797
|
+
const end = Math.min(start + FRAGMENT_SIZE, data.length);
|
|
798
|
+
const fragment = data.slice(start, end);
|
|
799
|
+
const res = createFragmentResponse(hash, fragment, i, totalFragments);
|
|
800
|
+
const encoded = new Uint8Array(encodeResponse(res));
|
|
801
|
+
this.sendDataToPeer(peer, encoded);
|
|
802
|
+
}
|
|
803
|
+
}
|
|
804
|
+
else {
|
|
805
|
+
const res = createResponse(hash, data);
|
|
806
|
+
const encoded = new Uint8Array(encodeResponse(res));
|
|
807
|
+
this.sendDataToPeer(peer, encoded);
|
|
808
|
+
}
|
|
809
|
+
}
|
|
810
|
+
sendRequestToPeer(peer, hash, htl) {
|
|
811
|
+
if (!peer.dataChannelReady) {
|
|
812
|
+
return false;
|
|
813
|
+
}
|
|
814
|
+
const encoded = encodeForwardRequest(hash, htl);
|
|
815
|
+
this.sendDataToPeer(peer, encoded);
|
|
816
|
+
return true;
|
|
817
|
+
}
|
|
818
|
+
/**
 * Hedged multi-wave fan-out for a single hash.
 * Ranks peers, then contacts them in waves sized by the dispatch plan:
 * each non-final wave waits up to hedgeIntervalMs for a hit before
 * widening, and the whole operation is bounded by requestTimeout.
 * @param options - { htl, excludePeerId? }.
 * @returns the first verified data payload, or null on timeout / no peers.
 */
async queryPeersWithDispatch(hash, options) {
    const orderedPeers = this.orderedConnectedPeers(options.excludePeerId);
    if (orderedPeers.length === 0)
        return null;
    const dispatch = normalizeDispatchConfig(this.routing.dispatch, orderedPeers.length);
    const wavePlan = buildHedgedWavePlan(orderedPeers.length, dispatch);
    if (wavePlan.length === 0)
        return null;
    const deadline = Date.now() + this.requestTimeout;
    const inFlight = [];
    let nextPeerIdx = 0;
    for (let waveIdx = 0; waveIdx < wavePlan.length; waveIdx++) {
        // Launch this wave's slice of peers in ranked order.
        const waveSize = wavePlan[waveIdx];
        const from = nextPeerIdx;
        const to = Math.min(from + waveSize, orderedPeers.length);
        nextPeerIdx = to;
        for (const peer of orderedPeers.slice(from, to)) {
            inFlight.push(this.createInFlightRequest(peer, hash, options.htl));
        }
        // The last wave (or peer exhaustion) waits until the overall
        // deadline; earlier waves only wait one hedge interval.
        const isLastWave = waveIdx === wavePlan.length - 1 || nextPeerIdx >= orderedPeers.length;
        const windowEnd = isLastWave
            ? deadline
            : Math.min(deadline, Date.now() + dispatch.hedgeIntervalMs);
        while (Date.now() < windowEnd) {
            const remaining = windowEnd - Date.now();
            const result = await this.waitForInFlightResult(inFlight, remaining);
            if (!result)
                break;
            // Null data = that task timed out or failed; keep waiting.
            if (!result.data)
                continue;
            // First verified answer wins: cancel every other peer's
            // pending request for this hash (keep the winner's).
            this.clearPendingHashFromPeers(hashToKey(hash), result.task.peerId);
            return result.data;
        }
        if (Date.now() >= deadline)
            break;
    }
    // Deadline exhausted with no data: cancel all outstanding requests.
    this.clearPendingHashFromPeers(hashToKey(hash));
    return null;
}
// ============================================================================
|
|
858
|
+
// Public API
|
|
859
|
+
// ============================================================================
|
|
860
|
+
/**
|
|
861
|
+
* Request data from peers
|
|
862
|
+
*/
|
|
863
|
+
async get(hash) {
|
|
864
|
+
const hashKey = hashToKey(hash);
|
|
865
|
+
this.recentRequests.set(hashKey, Date.now());
|
|
866
|
+
return this.queryPeersWithDispatch(hash, { htl: MAX_HTL });
|
|
867
|
+
}
|
|
868
|
+
/**
|
|
869
|
+
* Get peer stats for UI
|
|
870
|
+
*/
|
|
871
|
+
getPeerStats() {
|
|
872
|
+
return Array.from(this.peers.values()).map(peer => ({
|
|
873
|
+
peerId: peer.peerId,
|
|
874
|
+
pubkey: peer.pubkey,
|
|
875
|
+
connected: peer.state === 'connected' && peer.dataChannelReady,
|
|
876
|
+
pool: peer.pool,
|
|
877
|
+
requestsSent: peer.stats.requestsSent,
|
|
878
|
+
requestsReceived: peer.stats.requestsReceived,
|
|
879
|
+
responsesSent: peer.stats.responsesSent,
|
|
880
|
+
responsesReceived: peer.stats.responsesReceived,
|
|
881
|
+
bytesSent: peer.stats.bytesSent,
|
|
882
|
+
bytesReceived: peer.stats.bytesReceived,
|
|
883
|
+
forwardedRequests: peer.stats.forwardedRequests,
|
|
884
|
+
forwardedResolved: peer.stats.forwardedResolved,
|
|
885
|
+
forwardedSuppressed: peer.stats.forwardedSuppressed,
|
|
886
|
+
}));
|
|
887
|
+
}
|
|
888
|
+
/**
|
|
889
|
+
* Get connected peer count
|
|
890
|
+
*/
|
|
891
|
+
getConnectedCount() {
|
|
892
|
+
let count = 0;
|
|
893
|
+
for (const peer of this.peers.values()) {
|
|
894
|
+
if (peer.state === 'connected' && peer.dataChannelReady) {
|
|
895
|
+
count++;
|
|
896
|
+
}
|
|
897
|
+
}
|
|
898
|
+
return count;
|
|
899
|
+
}
|
|
900
|
+
/**
|
|
901
|
+
* Set pool configuration
|
|
902
|
+
*/
|
|
903
|
+
setPoolConfig(config) {
|
|
904
|
+
this.poolConfig = {
|
|
905
|
+
follows: { maxConnections: config.follows.max, satisfiedConnections: config.follows.satisfied },
|
|
906
|
+
other: { maxConnections: config.other.max, satisfiedConnections: config.other.satisfied },
|
|
907
|
+
};
|
|
908
|
+
this.log('Pool config updated:', this.poolConfig);
|
|
909
|
+
// Re-broadcast hello to trigger peer discovery with new limits
|
|
910
|
+
this.sendHello();
|
|
911
|
+
}
|
|
912
|
+
/** Forward the request-forwarding rate-limit config to the mesh router. */
setForwardRateLimit(config) {
    this.meshRouter.setForwardRateLimit(config);
}
/**
|
|
916
|
+
* Update identity (pubkey) and restart signaling if already running.
|
|
917
|
+
* This keeps peerId consistent with the current account.
|
|
918
|
+
*/
|
|
919
|
+
setIdentity(pubkey) {
|
|
920
|
+
if (this.myPeerId.pubkey === pubkey)
|
|
921
|
+
return;
|
|
922
|
+
const wasStarted = !!this.helloInterval;
|
|
923
|
+
this.stop();
|
|
924
|
+
this.myPeerId = new PeerId(pubkey);
|
|
925
|
+
if (wasStarted) {
|
|
926
|
+
this.start();
|
|
927
|
+
}
|
|
928
|
+
}
|
|
929
|
+
// ============================================================================
|
|
930
|
+
// Helpers
|
|
931
|
+
// ============================================================================
|
|
932
|
+
log(...args) {
|
|
933
|
+
if (this.debug) {
|
|
934
|
+
console.log('[WebRTC]', ...args);
|
|
935
|
+
}
|
|
936
|
+
}
|
|
937
|
+
}
|
|
938
|
+
//# sourceMappingURL=webrtcController.js.map
|