@hashtree/worker 0.2.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +7 -3
- package/src/app-runtime.ts +393 -0
- package/src/capabilities/blossomBandwidthTracker.ts +74 -0
- package/src/capabilities/blossomTransport.ts +179 -0
- package/src/capabilities/connectivity.ts +54 -0
- package/src/capabilities/idbStorage.ts +94 -0
- package/src/capabilities/meshRouterStore.ts +426 -0
- package/src/capabilities/rootResolver.ts +497 -0
- package/src/client-id.ts +137 -0
- package/src/client.ts +501 -0
- package/{dist/entry.js → src/entry.ts} +1 -1
- package/src/htree-path.ts +53 -0
- package/src/htree-url.ts +156 -0
- package/src/index.ts +76 -0
- package/src/mediaStreaming.ts +64 -0
- package/src/p2p/boundedQueue.ts +168 -0
- package/src/p2p/errorMessage.ts +6 -0
- package/src/p2p/index.ts +48 -0
- package/src/p2p/lruCache.ts +78 -0
- package/src/p2p/meshQueryRouter.ts +361 -0
- package/src/p2p/protocol.ts +11 -0
- package/src/p2p/queryForwardingMachine.ts +197 -0
- package/src/p2p/signaling.ts +284 -0
- package/src/p2p/uploadRateLimiter.ts +85 -0
- package/src/p2p/webrtcController.ts +1168 -0
- package/src/p2p/webrtcProxy.ts +519 -0
- package/src/privacyGuards.ts +31 -0
- package/src/protocol.ts +124 -0
- package/src/relay/identity.ts +86 -0
- package/src/relay/mediaHandler.ts +1633 -0
- package/src/relay/ndk.ts +590 -0
- package/{dist/relay/nostr-wasm.js → src/relay/nostr-wasm.ts} +4 -1
- package/src/relay/nostr.ts +249 -0
- package/src/relay/protocol.ts +361 -0
- package/src/relay/publicAssetUrl.ts +25 -0
- package/src/relay/rootPathResolver.ts +50 -0
- package/src/relay/shims.d.ts +17 -0
- package/src/relay/signing.ts +332 -0
- package/src/relay/treeRootCache.ts +354 -0
- package/src/relay/treeRootSubscription.ts +577 -0
- package/src/relay/utils/constants.ts +139 -0
- package/src/relay/utils/errorMessage.ts +7 -0
- package/src/relay/utils/lruCache.ts +79 -0
- package/src/relay/webrtc.ts +5 -0
- package/src/relay/webrtcSignaling.ts +108 -0
- package/src/relay/worker.ts +1787 -0
- package/src/relay-client.ts +265 -0
- package/src/relay-entry.ts +1 -0
- package/src/runtime-network.ts +134 -0
- package/src/runtime.ts +153 -0
- package/{dist/transferableBytes.js → src/transferableBytes.ts} +2 -3
- package/src/tree-root.ts +851 -0
- package/src/types.ts +8 -0
- package/src/worker.ts +975 -0
- package/dist/app-runtime.d.ts +0 -60
- package/dist/app-runtime.d.ts.map +0 -1
- package/dist/app-runtime.js +0 -271
- package/dist/app-runtime.js.map +0 -1
- package/dist/capabilities/blossomBandwidthTracker.d.ts +0 -26
- package/dist/capabilities/blossomBandwidthTracker.d.ts.map +0 -1
- package/dist/capabilities/blossomBandwidthTracker.js +0 -53
- package/dist/capabilities/blossomBandwidthTracker.js.map +0 -1
- package/dist/capabilities/blossomTransport.d.ts +0 -22
- package/dist/capabilities/blossomTransport.d.ts.map +0 -1
- package/dist/capabilities/blossomTransport.js +0 -139
- package/dist/capabilities/blossomTransport.js.map +0 -1
- package/dist/capabilities/connectivity.d.ts +0 -3
- package/dist/capabilities/connectivity.d.ts.map +0 -1
- package/dist/capabilities/connectivity.js +0 -49
- package/dist/capabilities/connectivity.js.map +0 -1
- package/dist/capabilities/idbStorage.d.ts +0 -25
- package/dist/capabilities/idbStorage.d.ts.map +0 -1
- package/dist/capabilities/idbStorage.js +0 -73
- package/dist/capabilities/idbStorage.js.map +0 -1
- package/dist/capabilities/meshRouterStore.d.ts +0 -71
- package/dist/capabilities/meshRouterStore.d.ts.map +0 -1
- package/dist/capabilities/meshRouterStore.js +0 -316
- package/dist/capabilities/meshRouterStore.js.map +0 -1
- package/dist/capabilities/rootResolver.d.ts +0 -10
- package/dist/capabilities/rootResolver.d.ts.map +0 -1
- package/dist/capabilities/rootResolver.js +0 -392
- package/dist/capabilities/rootResolver.js.map +0 -1
- package/dist/client-id.d.ts +0 -18
- package/dist/client-id.d.ts.map +0 -1
- package/dist/client-id.js +0 -98
- package/dist/client-id.js.map +0 -1
- package/dist/client.d.ts +0 -61
- package/dist/client.d.ts.map +0 -1
- package/dist/client.js +0 -417
- package/dist/client.js.map +0 -1
- package/dist/entry.d.ts +0 -2
- package/dist/entry.d.ts.map +0 -1
- package/dist/entry.js.map +0 -1
- package/dist/htree-path.d.ts +0 -13
- package/dist/htree-path.d.ts.map +0 -1
- package/dist/htree-path.js +0 -38
- package/dist/htree-path.js.map +0 -1
- package/dist/htree-url.d.ts +0 -22
- package/dist/htree-url.d.ts.map +0 -1
- package/dist/htree-url.js +0 -118
- package/dist/htree-url.js.map +0 -1
- package/dist/index.d.ts +0 -17
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js +0 -8
- package/dist/index.js.map +0 -1
- package/dist/mediaStreaming.d.ts +0 -7
- package/dist/mediaStreaming.d.ts.map +0 -1
- package/dist/mediaStreaming.js +0 -48
- package/dist/mediaStreaming.js.map +0 -1
- package/dist/p2p/boundedQueue.d.ts +0 -79
- package/dist/p2p/boundedQueue.d.ts.map +0 -1
- package/dist/p2p/boundedQueue.js +0 -134
- package/dist/p2p/boundedQueue.js.map +0 -1
- package/dist/p2p/errorMessage.d.ts +0 -5
- package/dist/p2p/errorMessage.d.ts.map +0 -1
- package/dist/p2p/errorMessage.js +0 -7
- package/dist/p2p/errorMessage.js.map +0 -1
- package/dist/p2p/index.d.ts +0 -8
- package/dist/p2p/index.d.ts.map +0 -1
- package/dist/p2p/index.js +0 -6
- package/dist/p2p/index.js.map +0 -1
- package/dist/p2p/lruCache.d.ts +0 -26
- package/dist/p2p/lruCache.d.ts.map +0 -1
- package/dist/p2p/lruCache.js +0 -65
- package/dist/p2p/lruCache.js.map +0 -1
- package/dist/p2p/meshQueryRouter.d.ts +0 -57
- package/dist/p2p/meshQueryRouter.d.ts.map +0 -1
- package/dist/p2p/meshQueryRouter.js +0 -264
- package/dist/p2p/meshQueryRouter.js.map +0 -1
- package/dist/p2p/protocol.d.ts +0 -10
- package/dist/p2p/protocol.d.ts.map +0 -1
- package/dist/p2p/protocol.js +0 -2
- package/dist/p2p/protocol.js.map +0 -1
- package/dist/p2p/queryForwardingMachine.d.ts +0 -46
- package/dist/p2p/queryForwardingMachine.d.ts.map +0 -1
- package/dist/p2p/queryForwardingMachine.js +0 -144
- package/dist/p2p/queryForwardingMachine.js.map +0 -1
- package/dist/p2p/signaling.d.ts +0 -63
- package/dist/p2p/signaling.d.ts.map +0 -1
- package/dist/p2p/signaling.js +0 -185
- package/dist/p2p/signaling.js.map +0 -1
- package/dist/p2p/uploadRateLimiter.d.ts +0 -21
- package/dist/p2p/uploadRateLimiter.d.ts.map +0 -1
- package/dist/p2p/uploadRateLimiter.js +0 -62
- package/dist/p2p/uploadRateLimiter.js.map +0 -1
- package/dist/p2p/webrtcController.d.ts +0 -176
- package/dist/p2p/webrtcController.d.ts.map +0 -1
- package/dist/p2p/webrtcController.js +0 -938
- package/dist/p2p/webrtcController.js.map +0 -1
- package/dist/p2p/webrtcProxy.d.ts +0 -62
- package/dist/p2p/webrtcProxy.d.ts.map +0 -1
- package/dist/p2p/webrtcProxy.js +0 -447
- package/dist/p2p/webrtcProxy.js.map +0 -1
- package/dist/privacyGuards.d.ts +0 -14
- package/dist/privacyGuards.d.ts.map +0 -1
- package/dist/privacyGuards.js +0 -27
- package/dist/privacyGuards.js.map +0 -1
- package/dist/protocol.d.ts +0 -225
- package/dist/protocol.d.ts.map +0 -1
- package/dist/protocol.js +0 -2
- package/dist/protocol.js.map +0 -1
- package/dist/relay/identity.d.ts +0 -36
- package/dist/relay/identity.d.ts.map +0 -1
- package/dist/relay/identity.js +0 -78
- package/dist/relay/identity.js.map +0 -1
- package/dist/relay/mediaHandler.d.ts +0 -64
- package/dist/relay/mediaHandler.d.ts.map +0 -1
- package/dist/relay/mediaHandler.js +0 -1285
- package/dist/relay/mediaHandler.js.map +0 -1
- package/dist/relay/ndk.d.ts +0 -96
- package/dist/relay/ndk.d.ts.map +0 -1
- package/dist/relay/ndk.js +0 -502
- package/dist/relay/ndk.js.map +0 -1
- package/dist/relay/nostr-wasm.d.ts +0 -14
- package/dist/relay/nostr-wasm.d.ts.map +0 -1
- package/dist/relay/nostr-wasm.js.map +0 -1
- package/dist/relay/nostr.d.ts +0 -60
- package/dist/relay/nostr.d.ts.map +0 -1
- package/dist/relay/nostr.js +0 -207
- package/dist/relay/nostr.js.map +0 -1
- package/dist/relay/protocol.d.ts +0 -592
- package/dist/relay/protocol.d.ts.map +0 -1
- package/dist/relay/protocol.js +0 -16
- package/dist/relay/protocol.js.map +0 -1
- package/dist/relay/publicAssetUrl.d.ts +0 -6
- package/dist/relay/publicAssetUrl.d.ts.map +0 -1
- package/dist/relay/publicAssetUrl.js +0 -14
- package/dist/relay/publicAssetUrl.js.map +0 -1
- package/dist/relay/rootPathResolver.d.ts +0 -9
- package/dist/relay/rootPathResolver.d.ts.map +0 -1
- package/dist/relay/rootPathResolver.js +0 -32
- package/dist/relay/rootPathResolver.js.map +0 -1
- package/dist/relay/signing.d.ts +0 -50
- package/dist/relay/signing.d.ts.map +0 -1
- package/dist/relay/signing.js +0 -299
- package/dist/relay/signing.js.map +0 -1
- package/dist/relay/treeRootCache.d.ts +0 -86
- package/dist/relay/treeRootCache.d.ts.map +0 -1
- package/dist/relay/treeRootCache.js +0 -269
- package/dist/relay/treeRootCache.js.map +0 -1
- package/dist/relay/treeRootSubscription.d.ts +0 -55
- package/dist/relay/treeRootSubscription.d.ts.map +0 -1
- package/dist/relay/treeRootSubscription.js +0 -478
- package/dist/relay/treeRootSubscription.js.map +0 -1
- package/dist/relay/utils/constants.d.ts +0 -76
- package/dist/relay/utils/constants.d.ts.map +0 -1
- package/dist/relay/utils/constants.js +0 -113
- package/dist/relay/utils/constants.js.map +0 -1
- package/dist/relay/utils/errorMessage.d.ts +0 -5
- package/dist/relay/utils/errorMessage.d.ts.map +0 -1
- package/dist/relay/utils/errorMessage.js +0 -8
- package/dist/relay/utils/errorMessage.js.map +0 -1
- package/dist/relay/utils/lruCache.d.ts +0 -26
- package/dist/relay/utils/lruCache.d.ts.map +0 -1
- package/dist/relay/utils/lruCache.js +0 -66
- package/dist/relay/utils/lruCache.js.map +0 -1
- package/dist/relay/webrtc.d.ts +0 -2
- package/dist/relay/webrtc.d.ts.map +0 -1
- package/dist/relay/webrtc.js +0 -3
- package/dist/relay/webrtc.js.map +0 -1
- package/dist/relay/webrtcSignaling.d.ts +0 -37
- package/dist/relay/webrtcSignaling.d.ts.map +0 -1
- package/dist/relay/webrtcSignaling.js +0 -86
- package/dist/relay/webrtcSignaling.js.map +0 -1
- package/dist/relay/worker.d.ts +0 -12
- package/dist/relay/worker.d.ts.map +0 -1
- package/dist/relay/worker.js +0 -1540
- package/dist/relay/worker.js.map +0 -1
- package/dist/relay-client.d.ts +0 -31
- package/dist/relay-client.d.ts.map +0 -1
- package/dist/relay-client.js +0 -197
- package/dist/relay-client.js.map +0 -1
- package/dist/relay-entry.d.ts +0 -2
- package/dist/relay-entry.d.ts.map +0 -1
- package/dist/relay-entry.js +0 -2
- package/dist/relay-entry.js.map +0 -1
- package/dist/runtime-network.d.ts +0 -23
- package/dist/runtime-network.d.ts.map +0 -1
- package/dist/runtime-network.js +0 -105
- package/dist/runtime-network.js.map +0 -1
- package/dist/runtime.d.ts +0 -24
- package/dist/runtime.d.ts.map +0 -1
- package/dist/runtime.js +0 -126
- package/dist/runtime.js.map +0 -1
- package/dist/transferableBytes.d.ts +0 -2
- package/dist/transferableBytes.d.ts.map +0 -1
- package/dist/transferableBytes.js.map +0 -1
- package/dist/tree-root.d.ts +0 -201
- package/dist/tree-root.d.ts.map +0 -1
- package/dist/tree-root.js +0 -632
- package/dist/tree-root.js.map +0 -1
- package/dist/types.d.ts +0 -2
- package/dist/types.d.ts.map +0 -1
- package/dist/types.js +0 -2
- package/dist/types.js.map +0 -1
- package/dist/worker.d.ts +0 -9
- package/dist/worker.d.ts.map +0 -1
- package/dist/worker.js +0 -792
- package/dist/worker.js.map +0 -1
package/dist/worker.js
DELETED
|
@@ -1,792 +0,0 @@
|
|
|
1
|
-
/// <reference lib="webworker" />
|
|
2
|
-
import { HashTree, decryptChk, fromHex, nhashDecode, nhashEncode, toHex, tryDecodeTreeNode, } from '@hashtree/core';
|
|
3
|
-
import { IdbBlobStorage } from './capabilities/idbStorage.js';
|
|
4
|
-
import { BlossomTransport, DEFAULT_BLOSSOM_SERVERS } from './capabilities/blossomTransport.js';
|
|
5
|
-
import { probeConnectivity } from './capabilities/connectivity.js';
|
|
6
|
-
import { MeshRouterStore } from './capabilities/meshRouterStore.js';
|
|
7
|
-
import { resolveRootPathFromRelays, watchRootPathFromRelays } from './capabilities/rootResolver.js';
|
|
8
|
-
import { assertEncryptedUploadCid, markEncryptedHashes, shouldServeHashToPeer } from './privacyGuards.js';
|
|
9
|
-
import { streamFileRangeChunks } from './mediaStreaming.js';
|
|
10
|
-
import { cloneTransferableBytes } from './transferableBytes.js';
|
|
11
|
-
const DEFAULT_STORE_NAME = 'hashtree-worker';
|
|
12
|
-
const DEFAULT_STORAGE_MAX_BYTES = 1024 * 1024 * 1024;
|
|
13
|
-
const DEFAULT_CONNECTIVITY_PROBE_INTERVAL_MS = 20_000;
|
|
14
|
-
const P2P_FETCH_TIMEOUT_MS = 2_000;
|
|
15
|
-
let endpoint = null;
|
|
16
|
-
let endpointListener = null;
|
|
17
|
-
let storage = null;
|
|
18
|
-
let blossom = null;
|
|
19
|
-
let meshStore = null;
|
|
20
|
-
let tree = null;
|
|
21
|
-
let nostrRelays = [];
|
|
22
|
-
let probeInterval = null;
|
|
23
|
-
let probeIntervalMs = DEFAULT_CONNECTIVITY_PROBE_INTERVAL_MS;
|
|
24
|
-
let p2pFetchCounter = 0;
|
|
25
|
-
let rootWatchCounter = 0;
|
|
26
|
-
let diagnosticsEnabled = false;
|
|
27
|
-
let diagnosticsMirrorToConsole = false;
|
|
28
|
-
const pendingP2PFetches = new Map();
|
|
29
|
-
const peerShareableEncryptedHashes = new Set();
|
|
30
|
-
const activeRootWatches = new Map();
|
|
31
|
-
let putBlobStreamCounter = 0;
|
|
32
|
-
const activePutBlobStreams = new Map();
|
|
33
|
-
const MEDIA_CHUNK_SIZE = 64 * 1024;
|
|
34
|
-
function getErrorMessage(err) {
|
|
35
|
-
return err instanceof Error ? err.message : String(err);
|
|
36
|
-
}
|
|
37
|
-
const EMPTY_BLOSSOM_BANDWIDTH = {
|
|
38
|
-
totalBytesSent: 0,
|
|
39
|
-
totalBytesReceived: 0,
|
|
40
|
-
updatedAt: 0,
|
|
41
|
-
servers: [],
|
|
42
|
-
};
|
|
43
|
-
let blossomBandwidth = { ...EMPTY_BLOSSOM_BANDWIDTH };
|
|
44
|
-
function respond(message) {
|
|
45
|
-
endpoint?.postMessage(message);
|
|
46
|
-
}
|
|
47
|
-
function emitDiagnostic(level, scope, code, message, data) {
|
|
48
|
-
if (!diagnosticsEnabled && !diagnosticsMirrorToConsole) {
|
|
49
|
-
return;
|
|
50
|
-
}
|
|
51
|
-
const event = {
|
|
52
|
-
scope,
|
|
53
|
-
code,
|
|
54
|
-
level,
|
|
55
|
-
message,
|
|
56
|
-
timestamp: Date.now(),
|
|
57
|
-
data,
|
|
58
|
-
};
|
|
59
|
-
if (diagnosticsEnabled) {
|
|
60
|
-
respond({ type: 'diagnostic', event });
|
|
61
|
-
}
|
|
62
|
-
if (diagnosticsMirrorToConsole) {
|
|
63
|
-
const prefix = `[HashtreeWorker:${scope}:${code}] ${message}`;
|
|
64
|
-
if (level === 'error') {
|
|
65
|
-
console.error(prefix, data ?? {});
|
|
66
|
-
return;
|
|
67
|
-
}
|
|
68
|
-
if (level === 'warn') {
|
|
69
|
-
console.warn(prefix, data ?? {});
|
|
70
|
-
return;
|
|
71
|
-
}
|
|
72
|
-
console.log(prefix, data ?? {});
|
|
73
|
-
}
|
|
74
|
-
}
|
|
75
|
-
function publishBlossomBandwidth(stats) {
|
|
76
|
-
blossomBandwidth = {
|
|
77
|
-
totalBytesSent: stats.totalBytesSent,
|
|
78
|
-
totalBytesReceived: stats.totalBytesReceived,
|
|
79
|
-
updatedAt: stats.updatedAt,
|
|
80
|
-
servers: stats.servers.map(server => ({
|
|
81
|
-
url: server.url,
|
|
82
|
-
bytesSent: server.bytesSent,
|
|
83
|
-
bytesReceived: server.bytesReceived,
|
|
84
|
-
})),
|
|
85
|
-
};
|
|
86
|
-
respond({
|
|
87
|
-
type: 'blossomBandwidth',
|
|
88
|
-
stats: blossomBandwidth,
|
|
89
|
-
});
|
|
90
|
-
}
|
|
91
|
-
function resetState() {
|
|
92
|
-
if (probeInterval) {
|
|
93
|
-
clearInterval(probeInterval);
|
|
94
|
-
probeInterval = null;
|
|
95
|
-
}
|
|
96
|
-
for (const watch of activeRootWatches.values()) {
|
|
97
|
-
void Promise.resolve(watch.close()).catch(() => undefined);
|
|
98
|
-
}
|
|
99
|
-
activeRootWatches.clear();
|
|
100
|
-
storage?.close();
|
|
101
|
-
storage = null;
|
|
102
|
-
blossom = null;
|
|
103
|
-
meshStore = null;
|
|
104
|
-
tree = null;
|
|
105
|
-
for (const pending of pendingP2PFetches.values()) {
|
|
106
|
-
clearTimeout(pending.timeoutId);
|
|
107
|
-
}
|
|
108
|
-
pendingP2PFetches.clear();
|
|
109
|
-
peerShareableEncryptedHashes.clear();
|
|
110
|
-
activePutBlobStreams.clear();
|
|
111
|
-
blossomBandwidth = { ...EMPTY_BLOSSOM_BANDWIDTH };
|
|
112
|
-
nostrRelays = [];
|
|
113
|
-
diagnosticsEnabled = false;
|
|
114
|
-
diagnosticsMirrorToConsole = false;
|
|
115
|
-
}
|
|
116
|
-
async function markEncryptedTreeHashesAsPeerShareable(id) {
|
|
117
|
-
if (!tree)
|
|
118
|
-
return;
|
|
119
|
-
const hashes = [];
|
|
120
|
-
for await (const block of tree.walkBlocks(id)) {
|
|
121
|
-
hashes.push(toHex(block.hash));
|
|
122
|
-
}
|
|
123
|
-
markEncryptedHashes(hashes, peerShareableEncryptedHashes);
|
|
124
|
-
}
|
|
125
|
-
async function emitConnectivityUpdate() {
|
|
126
|
-
if (!blossom)
|
|
127
|
-
return;
|
|
128
|
-
const state = await probeConnectivity(blossom.getServers());
|
|
129
|
-
respond({ type: 'connectivityUpdate', state });
|
|
130
|
-
}
|
|
131
|
-
function startConnectivityProbeLoop() {
|
|
132
|
-
if (probeInterval) {
|
|
133
|
-
clearInterval(probeInterval);
|
|
134
|
-
probeInterval = null;
|
|
135
|
-
}
|
|
136
|
-
probeInterval = setInterval(() => {
|
|
137
|
-
void emitConnectivityUpdate();
|
|
138
|
-
}, probeIntervalMs);
|
|
139
|
-
}
|
|
140
|
-
function nextP2PFetchRequestId() {
|
|
141
|
-
p2pFetchCounter += 1;
|
|
142
|
-
return `p2p_${Date.now()}_${p2pFetchCounter}`;
|
|
143
|
-
}
|
|
144
|
-
function nextRootWatchId() {
|
|
145
|
-
rootWatchCounter += 1;
|
|
146
|
-
return `root_${Date.now()}_${rootWatchCounter}`;
|
|
147
|
-
}
|
|
148
|
-
async function requestP2PBlob(hashHex) {
|
|
149
|
-
const requestId = nextP2PFetchRequestId();
|
|
150
|
-
const data = await new Promise((resolve) => {
|
|
151
|
-
const timeoutId = setTimeout(() => {
|
|
152
|
-
pendingP2PFetches.delete(requestId);
|
|
153
|
-
resolve(null);
|
|
154
|
-
}, P2P_FETCH_TIMEOUT_MS);
|
|
155
|
-
pendingP2PFetches.set(requestId, { resolve, timeoutId });
|
|
156
|
-
respond({ type: 'p2pFetch', requestId, hashHex });
|
|
157
|
-
});
|
|
158
|
-
return data;
|
|
159
|
-
}
|
|
160
|
-
function resolveP2PFetch(requestId, data, error) {
|
|
161
|
-
const pending = pendingP2PFetches.get(requestId);
|
|
162
|
-
if (!pending)
|
|
163
|
-
return;
|
|
164
|
-
clearTimeout(pending.timeoutId);
|
|
165
|
-
pendingP2PFetches.delete(requestId);
|
|
166
|
-
if (error || !data) {
|
|
167
|
-
pending.resolve(null);
|
|
168
|
-
return;
|
|
169
|
-
}
|
|
170
|
-
pending.resolve(data);
|
|
171
|
-
}
|
|
172
|
-
async function loadBlobData(hashHex) {
|
|
173
|
-
if (!meshStore)
|
|
174
|
-
return null;
|
|
175
|
-
const result = await meshStore.getDetailed(fromHex(hashHex));
|
|
176
|
-
if (!result)
|
|
177
|
-
return null;
|
|
178
|
-
const source = result.sourceId === 'idb'
|
|
179
|
-
? 'idb'
|
|
180
|
-
: result.sourceId === 'blossom'
|
|
181
|
-
? 'blossom'
|
|
182
|
-
: 'p2p';
|
|
183
|
-
return { data: result.data, source };
|
|
184
|
-
}
|
|
185
|
-
function createStorageStore() {
|
|
186
|
-
return {
|
|
187
|
-
put: async (hash, data) => {
|
|
188
|
-
if (!storage)
|
|
189
|
-
throw new Error('Worker storage not initialized');
|
|
190
|
-
await storage.putByHashTrusted(toHex(hash), data);
|
|
191
|
-
return true;
|
|
192
|
-
},
|
|
193
|
-
get: async (hash) => {
|
|
194
|
-
if (!storage) {
|
|
195
|
-
return null;
|
|
196
|
-
}
|
|
197
|
-
return storage.get(toHex(hash));
|
|
198
|
-
},
|
|
199
|
-
has: async (hash) => {
|
|
200
|
-
if (!storage)
|
|
201
|
-
return false;
|
|
202
|
-
return storage.has(toHex(hash));
|
|
203
|
-
},
|
|
204
|
-
delete: async (hash) => {
|
|
205
|
-
if (!storage)
|
|
206
|
-
return false;
|
|
207
|
-
return storage.delete(toHex(hash));
|
|
208
|
-
},
|
|
209
|
-
};
|
|
210
|
-
}
|
|
211
|
-
function createMeshStore() {
|
|
212
|
-
return new MeshRouterStore({
|
|
213
|
-
primary: createStorageStore(),
|
|
214
|
-
primarySourceId: 'idb',
|
|
215
|
-
requestTimeoutMs: 5_500,
|
|
216
|
-
sources: [
|
|
217
|
-
{
|
|
218
|
-
id: 'p2p',
|
|
219
|
-
get: async (hash) => requestP2PBlob(toHex(hash)),
|
|
220
|
-
},
|
|
221
|
-
{
|
|
222
|
-
id: 'blossom',
|
|
223
|
-
isAvailable: () => !!blossom && blossom.getServers().some((server) => server.read !== false),
|
|
224
|
-
get: async (hash) => blossom ? blossom.fetch(toHex(hash)) : null,
|
|
225
|
-
},
|
|
226
|
-
],
|
|
227
|
-
});
|
|
228
|
-
}
|
|
229
|
-
async function getPlaintextFileSize(fileCid) {
|
|
230
|
-
if (!tree)
|
|
231
|
-
return null;
|
|
232
|
-
if (!fileCid.key) {
|
|
233
|
-
return tree.getSize(fileCid.hash);
|
|
234
|
-
}
|
|
235
|
-
const loaded = await loadBlobData(toHex(fileCid.hash));
|
|
236
|
-
if (!loaded)
|
|
237
|
-
return null;
|
|
238
|
-
const decryptedRoot = await decryptChk(loaded.data, fileCid.key);
|
|
239
|
-
const rootNode = tryDecodeTreeNode(decryptedRoot);
|
|
240
|
-
if (!rootNode) {
|
|
241
|
-
return decryptedRoot.byteLength;
|
|
242
|
-
}
|
|
243
|
-
const summedSize = rootNode.links.reduce((sum, link) => sum + (link.size ?? 0), 0);
|
|
244
|
-
if (summedSize > 0) {
|
|
245
|
-
return summedSize;
|
|
246
|
-
}
|
|
247
|
-
const fullData = await tree.readFile(fileCid);
|
|
248
|
-
return fullData?.byteLength ?? 0;
|
|
249
|
-
}
|
|
250
|
-
function decodeDownloadName(path) {
|
|
251
|
-
try {
|
|
252
|
-
return decodeURIComponent(path.split('/').pop() || 'file');
|
|
253
|
-
}
|
|
254
|
-
catch {
|
|
255
|
-
return path.split('/').pop() || 'file';
|
|
256
|
-
}
|
|
257
|
-
}
|
|
258
|
-
function postMediaError(port, requestId, message) {
|
|
259
|
-
emitDiagnostic('warn', 'media', 'media-request-error', message, { requestId });
|
|
260
|
-
const response = { type: 'error', requestId, message };
|
|
261
|
-
port.postMessage(response);
|
|
262
|
-
}
|
|
263
|
-
async function handleMediaFileRequest(port, request) {
|
|
264
|
-
if (!tree) {
|
|
265
|
-
emitDiagnostic('error', 'media', 'worker-not-initialized', 'Worker not initialized for media request', {
|
|
266
|
-
requestId: request.requestId,
|
|
267
|
-
});
|
|
268
|
-
postMediaError(port, request.requestId, 'Worker not initialized');
|
|
269
|
-
return;
|
|
270
|
-
}
|
|
271
|
-
let rootCid;
|
|
272
|
-
try {
|
|
273
|
-
rootCid = nhashDecode(request.nhash);
|
|
274
|
-
}
|
|
275
|
-
catch {
|
|
276
|
-
emitDiagnostic('warn', 'media', 'invalid-nhash', 'Invalid nhash for media request', {
|
|
277
|
-
requestId: request.requestId,
|
|
278
|
-
});
|
|
279
|
-
postMediaError(port, request.requestId, 'Invalid nhash');
|
|
280
|
-
return;
|
|
281
|
-
}
|
|
282
|
-
emitDiagnostic('debug', 'media', 'request-start', 'Handling media request', {
|
|
283
|
-
requestId: request.requestId,
|
|
284
|
-
start: request.start,
|
|
285
|
-
end: typeof request.end === 'number' ? request.end : null,
|
|
286
|
-
head: request.head === true,
|
|
287
|
-
});
|
|
288
|
-
let cid = rootCid;
|
|
289
|
-
const requestedPath = request.path.trim().replace(/^\/+/, '');
|
|
290
|
-
if (requestedPath) {
|
|
291
|
-
const resolved = await tree.resolvePath(rootCid, requestedPath);
|
|
292
|
-
if (resolved) {
|
|
293
|
-
cid = resolved.cid;
|
|
294
|
-
}
|
|
295
|
-
else if (await tree.isDirectory(rootCid)) {
|
|
296
|
-
emitDiagnostic('warn', 'media', 'file-not-found', 'Media file path not found', {
|
|
297
|
-
requestId: request.requestId,
|
|
298
|
-
});
|
|
299
|
-
postMediaError(port, request.requestId, 'File not found');
|
|
300
|
-
return;
|
|
301
|
-
}
|
|
302
|
-
}
|
|
303
|
-
const totalSize = await getPlaintextFileSize(cid);
|
|
304
|
-
if (totalSize === null) {
|
|
305
|
-
emitDiagnostic('warn', 'media', 'size-not-found', 'Media file size unavailable', {
|
|
306
|
-
requestId: request.requestId,
|
|
307
|
-
});
|
|
308
|
-
postMediaError(port, request.requestId, 'File not found');
|
|
309
|
-
return;
|
|
310
|
-
}
|
|
311
|
-
if (totalSize === 0) {
|
|
312
|
-
const headersMessage = {
|
|
313
|
-
type: 'headers',
|
|
314
|
-
requestId: request.requestId,
|
|
315
|
-
status: 200,
|
|
316
|
-
totalSize,
|
|
317
|
-
headers: {
|
|
318
|
-
'content-type': request.mimeType || 'application/octet-stream',
|
|
319
|
-
'accept-ranges': 'bytes',
|
|
320
|
-
'content-length': '0',
|
|
321
|
-
},
|
|
322
|
-
};
|
|
323
|
-
port.postMessage(headersMessage);
|
|
324
|
-
const doneMessage = { type: 'done', requestId: request.requestId };
|
|
325
|
-
port.postMessage(doneMessage);
|
|
326
|
-
return;
|
|
327
|
-
}
|
|
328
|
-
const start = Number.isFinite(request.start) ? Math.max(0, Math.floor(request.start)) : 0;
|
|
329
|
-
if (start >= totalSize) {
|
|
330
|
-
const headers = {
|
|
331
|
-
type: 'headers',
|
|
332
|
-
requestId: request.requestId,
|
|
333
|
-
status: 416,
|
|
334
|
-
totalSize,
|
|
335
|
-
headers: {
|
|
336
|
-
'content-type': request.mimeType || 'application/octet-stream',
|
|
337
|
-
'content-range': `bytes */${totalSize}`,
|
|
338
|
-
},
|
|
339
|
-
};
|
|
340
|
-
port.postMessage(headers);
|
|
341
|
-
const done = { type: 'done', requestId: request.requestId };
|
|
342
|
-
port.postMessage(done);
|
|
343
|
-
return;
|
|
344
|
-
}
|
|
345
|
-
const requestedEnd = Number.isFinite(request.end) && typeof request.end === 'number'
|
|
346
|
-
? Math.floor(request.end)
|
|
347
|
-
: totalSize - 1;
|
|
348
|
-
const end = Math.min(totalSize - 1, Math.max(start, requestedEnd));
|
|
349
|
-
const isPartial = start !== 0 || end !== totalSize - 1;
|
|
350
|
-
const expectedLength = end - start + 1;
|
|
351
|
-
const responseHeaders = {
|
|
352
|
-
'content-type': request.mimeType || 'application/octet-stream',
|
|
353
|
-
'accept-ranges': 'bytes',
|
|
354
|
-
'content-length': String(expectedLength),
|
|
355
|
-
};
|
|
356
|
-
if (isPartial) {
|
|
357
|
-
responseHeaders['content-range'] = `bytes ${start}-${end}/${totalSize}`;
|
|
358
|
-
}
|
|
359
|
-
if (request.download) {
|
|
360
|
-
const fileName = decodeDownloadName(request.path).replace(/["\\]/g, '_');
|
|
361
|
-
responseHeaders['content-disposition'] = `attachment; filename="${fileName}"`;
|
|
362
|
-
}
|
|
363
|
-
const headersMessage = {
|
|
364
|
-
type: 'headers',
|
|
365
|
-
requestId: request.requestId,
|
|
366
|
-
status: isPartial ? 206 : 200,
|
|
367
|
-
totalSize,
|
|
368
|
-
headers: responseHeaders,
|
|
369
|
-
};
|
|
370
|
-
port.postMessage(headersMessage);
|
|
371
|
-
if (!request.head) {
|
|
372
|
-
for await (const chunk of streamFileRangeChunks(tree, cid, start, end, MEDIA_CHUNK_SIZE)) {
|
|
373
|
-
const transferableChunk = cloneTransferableBytes(chunk);
|
|
374
|
-
const chunkMessage = {
|
|
375
|
-
type: 'chunk',
|
|
376
|
-
requestId: request.requestId,
|
|
377
|
-
data: transferableChunk,
|
|
378
|
-
};
|
|
379
|
-
port.postMessage(chunkMessage, [transferableChunk.buffer]);
|
|
380
|
-
}
|
|
381
|
-
}
|
|
382
|
-
emitDiagnostic('debug', 'media', 'request-complete', 'Completed media request', {
|
|
383
|
-
requestId: request.requestId,
|
|
384
|
-
totalSize,
|
|
385
|
-
status: isPartial ? 206 : 200,
|
|
386
|
-
});
|
|
387
|
-
const doneMessage = { type: 'done', requestId: request.requestId };
|
|
388
|
-
port.postMessage(doneMessage);
|
|
389
|
-
}
|
|
390
|
-
function registerMediaPort(port) {
|
|
391
|
-
emitDiagnostic('info', 'media', 'port-registered', 'Registered media MessagePort');
|
|
392
|
-
port.onmessage = (event) => {
|
|
393
|
-
const data = event.data;
|
|
394
|
-
if (!data || data.type !== 'hashtree-file' || typeof data.requestId !== 'string') {
|
|
395
|
-
return;
|
|
396
|
-
}
|
|
397
|
-
if (typeof data.nhash !== 'string' || typeof data.path !== 'string') {
|
|
398
|
-
emitDiagnostic('warn', 'media', 'invalid-request', 'Received invalid media request payload', {
|
|
399
|
-
requestId: data.requestId,
|
|
400
|
-
});
|
|
401
|
-
postMediaError(port, data.requestId, 'Invalid media request');
|
|
402
|
-
return;
|
|
403
|
-
}
|
|
404
|
-
const request = {
|
|
405
|
-
type: 'hashtree-file',
|
|
406
|
-
requestId: data.requestId,
|
|
407
|
-
nhash: data.nhash,
|
|
408
|
-
path: data.path,
|
|
409
|
-
start: typeof data.start === 'number' ? data.start : 0,
|
|
410
|
-
end: typeof data.end === 'number' ? data.end : undefined,
|
|
411
|
-
mimeType: typeof data.mimeType === 'string' ? data.mimeType : undefined,
|
|
412
|
-
download: !!data.download,
|
|
413
|
-
head: !!data.head,
|
|
414
|
-
};
|
|
415
|
-
void handleMediaFileRequest(port, request).catch((err) => {
|
|
416
|
-
postMediaError(port, request.requestId, getErrorMessage(err));
|
|
417
|
-
});
|
|
418
|
-
};
|
|
419
|
-
}
|
|
420
|
-
function init(config) {
|
|
421
|
-
resetState();
|
|
422
|
-
const storeName = config.storeName || DEFAULT_STORE_NAME;
|
|
423
|
-
const maxBytes = config.storageMaxBytes || DEFAULT_STORAGE_MAX_BYTES;
|
|
424
|
-
probeIntervalMs = config.connectivityProbeIntervalMs || DEFAULT_CONNECTIVITY_PROBE_INTERVAL_MS;
|
|
425
|
-
nostrRelays = config.relays ?? [];
|
|
426
|
-
diagnosticsEnabled = config.diagnosticsEnabled === true;
|
|
427
|
-
diagnosticsMirrorToConsole = config.diagnosticsMirrorToConsole === true;
|
|
428
|
-
storage = new IdbBlobStorage(storeName, maxBytes);
|
|
429
|
-
blossom = new BlossomTransport(config.blossomServers || DEFAULT_BLOSSOM_SERVERS, (stats) => {
|
|
430
|
-
publishBlossomBandwidth(stats);
|
|
431
|
-
});
|
|
432
|
-
meshStore = createMeshStore();
|
|
433
|
-
tree = new HashTree({ store: meshStore });
|
|
434
|
-
publishBlossomBandwidth(blossom.getBandwidthStats());
|
|
435
|
-
emitDiagnostic('info', 'worker', 'initialized', 'Hashtree worker initialized', {
|
|
436
|
-
storeName,
|
|
437
|
-
relayCount: nostrRelays.length,
|
|
438
|
-
diagnosticsMirrorToConsole,
|
|
439
|
-
});
|
|
440
|
-
startConnectivityProbeLoop();
|
|
441
|
-
void emitConnectivityUpdate();
|
|
442
|
-
}
|
|
443
|
-
function nextPutBlobStreamId() {
|
|
444
|
-
putBlobStreamCounter += 1;
|
|
445
|
-
return `pbs_${Date.now()}_${putBlobStreamCounter}`;
|
|
446
|
-
}
|
|
447
|
-
function startBlossomUploadProgress(hashHex, nhash, fileCid) {
|
|
448
|
-
if (!blossom || !tree)
|
|
449
|
-
return;
|
|
450
|
-
const writeServers = blossom.getWriteServers();
|
|
451
|
-
if (writeServers.length === 0)
|
|
452
|
-
return;
|
|
453
|
-
const chunkProgressEmitIntervalMs = 100;
|
|
454
|
-
const progress = {
|
|
455
|
-
hashHex,
|
|
456
|
-
nhash,
|
|
457
|
-
totalServers: writeServers.length,
|
|
458
|
-
processedServers: 0,
|
|
459
|
-
uploadedServers: 0,
|
|
460
|
-
skippedServers: 0,
|
|
461
|
-
failedServers: 0,
|
|
462
|
-
totalChunks: 0,
|
|
463
|
-
processedChunks: 0,
|
|
464
|
-
progressRatio: 0,
|
|
465
|
-
complete: false,
|
|
466
|
-
};
|
|
467
|
-
const serverStats = new Map();
|
|
468
|
-
for (const server of writeServers) {
|
|
469
|
-
serverStats.set(server.url, { url: server.url, uploaded: 0, skipped: 0, failed: 0 });
|
|
470
|
-
}
|
|
471
|
-
let lastChunkProgressEmit = 0;
|
|
472
|
-
const syncServerStatuses = () => {
|
|
473
|
-
progress.serverStatuses = Array.from(serverStats.values())
|
|
474
|
-
.map((status) => ({ ...status }))
|
|
475
|
-
.sort((a, b) => a.url.localeCompare(b.url));
|
|
476
|
-
};
|
|
477
|
-
const emitProgress = () => {
|
|
478
|
-
syncServerStatuses();
|
|
479
|
-
respond({ type: 'uploadProgress', progress: { ...progress } });
|
|
480
|
-
};
|
|
481
|
-
emitProgress();
|
|
482
|
-
const onUploadProgress = (serverUrl, status) => {
|
|
483
|
-
const stats = serverStats.get(serverUrl);
|
|
484
|
-
if (!stats)
|
|
485
|
-
return;
|
|
486
|
-
stats[status]++;
|
|
487
|
-
};
|
|
488
|
-
void (async () => {
|
|
489
|
-
const uploadStore = blossom.createUploadStore(onUploadProgress);
|
|
490
|
-
const result = await tree.push(fileCid, uploadStore, {
|
|
491
|
-
onProgress: (current, total) => {
|
|
492
|
-
if (total <= 0 || progress.complete)
|
|
493
|
-
return;
|
|
494
|
-
const fraction = current / total;
|
|
495
|
-
progress.totalChunks = total;
|
|
496
|
-
progress.processedChunks = current;
|
|
497
|
-
progress.progressRatio = Math.max(0, Math.min(1, fraction));
|
|
498
|
-
const processedEstimate = Math.min(progress.totalServers, Math.max(0, Math.floor(fraction * progress.totalServers)));
|
|
499
|
-
const serverEstimateChanged = processedEstimate !== progress.processedServers;
|
|
500
|
-
if (serverEstimateChanged) {
|
|
501
|
-
progress.processedServers = processedEstimate;
|
|
502
|
-
}
|
|
503
|
-
const now = Date.now();
|
|
504
|
-
const shouldEmitChunkProgress = now - lastChunkProgressEmit >= chunkProgressEmitIntervalMs || current >= total;
|
|
505
|
-
if (serverEstimateChanged || shouldEmitChunkProgress) {
|
|
506
|
-
lastChunkProgressEmit = now;
|
|
507
|
-
emitProgress();
|
|
508
|
-
}
|
|
509
|
-
},
|
|
510
|
-
});
|
|
511
|
-
let uploadedServers = 0;
|
|
512
|
-
let skippedServers = 0;
|
|
513
|
-
let failedServers = 0;
|
|
514
|
-
for (const [, stats] of serverStats) {
|
|
515
|
-
if (stats.failed > 0) {
|
|
516
|
-
failedServers++;
|
|
517
|
-
}
|
|
518
|
-
else if (stats.uploaded > 0) {
|
|
519
|
-
uploadedServers++;
|
|
520
|
-
}
|
|
521
|
-
else {
|
|
522
|
-
skippedServers++;
|
|
523
|
-
}
|
|
524
|
-
}
|
|
525
|
-
progress.uploadedServers = uploadedServers;
|
|
526
|
-
progress.skippedServers = skippedServers;
|
|
527
|
-
progress.failedServers = failedServers;
|
|
528
|
-
progress.processedServers = progress.totalServers;
|
|
529
|
-
if (typeof progress.totalChunks === 'number' && progress.totalChunks > 0) {
|
|
530
|
-
progress.processedChunks = progress.totalChunks;
|
|
531
|
-
}
|
|
532
|
-
progress.progressRatio = 1;
|
|
533
|
-
progress.complete = true;
|
|
534
|
-
if (result.failed > 0 && result.errors.length > 0) {
|
|
535
|
-
progress.error = result.errors[0].error.message;
|
|
536
|
-
}
|
|
537
|
-
emitProgress();
|
|
538
|
-
})().catch((err) => {
|
|
539
|
-
if (progress.complete)
|
|
540
|
-
return;
|
|
541
|
-
progress.failedServers = progress.totalServers;
|
|
542
|
-
progress.processedServers = progress.totalServers;
|
|
543
|
-
if (typeof progress.totalChunks === 'number' && progress.totalChunks > 0) {
|
|
544
|
-
progress.processedChunks = progress.totalChunks;
|
|
545
|
-
}
|
|
546
|
-
progress.progressRatio = 1;
|
|
547
|
-
progress.complete = true;
|
|
548
|
-
progress.error = getErrorMessage(err);
|
|
549
|
-
emitProgress();
|
|
550
|
-
});
|
|
551
|
-
}
|
|
552
|
-
/**
 * Replies to a putBlob/finishPutBlobStream request with a 'blobStored'
 * message, and — when the blob is destined for upload — first kicks off
 * background Blossom upload progress reporting for it.
 */
function respondBlobStored(id, fileCid, upload) {
    const encodedHashHex = toHex(fileCid.hash);
    const encodedNhash = nhashEncode(fileCid);
    if (upload) {
        startBlossomUploadProgress(encodedHashHex, encodedNhash, fileCid);
    }
    respond({ type: 'blobStored', id, hashHex: encodedHashHex, nhash: encodedNhash });
}
|
|
565
|
-
/**
 * Dispatches a single request message from the client to the matching
 * worker operation and emits the corresponding response via `respond`.
 *
 * Most cases guard on the module-level singletons (`storage`, `blossom`,
 * `tree`) and answer with an error payload when the worker has not been
 * initialized via an 'init' request yet. Unknown request types fall
 * through the switch and get no response.
 */
async function handleRequest(req) {
    switch (req.type) {
        case 'init': {
            init(req.config);
            respond({ type: 'ready', id: req.id });
            return;
        }
        case 'close': {
            resetState();
            respond({ type: 'void', id: req.id });
            return;
        }
        case 'putBlob': {
            if (!storage || !blossom || !tree) {
                respond({ type: 'error', id: req.id, error: 'Worker not initialized' });
                return;
            }
            let fileCid;
            // upload === false stores the raw blob locally only; otherwise the
            // data goes through putFile (encrypted) and is marked shareable.
            if (req.upload === false) {
                const hash = await tree.putBlob(req.data);
                fileCid = { hash };
            }
            else {
                const fileResult = await tree.putFile(req.data);
                fileCid = fileResult.cid;
                assertEncryptedUploadCid(fileCid);
                await markEncryptedTreeHashesAsPeerShareable(fileCid);
            }
            respondBlobStored(req.id, fileCid, req.upload !== false);
            return;
        }
        case 'beginPutBlobStream': {
            if (!tree) {
                respond({ type: 'error', id: req.id, error: 'Worker not initialized' });
                return;
            }
            const upload = req.upload !== false;
            const streamId = nextPutBlobStreamId();
            // Non-upload streams are written unencrypted.
            const writer = tree.createStream({ unencrypted: !upload });
            activePutBlobStreams.set(streamId, { upload, writer });
            respond({ type: 'blobStreamStarted', id: req.id, streamId });
            return;
        }
        case 'appendPutBlobStream': {
            const stream = activePutBlobStreams.get(req.streamId);
            if (!stream) {
                // NOTE: answers with type 'void' (carrying an error field), unlike
                // finishPutBlobStream which answers with type 'error'.
                respond({ type: 'void', id: req.id, error: 'Upload stream not found' });
                return;
            }
            await stream.writer.append(req.chunk);
            respond({ type: 'void', id: req.id });
            return;
        }
        case 'finishPutBlobStream': {
            const stream = activePutBlobStreams.get(req.streamId);
            if (!stream) {
                respond({ type: 'error', id: req.id, error: 'Upload stream not found' });
                return;
            }
            // Remove before finalize so a concurrent cancel/append cannot race
            // on a stream that is already being finalized.
            activePutBlobStreams.delete(req.streamId);
            const finalized = await stream.writer.finalize();
            const fileCid = finalized.key
                ? { hash: finalized.hash, key: finalized.key }
                : { hash: finalized.hash };
            if (stream.upload) {
                assertEncryptedUploadCid(fileCid);
                await markEncryptedTreeHashesAsPeerShareable(fileCid);
            }
            respondBlobStored(req.id, fileCid, stream.upload);
            return;
        }
        case 'cancelPutBlobStream': {
            // Dropping the map entry abandons the writer; no error if absent.
            activePutBlobStreams.delete(req.streamId);
            respond({ type: 'void', id: req.id });
            return;
        }
        case 'p2pFetchResult': {
            // Fire-and-forget: resolves a pending P2P fetch, no response sent.
            resolveP2PFetch(req.requestId, req.data, req.error);
            return;
        }
        case 'getBlob': {
            if (!storage) {
                respond({ type: 'blob', id: req.id, error: 'Worker not initialized' });
                return;
            }
            // Peer-originated requests may only be served blobs explicitly
            // marked as peer-shareable (privacy guard).
            if (req.forPeer && !shouldServeHashToPeer(req.hashHex, peerShareableEncryptedHashes)) {
                respond({ type: 'blob', id: req.id, error: 'Refusing to serve non-encrypted or untrusted blob to peer' });
                return;
            }
            const loaded = await loadBlobData(req.hashHex);
            if (!loaded) {
                respond({ type: 'blob', id: req.id, error: 'Blob not found' });
                return;
            }
            respond({ type: 'blob', id: req.id, data: loaded.data, source: loaded.source });
            return;
        }
        case 'registerMediaPort': {
            if (!storage) {
                respond({ type: 'void', id: req.id, error: 'Worker not initialized' });
                return;
            }
            registerMediaPort(req.port);
            respond({ type: 'void', id: req.id });
            return;
        }
        case 'setBlossomServers': {
            if (!blossom) {
                respond({ type: 'void', id: req.id, error: 'Worker not initialized' });
                return;
            }
            blossom.setServers(req.servers);
            respond({ type: 'void', id: req.id });
            // Connectivity refresh runs in the background after the ack.
            void emitConnectivityUpdate();
            return;
        }
        case 'setStorageMaxBytes': {
            if (!storage) {
                respond({ type: 'void', id: req.id, error: 'Worker not initialized' });
                return;
            }
            storage.setMaxBytes(req.maxBytes);
            respond({ type: 'void', id: req.id });
            return;
        }
        case 'getStorageStats': {
            if (!storage) {
                // Uninitialized worker reports zeroed stats plus an error field.
                respond({
                    type: 'storageStats',
                    id: req.id,
                    items: 0,
                    bytes: 0,
                    maxBytes: 0,
                    error: 'Worker not initialized',
                });
                return;
            }
            const stats = await storage.getStats();
            respond({ type: 'storageStats', id: req.id, ...stats });
            return;
        }
        case 'probeConnectivity': {
            if (!blossom) {
                respond({ type: 'connectivity', id: req.id, error: 'Worker not initialized' });
                return;
            }
            const state = await probeConnectivity(blossom.getServers());
            respond({ type: 'connectivity', id: req.id, state });
            return;
        }
        case 'resolveRoot': {
            if (!tree) {
                respond({ type: 'cid', id: req.id, error: 'Worker not initialized' });
                return;
            }
            try {
                const cid = await resolveRootPathFromRelays(tree, nostrRelays, req.npub, req.path, req.timeoutMs, req.settleMs);
                // null is normalized to undefined so the field is omitted on the wire.
                respond({ type: 'cid', id: req.id, cid: cid ?? undefined });
            }
            catch (err) {
                respond({ type: 'cid', id: req.id, error: getErrorMessage(err) });
            }
            return;
        }
        case 'watchRoot': {
            if (!tree) {
                respond({ type: 'rootWatchStarted', id: req.id, watchId: '', error: 'Worker not initialized' });
                return;
            }
            const watchId = nextRootWatchId();
            try {
                // Each relay update is forwarded as an unsolicited 'rootUpdate'
                // message tagged with this watchId.
                const watch = await watchRootPathFromRelays(tree, nostrRelays, req.npub, req.path, (cid) => {
                    respond({ type: 'rootUpdate', watchId, cid: cid ?? undefined });
                }, req.timeoutMs, req.settleMs);
                activeRootWatches.set(watchId, { close: watch.close });
                respond({
                    type: 'rootWatchStarted',
                    id: req.id,
                    watchId,
                    // Include the initial CID only when the relay already had one.
                    ...(watch.initialCid ? { cid: watch.initialCid } : {}),
                });
            }
            catch (err) {
                respond({ type: 'rootWatchStarted', id: req.id, watchId: '', error: getErrorMessage(err) });
            }
            return;
        }
        case 'unwatchRoot': {
            const watch = activeRootWatches.get(req.watchId);
            activeRootWatches.delete(req.watchId);
            if (watch) {
                // Best-effort close; failures during teardown are ignored.
                await Promise.resolve(watch.close()).catch(() => undefined);
            }
            respond({ type: 'void', id: req.id });
            return;
        }
    }
}
|
|
763
|
-
/**
 * Type guard: returns true when `value` looks like a worker request
 * message — a non-null object carrying a string `type` field.
 */
function isWorkerRequestMessage(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    return typeof value.type === 'string';
}
|
|
768
|
-
/**
 * Wires the worker's request handling onto a message endpoint (defaults to
 * the worker global `self`). Replaces any previously attached endpoint.
 *
 * @param {Worker | MessagePort | typeof self} [target] - Endpoint to listen on.
 * @returns {() => void} Detach function that removes this attachment's
 *   listener from `target` and clears the module-level endpoint state if this
 *   attachment is still the current one.
 */
export function attachHashtreeWorker(target = self) {
    // Detach the listener from any previously attached endpoint first.
    if (endpoint && endpointListener) {
        endpoint.removeEventListener('message', endpointListener);
    }
    // Capture the listener in a local const so the cleanup closure below
    // always removes THIS listener from `target`. The original read the
    // mutable module-level `endpointListener` at cleanup time, so a later
    // attach call would make the first cleanup remove the wrong listener
    // (or pass null to removeEventListener).
    const listener = (event) => {
        const req = event.data;
        if (!isWorkerRequestMessage(req)) {
            return;
        }
        // Surface any unhandled failure as an error response for this request.
        void handleRequest(req).catch((err) => {
            respond({ type: 'error', id: req.id, error: getErrorMessage(err) });
        });
    };
    endpoint = target;
    endpointListener = listener;
    target.addEventListener('message', listener);
    // MessagePort endpoints must be started before they deliver messages;
    // Worker globals have no start(), hence the optional call.
    target.start?.();
    return () => {
        target.removeEventListener('message', listener);
        // Only clear module state if no newer attach has replaced us.
        if (endpoint === target) {
            endpoint = null;
            endpointListener = null;
        }
    };
}
|
|
792
|
-
//# sourceMappingURL=worker.js.map
|