@hashtree/worker 0.2.0 → 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +7 -3
- package/src/app-runtime.ts +393 -0
- package/src/capabilities/blossomBandwidthTracker.ts +74 -0
- package/src/capabilities/blossomTransport.ts +179 -0
- package/src/capabilities/connectivity.ts +54 -0
- package/src/capabilities/idbStorage.ts +94 -0
- package/src/capabilities/meshRouterStore.ts +426 -0
- package/src/capabilities/rootResolver.ts +497 -0
- package/src/client-id.ts +137 -0
- package/src/client.ts +501 -0
- package/src/entry.ts +3 -0
- package/src/htree-path.ts +53 -0
- package/src/htree-url.ts +156 -0
- package/src/index.ts +76 -0
- package/src/mediaStreaming.ts +64 -0
- package/src/p2p/boundedQueue.ts +168 -0
- package/src/p2p/errorMessage.ts +6 -0
- package/src/p2p/index.ts +48 -0
- package/src/p2p/lruCache.ts +78 -0
- package/src/p2p/meshQueryRouter.ts +361 -0
- package/src/p2p/protocol.ts +11 -0
- package/src/p2p/queryForwardingMachine.ts +197 -0
- package/src/p2p/signaling.ts +284 -0
- package/src/p2p/uploadRateLimiter.ts +85 -0
- package/src/p2p/webrtcController.ts +1168 -0
- package/src/p2p/webrtcProxy.ts +519 -0
- package/src/privacyGuards.ts +31 -0
- package/src/protocol.ts +124 -0
- package/src/relay/identity.ts +86 -0
- package/src/relay/mediaHandler.ts +1633 -0
- package/src/relay/ndk.ts +590 -0
- package/src/relay/nostr-wasm.ts +249 -0
- package/src/relay/nostr.ts +249 -0
- package/src/relay/protocol.ts +361 -0
- package/src/relay/publicAssetUrl.ts +25 -0
- package/src/relay/rootPathResolver.ts +50 -0
- package/src/relay/shims.d.ts +17 -0
- package/src/relay/signing.ts +332 -0
- package/src/relay/treeRootCache.ts +354 -0
- package/src/relay/treeRootSubscription.ts +577 -0
- package/src/relay/utils/constants.ts +139 -0
- package/src/relay/utils/errorMessage.ts +7 -0
- package/src/relay/utils/lruCache.ts +79 -0
- package/src/relay/webrtc.ts +5 -0
- package/src/relay/webrtcSignaling.ts +108 -0
- package/src/relay/worker.ts +1787 -0
- package/src/relay-client.ts +265 -0
- package/src/relay-entry.ts +1 -0
- package/src/runtime-network.ts +134 -0
- package/src/runtime.ts +153 -0
- package/src/transferableBytes.ts +5 -0
- package/src/tree-root.ts +851 -0
- package/src/types.ts +8 -0
- package/src/worker.ts +975 -0
package/src/relay/ndk.ts
ADDED
|
@@ -0,0 +1,590 @@
|
|
|
1
|
+
// @ts-nocheck
|
|
2
|
+
/**
|
|
3
|
+
* NDK instance for Worker
|
|
4
|
+
*
|
|
5
|
+
* Runs NDK with:
|
|
6
|
+
* - Real relay connections
|
|
7
|
+
* - ndk-cache (Dexie) for IndexedDB caching
|
|
8
|
+
* - nostr-wasm for fast signature verification
|
|
9
|
+
*
|
|
10
|
+
* Main thread communicates via WorkerAdapter postMessage.
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
import NDK, {
|
|
14
|
+
NDKEvent,
|
|
15
|
+
NDKPrivateKeySigner,
|
|
16
|
+
NDKSubscriptionCacheUsage,
|
|
17
|
+
type NDKFilter,
|
|
18
|
+
type NDKSubscriptionOptions,
|
|
19
|
+
} from 'ndk';
|
|
20
|
+
import NDKCacheAdapterDexie from 'ndk-cache';
|
|
21
|
+
import { verifyEvent, matchFilter } from 'nostr-tools';
|
|
22
|
+
import { NostrWasm } from './nostr-wasm';
|
|
23
|
+
import { resolveWorkerPublicAssetUrl } from './publicAssetUrl';
|
|
24
|
+
import type { SignedEvent, NostrFilter } from './protocol';
|
|
25
|
+
|
|
26
|
+
// NDK instance - initialized lazily (created by initNdk, torn down by closeNdk).
// All exported functions below operate on this module-level singleton.
let ndk: NDK | null = null;

// nostr-wasm verifier interface
interface WasmVerifier {
  verifyEvent(event: unknown): void; // throws on invalid sig
}
// Set once loadNostrWasm succeeds; until then verification uses nostr-tools JS.
let wasmVerifier: WasmVerifier | null = null;
// Guards against concurrent wasm loads; reset in loadNostrWasm's finally block.
let wasmLoading = false;

// Event callbacks (registered via setOnEvent / setOnEose; invoked from
// subscribe handlers and from publish's local-echo dispatch).
let onEventCallback: ((subId: string, event: SignedEvent) => void | Promise<void>) | null = null;
let onEoseCallback: ((subId: string) => void) | null = null;

// Active subscriptions, keyed by caller-chosen subId (see subscribe/unsubscribe).
const subscriptions = new Map<string, ReturnType<NDK['subscribe']>>();
|
|
42
|
+
|
|
43
|
+
/**
 * Load nostr-wasm from public wasm file (not base64 inlined)
 * Runs in background - verification falls back to JS until loaded
 *
 * Idempotent: returns immediately when the verifier is already present or a
 * load is in flight. On failure the error is only logged and `wasmLoading`
 * is reset, so a later call may retry.
 */
async function loadNostrWasm(): Promise<void> {
  if (wasmVerifier || wasmLoading) return;
  wasmLoading = true;

  try {
    // Fetch wasm from public directory (not base64 inlined)
    // NOTE(review): fetch() is deliberately not awaited here — presumably
    // NostrWasm accepts a Promise<Response> for streaming compilation
    // (WebAssembly.instantiateStreaming-style). Confirm against ./nostr-wasm
    // before "fixing" this.
    const response = fetch(resolveWorkerPublicAssetUrl(import.meta.env.BASE_URL, 'secp256k1.wasm', {
      importMetaUrl: import.meta.url,
      origin: self.location.origin,
    }));
    wasmVerifier = await NostrWasm(response);
    console.log('[Worker NDK] nostr-wasm loaded from wasm file');
  } catch (err) {
    // Non-fatal: verifySignature keeps using the nostr-tools JS fallback.
    console.warn('[Worker NDK] nostr-wasm load failed, using JS fallback:', err);
  } finally {
    wasmLoading = false;
  }
}
|
|
65
|
+
|
|
66
|
+
/**
|
|
67
|
+
* Custom signature verification function for NDK
|
|
68
|
+
* Uses nostr-wasm if loaded, falls back to nostr-tools
|
|
69
|
+
*/
|
|
70
|
+
async function verifySignature(event: NDKEvent): Promise<boolean> {
|
|
71
|
+
if (wasmVerifier) {
|
|
72
|
+
try {
|
|
73
|
+
// nostr-wasm verifyEvent checks both id hash and signature
|
|
74
|
+
wasmVerifier.verifyEvent({
|
|
75
|
+
id: event.id,
|
|
76
|
+
pubkey: event.pubkey,
|
|
77
|
+
created_at: event.created_at,
|
|
78
|
+
kind: event.kind,
|
|
79
|
+
tags: event.tags,
|
|
80
|
+
content: event.content,
|
|
81
|
+
sig: event.sig,
|
|
82
|
+
});
|
|
83
|
+
return true;
|
|
84
|
+
} catch {
|
|
85
|
+
return false;
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// Fallback to nostr-tools until wasm loads
|
|
90
|
+
// Don't call event.verifySignature() - that would cause infinite recursion
|
|
91
|
+
return verifyEvent({
|
|
92
|
+
id: event.id!,
|
|
93
|
+
pubkey: event.pubkey,
|
|
94
|
+
created_at: event.created_at!,
|
|
95
|
+
kind: event.kind!,
|
|
96
|
+
tags: event.tags,
|
|
97
|
+
content: event.content,
|
|
98
|
+
sig: event.sig!,
|
|
99
|
+
});
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
/**
|
|
103
|
+
* Initialize NDK with cache and nostr-wasm
|
|
104
|
+
*/
|
|
105
|
+
export async function initNdk(
|
|
106
|
+
relays: string[],
|
|
107
|
+
options: {
|
|
108
|
+
pubkey?: string;
|
|
109
|
+
nsec?: string;
|
|
110
|
+
} = {}
|
|
111
|
+
): Promise<void> {
|
|
112
|
+
// Create cache adapter
|
|
113
|
+
const cacheAdapter = new NDKCacheAdapterDexie({ dbName: 'hashtree-ndk-worker', eventCacheSize: 5000 });
|
|
114
|
+
|
|
115
|
+
// Create NDK instance
|
|
116
|
+
ndk = new NDK({
|
|
117
|
+
explicitRelayUrls: relays,
|
|
118
|
+
cacheAdapter,
|
|
119
|
+
// Custom verification - will use wasm when loaded, JS fallback until then
|
|
120
|
+
signatureVerificationFunction: verifySignature,
|
|
121
|
+
});
|
|
122
|
+
|
|
123
|
+
// Set up signer if nsec provided
|
|
124
|
+
if (options.nsec) {
|
|
125
|
+
ndk.signer = new NDKPrivateKeySigner(options.nsec);
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
// Connect to relays immediately, but don't block init on network latency.
|
|
129
|
+
// Subscriptions will attach as relays come online.
|
|
130
|
+
const connectPromise = ndk.connect();
|
|
131
|
+
|
|
132
|
+
// Wait for at least one relay to connect (with timeout)
|
|
133
|
+
const CONNECTION_TIMEOUT = 5000;
|
|
134
|
+
const connectionWait = new Promise<void>((resolve) => {
|
|
135
|
+
const startTime = Date.now();
|
|
136
|
+
const checkConnection = () => {
|
|
137
|
+
const connected = ndk?.pool?.connectedRelays();
|
|
138
|
+
if (connected && connected.size > 0) {
|
|
139
|
+
console.log('[Worker NDK] At least one relay connected');
|
|
140
|
+
resolve();
|
|
141
|
+
return;
|
|
142
|
+
}
|
|
143
|
+
if (Date.now() - startTime > CONNECTION_TIMEOUT) {
|
|
144
|
+
console.warn('[Worker NDK] Connection timeout, proceeding anyway');
|
|
145
|
+
resolve();
|
|
146
|
+
return;
|
|
147
|
+
}
|
|
148
|
+
setTimeout(checkConnection, 50);
|
|
149
|
+
};
|
|
150
|
+
checkConnection();
|
|
151
|
+
});
|
|
152
|
+
void connectionWait;
|
|
153
|
+
|
|
154
|
+
// Log when all relays are connected (async, don't block)
|
|
155
|
+
connectPromise.then(() => {
|
|
156
|
+
console.log('[Worker NDK] All relays connected');
|
|
157
|
+
});
|
|
158
|
+
|
|
159
|
+
// Lazy load nostr-wasm in background
|
|
160
|
+
loadNostrWasm();
|
|
161
|
+
|
|
162
|
+
console.log('[Worker NDK] Initialized with', relays.length, 'relays');
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
/**
|
|
166
|
+
* Get the NDK instance
|
|
167
|
+
*/
|
|
168
|
+
export function getNdk(): NDK | null {
|
|
169
|
+
return ndk;
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
/**
|
|
173
|
+
* Set event callback
|
|
174
|
+
*/
|
|
175
|
+
export function setOnEvent(callback: (subId: string, event: SignedEvent) => void | Promise<void>): void {
|
|
176
|
+
onEventCallback = callback;
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
/**
|
|
180
|
+
* Set EOSE callback
|
|
181
|
+
*/
|
|
182
|
+
export function setOnEose(callback: (subId: string) => void): void {
|
|
183
|
+
onEoseCallback = callback;
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
/**
 * Subscribe to events.
 *
 * Opens a long-lived NDK subscription under the caller-chosen `subId`,
 * replacing any existing subscription with the same id. Matching events are
 * flattened to SignedEvent and forwarded to the setOnEvent handler; EOSE is
 * forwarded to the setOnEose handler.
 *
 * @param subId - Caller-chosen subscription id (used later for unsubscribe).
 * @param filters - Plain NostrFilter objects; converted to NDKFilter, with
 *   any '#x' tag filters copied verbatim.
 * @param opts - Optional NDK subscription options; only cacheUsage is read,
 *   defaulting to CACHE_FIRST.
 */
export function subscribe(subId: string, filters: NostrFilter[], opts?: NDKSubscriptionOptions): void {
  if (!ndk) {
    console.error('[Worker NDK] Not initialized');
    return;
  }

  // Close existing subscription with same ID
  unsubscribe(subId);

  // Convert NostrFilter to NDKFilter
  const ndkFilters: NDKFilter[] = filters.map(f => {
    const filter: NDKFilter = {
      ids: f.ids,
      authors: f.authors,
      kinds: f.kinds,
      since: f.since,
      until: f.until,
      limit: f.limit,
    };

    // Copy tag filters
    for (const key of Object.keys(f)) {
      if (key.startsWith('#')) {
        (filter as Record<string, unknown>)[key] = f[key];
      }
    }

    return filter;
  });

  // skipValidation: nostr-wasm verifyEvent handles structure validation
  const sub = ndk.subscribe(ndkFilters, {
    closeOnEose: false,
    skipValidation: true,
    cacheUsage: opts?.cacheUsage ?? NDKSubscriptionCacheUsage.CACHE_FIRST,
  });

  sub.on('event', (event: NDKEvent) => {
    // Flatten the NDKEvent into the plain SignedEvent shape used by callers.
    const signedEvent: SignedEvent = {
      id: event.id!,
      pubkey: event.pubkey,
      kind: event.kind!,
      content: event.content,
      tags: event.tags,
      created_at: event.created_at!,
      sig: event.sig!,
    };
    if (onEventCallback) {
      // The handler may be async; failures are logged, never propagated into NDK.
      Promise.resolve(onEventCallback(subId, signedEvent)).catch((err) => {
        console.warn('[Worker NDK] onEvent callback failed:', err);
      });
    }
  });

  sub.on('eose', () => {
    onEoseCallback?.(subId);
  });

  subscriptions.set(subId, sub);
}
|
|
249
|
+
|
|
250
|
+
/**
|
|
251
|
+
* Unsubscribe
|
|
252
|
+
*/
|
|
253
|
+
export function unsubscribe(subId: string): void {
|
|
254
|
+
const sub = subscriptions.get(subId);
|
|
255
|
+
if (sub) {
|
|
256
|
+
sub.stop();
|
|
257
|
+
subscriptions.delete(subId);
|
|
258
|
+
}
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
/**
 * Publish an event.
 *
 * Wraps the already-signed event in an NDKEvent and publishes it via NDK.
 * Afterwards the event is also dispatched to any local subscription whose
 * filters match it, because NDK does not echo a sender's own events back
 * through its subscriptions.
 *
 * @param event - Fully signed event (id and sig already present).
 * @throws Error when initNdk has not been called.
 */
export async function publish(event: SignedEvent): Promise<void> {
  if (!ndk) {
    throw new Error('NDK not initialized');
  }

  const ndkEvent = new NDKEvent(ndk, {
    id: event.id,
    pubkey: event.pubkey,
    kind: event.kind,
    content: event.content,
    tags: event.tags,
    created_at: event.created_at,
    sig: event.sig,
  });

  await ndkEvent.publish();

  // Dispatch to local subscriptions that match this event
  // NDK doesn't echo published events back to sender, so we do it locally
  if (onEventCallback) {
    for (const [subId, sub] of subscriptions) {
      // Get the filters from the subscription (NDK stores them on the subscription object)
      // NOTE(review): reaching into a private-ish `filters` field — verify it
      // survives NDK upgrades.
      const filters = (sub as unknown as { filters?: NDKFilter[] }).filters || [];
      for (const filter of filters) {
        if (matchFilter(filter, event)) {
          // Handler may be async; failures are logged, never rethrown.
          Promise.resolve(onEventCallback(subId, event)).catch((err) => {
            console.warn('[Worker NDK] onEvent callback failed:', err);
          });
          break; // Only dispatch once per subscription
        }
      }
    }
  }
}
|
|
298
|
+
|
|
299
|
+
/**
|
|
300
|
+
* Close NDK and all subscriptions
|
|
301
|
+
*/
|
|
302
|
+
export function closeNdk(): void {
|
|
303
|
+
for (const [subId, sub] of subscriptions) {
|
|
304
|
+
sub.stop();
|
|
305
|
+
console.log('[Worker NDK] Closed subscription:', subId);
|
|
306
|
+
}
|
|
307
|
+
subscriptions.clear();
|
|
308
|
+
|
|
309
|
+
// NDK doesn't have a close method, but we can disconnect relays
|
|
310
|
+
if (ndk?.pool) {
|
|
311
|
+
for (const relay of ndk.pool.relays.values()) {
|
|
312
|
+
relay.disconnect();
|
|
313
|
+
}
|
|
314
|
+
}
|
|
315
|
+
|
|
316
|
+
ndk = null;
|
|
317
|
+
console.log('[Worker NDK] Closed');
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
/**
 * Update relays dynamically.
 * Disconnects old relays and connects to new ones.
 *
 * Best-effort: per-relay disconnect/add errors are logged and skipped, and a
 * reconnect that takes longer than 10s is abandoned (the connect continues in
 * the background). Never throws.
 *
 * @param relays - Replacement set of relay websocket URLs.
 */
export async function setRelays(relays: string[]): Promise<void> {
  if (!ndk?.pool) {
    console.warn('[Worker NDK] Cannot setRelays - NDK not initialized');
    return;
  }

  console.log('[Worker NDK] Updating relays:', relays);

  try {
    // Disconnect all current relays
    for (const relay of ndk.pool.relays.values()) {
      try {
        relay.disconnect();
      } catch (e) {
        console.warn('[Worker NDK] Error disconnecting relay:', e);
      }
    }

    // Clear the relay pool
    ndk.pool.relays.clear();

    // Add new relays
    for (const url of relays) {
      try {
        ndk.addExplicitRelay(url);
      } catch (e) {
        console.warn('[Worker NDK] Error adding relay:', url, e);
      }
    }

    // Reconnect with timeout
    const connectPromise = ndk.connect();
    const timeoutPromise = new Promise<void>((_, reject) =>
      setTimeout(() => reject(new Error('Connect timeout')), 10000)
    );

    // Race connect against the timeout; either outcome is non-fatal.
    await Promise.race([connectPromise, timeoutPromise]).catch(e => {
      console.warn('[Worker NDK] Connect timeout/error, proceeding anyway:', e);
    });

    console.log('[Worker NDK] Relays updated, connected to', relays.length, 'relays');
  } catch (e) {
    console.error('[Worker NDK] Error in setRelays:', e);
  }
}
|
|
369
|
+
|
|
370
|
+
/**
|
|
371
|
+
* Get relay stats
|
|
372
|
+
*/
|
|
373
|
+
export function getRelayStats(): { url: string; connected: boolean; eventsReceived: number; eventsSent: number }[] {
|
|
374
|
+
if (!ndk?.pool) return [];
|
|
375
|
+
|
|
376
|
+
const stats: { url: string; connected: boolean; eventsReceived: number; eventsSent: number }[] = [];
|
|
377
|
+
|
|
378
|
+
for (const relay of ndk.pool.relays.values()) {
|
|
379
|
+
stats.push({
|
|
380
|
+
url: relay.url,
|
|
381
|
+
connected: relay.status >= 5, // NDKRelayStatus.CONNECTED = 5
|
|
382
|
+
eventsReceived: 0, // TODO: track this
|
|
383
|
+
eventsSent: 0,
|
|
384
|
+
});
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
return stats;
|
|
388
|
+
}
|
|
389
|
+
|
|
390
|
+
/**
 * Republish all user's hashtree events from cache to relays.
 * This helps recover when events exist locally but weren't properly published.
 *
 * For unsigned events (never signed), we sign them using the worker's signing flow.
 * For signed events, we republish directly.
 * Also pushes blob data to Blossom servers.
 *
 * Events without a 'hash' tag are treated as deleted trees and skipped.
 * Per-event failures (publish, sign, Blossom push) are logged and do not
 * abort the loop.
 *
 * @param pubkey - Author whose kind-30078 'hashtree' events are republished.
 * @param signFn - Signs an unsigned event template via the worker's signing flow.
 * @param pushToBlossomFn - Optional; pushes the tree's blob data to Blossom.
 * @param prefix - Optional URL-encoded prefix to filter trees by d-tag.
 * @returns number of events successfully processed through the publish step.
 * @throws Error when initNdk has not been called.
 */
export async function republishTrees(
  pubkey: string,
  signFn: (template: { kind: number; created_at: number; content: string; tags: string[][] }) => Promise<{ id: string; pubkey: string; kind: number; content: string; tags: string[][]; created_at: number; sig: string }>,
  pushToBlossomFn?: (hash: Uint8Array, key?: Uint8Array, treeName?: string) => Promise<{ pushed: number; skipped: number; failed: number }>,
  prefix?: string
): Promise<number> {
  if (!ndk) {
    throw new Error('NDK not initialized');
  }

  // Decode prefix if provided (it's URL-encoded)
  const decodedPrefix = prefix ? decodeURIComponent(prefix) : undefined;
  console.log('[Worker NDK] Republishing trees for', pubkey, decodedPrefix ? `with prefix: ${decodedPrefix}` : '');

  // Fetch user's hashtree events from cache and relays
  const filter: NDKFilter = {
    kinds: [30078],
    authors: [pubkey],
    '#l': ['hashtree'],
  };

  // Fetch events (will check cache first)
  const events = await ndk.fetchEvents(filter);
  console.log('[Worker NDK] Found', events.size, 'events');

  // Filter by prefix if provided
  const filteredEvents = decodedPrefix
    ? Array.from(events).filter(event => {
        const dTag = event.tags.find(t => t[0] === 'd')?.[1];
        return dTag?.startsWith(decodedPrefix);
      })
    : Array.from(events);
  console.log('[Worker NDK] After prefix filter:', filteredEvents.length, 'events');

  let count = 0;
  let skippedNoHash = 0;

  for (const event of filteredEvents) {
    // Skip events without a hash tag (deleted trees)
    const hasHash = event.tags.some(t => t[0] === 'hash' && t[1]);
    if (!hasHash) {
      skippedNoHash++;
      continue;
    }

    const dTag = event.tags.find(t => t[0] === 'd')?.[1];

    try {
      const relaySet = ndk.pool?.connectedRelays();
      if (!relaySet || relaySet.size === 0) {
        console.warn('[Worker NDK] No connected relays');
        continue;
      }

      let signedEvent: NDKEvent;

      if (event.sig) {
        // Already signed - use existing event
        console.log('[Worker NDK] Republishing signed event:', dTag);
        signedEvent = event;
      } else {
        // Unsigned event - sign it using worker's signing flow
        console.log('[Worker NDK] Signing:', dTag);

        // Fresh created_at: the original unsigned event carries no signature
        // binding, so re-signing with "now" is safe.
        const template = {
          kind: 30078,
          created_at: Math.floor(Date.now() / 1000),
          content: event.content || '',
          tags: event.tags,
        };

        const signed = await signFn(template);

        // Create NDKEvent from signed event
        signedEvent = new NDKEvent(ndk, {
          id: signed.id,
          pubkey: signed.pubkey,
          kind: signed.kind,
          content: signed.content,
          tags: signed.tags,
          created_at: signed.created_at,
          sig: signed.sig,
        });
      }

      // Publish to all connected relays (per-relay failures logged, not fatal)
      const promises = Array.from(relaySet).map(async (relay) => {
        try {
          await relay.publish(signedEvent);
        } catch (e) {
          console.warn('[Worker NDK] Failed to publish to relay:', relay.url, e);
        }
      });
      await Promise.all(promises);
      count++;
      console.log('[Worker NDK] Published:', dTag);

      // Push blob data to Blossom if function provided
      if (pushToBlossomFn) {
        const hashTag = event.tags.find(t => t[0] === 'hash')?.[1];
        const keyTag = event.tags.find(t => t[0] === 'key')?.[1];

        if (hashTag) {
          try {
            // Decode hex tag values to bytes. NOTE(review): assumes the tag is
            // valid even-length hex — the non-null assertion on match() throws
            // on an empty string; malformed hex yields NaN bytes. Confirm tag
            // format is enforced upstream.
            const hashBytes = new Uint8Array(hashTag.match(/.{2}/g)!.map(b => parseInt(b, 16)));
            const keyBytes = keyTag ? new Uint8Array(keyTag.match(/.{2}/g)!.map(b => parseInt(b, 16))) : undefined;

            console.log('[Worker NDK] Pushing to Blossom:', dTag);
            const result = await pushToBlossomFn(hashBytes, keyBytes, dTag);
            console.log('[Worker NDK] Blossom push result:', dTag, result);
          } catch (e) {
            console.warn('[Worker NDK] Failed to push to Blossom:', dTag, e);
          }
        }
      }
    } catch (e) {
      console.warn('[Worker NDK] Failed to republish:', dTag, e);
    }
  }

  console.log('[Worker NDK] Republished', count, 'trees, skipped', skippedNoHash, 'deleted');
  return count;
}
|
|
523
|
+
|
|
524
|
+
/**
 * Republish a single tree's event from cache to relays.
 * This republishes the original event as-is (preserves signature/timestamp).
 * Works for any user's tree, not just own.
 *
 * @param pubkey - Author of the tree event.
 * @param treeName - The event's d-tag value.
 * @returns true when at least one relay accepted the publish; false when the
 *   event is missing, unsigned, marked deleted (no hash tag), no relays are
 *   connected, or every relay publish failed.
 * @throws Error when initNdk has not been called.
 */
export async function republishTree(pubkey: string, treeName: string): Promise<boolean> {
  if (!ndk) {
    throw new Error('NDK not initialized');
  }

  console.log('[Worker NDK] Republishing single tree:', pubkey, treeName);

  // Fetch the specific event
  const filter: NDKFilter = {
    kinds: [30078],
    authors: [pubkey],
    '#d': [treeName],
    '#l': ['hashtree'],
  };

  const events = await ndk.fetchEvents(filter);
  if (events.size === 0) {
    console.warn('[Worker NDK] Event not found for', treeName);
    return false;
  }

  // Get the most recent event (in case there are multiple)
  let event: NDKEvent | null = null;
  for (const e of events) {
    if (!event || (e.created_at && event.created_at && e.created_at > event.created_at)) {
      event = e;
    }
  }

  if (!event || !event.sig) {
    console.warn('[Worker NDK] No signed event found for', treeName);
    return false;
  }

  // Skip events without a hash tag (deleted trees)
  const hasHash = event.tags.some(t => t[0] === 'hash' && t[1]);
  if (!hasHash) {
    console.warn('[Worker NDK] Event has no hash (deleted tree):', treeName);
    return false;
  }

  const relaySet = ndk.pool?.connectedRelays();
  if (!relaySet || relaySet.size === 0) {
    console.warn('[Worker NDK] No connected relays');
    return false;
  }

  // Publish to all connected relays; success means at least one accepted it.
  let success = false;
  const promises = Array.from(relaySet).map(async (relay) => {
    try {
      await relay.publish(event);
      success = true;
    } catch (e) {
      console.warn('[Worker NDK] Failed to publish to relay:', relay.url, e);
    }
  });
  await Promise.all(promises);

  console.log('[Worker NDK] Republished tree:', treeName, success ? 'success' : 'failed');
  return success;
}
|