@johndimm/constellations 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/App.tsx +352 -70
  2. package/FullPageConstellations.tsx +7 -5
  3. package/components/AppConfirmDialog.tsx +1 -0
  4. package/components/AppHeader.tsx +69 -29
  5. package/components/AppNotifications.tsx +1 -0
  6. package/components/BrowsePeople.tsx +3 -0
  7. package/components/ControlPanel.tsx +46 -371
  8. package/components/Graph.tsx +251 -87
  9. package/components/HelpOverlay.tsx +1 -0
  10. package/components/NodeContextMenu.tsx +123 -3
  11. package/components/PeopleBrowserSidebar.tsx +15 -6
  12. package/components/Sidebar.tsx +46 -19
  13. package/components/TimelineView.tsx +1 -0
  14. package/embedded.css +38 -0
  15. package/hooks/useExpansion.ts +61 -229
  16. package/hooks/useGraphActions.ts +1 -0
  17. package/hooks/useGraphState.ts +75 -40
  18. package/hooks/useKioskMode.ts +1 -0
  19. package/hooks/useNodeClickHandler.ts +23 -15
  20. package/hooks/useSearchHandlers.ts +57 -19
  21. package/host.ts +1 -1
  22. package/index.css +17 -3
  23. package/package.json +4 -1
  24. package/services/aiService.ts +23 -0
  25. package/services/aiUtils.ts +216 -207
  26. package/services/cacheService.ts +1 -0
  27. package/services/crossrefService.ts +1 -0
  28. package/services/deepseekService.ts +467 -0
  29. package/services/geminiService.ts +532 -733
  30. package/services/graphUtils.ts +128 -18
  31. package/services/imageService.ts +18 -0
  32. package/services/openAlexService.ts +1 -0
  33. package/services/resolveImageForTitle.ts +458 -0
  34. package/services/wikipediaImage.ts +1 -0
  35. package/services/wikipediaService.ts +56 -46
  36. package/types.ts +3 -0
  37. package/utils/evidenceUtils.ts +1 -0
  38. package/utils/graphLogicUtils.ts +1 -0
  39. package/utils/wikiUtils.ts +14 -2
@@ -0,0 +1,458 @@
1
+ import { jsonFromResponse } from "./aiUtils";
2
+
3
/** Outcome of an image lookup: the resolved URL (null when nothing usable was found) plus an optional tag naming which backend produced it (e.g. "pageimage", "wikidata", "ddg"). */
type ImageResolveResult = { url: string | null; source?: string };
4
+
5
+ /** Identifiable UA — Wikimedia rate-limits shared / anonymous clients heavily when parallel bursts hit. */
6
+ const WIKI_UA = "Constellations/1.0 (knowledge graph; +https://www.mediawiki.org/wiki/API:Etiquette)";
7
+
8
+ /** Limit parallel MediaWiki API calls (many image nodes = many /api/image requests on the server at once). */
9
+ const WIKI_MAX_CONCURRENT = 3;
10
+ let wikiInFlight = 0;
11
+ const wikiWaitQueue: Array<() => void> = [];
12
+ function acquireWiki(): Promise<void> {
13
+ if (wikiInFlight < WIKI_MAX_CONCURRENT) {
14
+ wikiInFlight++;
15
+ return Promise.resolve();
16
+ }
17
+ return new Promise((res) => wikiWaitQueue.push(res));
18
+ }
19
+ function releaseWiki() {
20
+ wikiInFlight--;
21
+ const n = wikiWaitQueue.shift();
22
+ if (n) {
23
+ wikiInFlight++;
24
+ n();
25
+ }
26
+ }
27
+
28
+ /**
29
+ * JSON GET for api.php with 429/503 retry (Retry-After or exponential backoff) and concurrency gate.
30
+ */
31
+ async function wikimediaGetJson(url: string): Promise<unknown | null> {
32
+ await acquireWiki();
33
+ try {
34
+ for (let attempt = 0; attempt < 6; attempt++) {
35
+ const res = await fetch(url, { headers: { "User-Agent": WIKI_UA, Accept: "application/json" } });
36
+ if (res.status === 429 || res.status === 503) {
37
+ const ra = res.headers.get("Retry-After");
38
+ let ms: number;
39
+ if (ra) {
40
+ const sec = parseInt(ra.trim(), 10);
41
+ if (!Number.isNaN(sec) && /^\d+$/.test(ra.trim())) {
42
+ ms = Math.min(120_000, sec * 1000);
43
+ } else {
44
+ const httpDate = Date.parse(ra);
45
+ ms = !Number.isNaN(httpDate) ? Math.max(200, httpDate - Date.now()) : 2000;
46
+ }
47
+ } else {
48
+ ms = Math.min(20_000, 500 * 2 ** attempt) + Math.random() * 300;
49
+ }
50
+ await new Promise((r) => setTimeout(r, ms));
51
+ continue;
52
+ }
53
+ if (!res.ok) {
54
+ if (attempt < 5 && res.status >= 500 && res.status < 600) {
55
+ await new Promise((r) => setTimeout(r, 200 * (attempt + 1)));
56
+ continue;
57
+ }
58
+ return null;
59
+ }
60
+ const text = await res.text();
61
+ try {
62
+ return JSON.parse(text) as unknown;
63
+ } catch {
64
+ return null;
65
+ }
66
+ }
67
+ return null;
68
+ } finally {
69
+ releaseWiki();
70
+ }
71
+ }
72
+
73
+ /** Short-lived: avoid duplicate pageprops lookup when one graph fires many /api/image for related titles. */
74
+ const pagepropsQidCache = new Map<string, { qid: string; at: number }>();
75
+ const QID_CACHE_TTL_MS = 4 * 60 * 60 * 1000;
76
+ function cacheGetQid(titleKey: string): string | undefined {
77
+ const e = pagepropsQidCache.get(titleKey);
78
+ if (!e) return undefined;
79
+ if (Date.now() - e.at > QID_CACHE_TTL_MS) {
80
+ pagepropsQidCache.delete(titleKey);
81
+ return undefined;
82
+ }
83
+ return e.qid;
84
+ }
85
+ function cacheSetQid(titleKey: string, qid: string) {
86
+ pagepropsQidCache.set(titleKey, { qid, at: Date.now() });
87
+ }
88
+
89
+ export const fetchDuckDuckGoImages = async (
90
+ q: string,
91
+ limit: number = 10
92
+ ): Promise<Array<{ image?: string; thumbnail?: string; title?: string }>> => {
93
+ const headers = {
94
+ "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.4 Safari/605.1.15",
95
+ "Accept-Language": "en-US,en;q=0.9"
96
+ };
97
+ try {
98
+ const searchUrl = `https://duckduckgo.com/?q=${encodeURIComponent(q)}&iax=images&ia=images`;
99
+ const pageRes = await fetch(searchUrl, { headers });
100
+ if (!pageRes.ok) {
101
+ console.warn("[DDG-Test] search status", pageRes.status, q);
102
+ return [];
103
+ }
104
+ const pageText = await pageRes.text();
105
+ const vqdMatch = pageText.match(/vqd=['"]?([^'"&]+)/);
106
+ const vqd = vqdMatch?.[1];
107
+ if (!vqd) {
108
+ console.warn("[DDG-Test] missing vqd for query", q);
109
+ return [];
110
+ }
111
+
112
+ const apiUrl = `https://duckduckgo.com/i.js?l=us-en&o=json&q=${encodeURIComponent(q)}&vqd=${encodeURIComponent(vqd)}&f=,,,&p=1`;
113
+ const apiRes = await fetch(apiUrl, {
114
+ headers: {
115
+ ...headers,
116
+ Referer: searchUrl,
117
+ Accept: "application/json, text/javascript, */*; q=0.01",
118
+ "X-Requested-With": "XMLHttpRequest"
119
+ }
120
+ });
121
+ if (!apiRes.ok) {
122
+ console.warn("[DDG-Test] api status", apiRes.status, q);
123
+ return [];
124
+ }
125
+ const data = (await jsonFromResponse(apiRes)) as { results?: any[] } | null;
126
+ if (!data) return [];
127
+ const results: any[] = data?.results || [];
128
+ return results.slice(0, limit).map((r) => ({
129
+ image: r?.image,
130
+ thumbnail: r?.thumbnail,
131
+ title: r?.title
132
+ }));
133
+ } catch {
134
+ return [];
135
+ }
136
+ };
137
+
138
+ const fetchPosterFromDuckDuckGo = async (q: string): Promise<string | null> => {
139
+ const exclude = ["logo", "icon", "emoji", "svg", "vector", "clipart", "cartoon", "animated", "posterized"];
140
+ try {
141
+ const candidates = await fetchDuckDuckGoImages(q, 10);
142
+ console.log("[Poster][DDG] results", candidates.length);
143
+ for (const r of candidates) {
144
+ const url = String(r?.image || r?.thumbnail || "");
145
+ if (!url) continue;
146
+ const lower = url.toLowerCase();
147
+ if (exclude.some((p) => lower.includes(p))) continue;
148
+ console.log(`[Poster][DDG] candidate`, { url: r?.image, thumbnail: r?.thumbnail, title: r?.title });
149
+ return url;
150
+ }
151
+ } catch (e) {
152
+ console.warn("[Poster][DDG] failed", q, e);
153
+ }
154
+ return null;
155
+ };
156
+
157
/**
 * Resolve a representative image URL for a graph node.
 *
 * Classifies the node from its title + context into one of three branches
 * (person, screen work, generic) and walks a branch-specific cascade of
 * fallbacks: Wikipedia page image, Wikidata P18, Commons search, and finally
 * DuckDuckGo image scrape. The fallback ORDER differs per branch on purpose
 * (portraits first for people, posters first for films/series).
 *
 * @param title   Node title (may also be a literal "File:"/"Image:" reference).
 * @param context Free-text type/context emitted by the graph LLM (e.g. "rapper", "tv series").
 * @returns URL (or null) plus a source tag naming the backend that supplied it.
 */
export const resolveImageForTitle = async (title: string, context: string): Promise<ImageResolveResult> => {
  const trimmedTitle = title.trim();
  const trimmedContext = context.trim();
  // Heuristic: does this text describe a film / TV production?
  const looksLikeScreenWork = (s: string) =>
    /\b(film|movie|television series|tv series|miniseries|sitcom|drama series|comedy series|series)\b/i.test(s.toLowerCase());
  /** Types from the graph LLM: must include music/performance roles, not just "author|actor" */
  const isPersonContext = (s: string) => {
    const normalized = s.trim().toLowerCase();
    // Exact single-word type match first.
    if (
      /^(person|human|author|actor|actress|musician|artist|rapper|singer|songwriter|vocalist|bandleader|entertainer|drummer|guitarist|pianist|bassist|lyricist|poet|composer|scientist|mathematician|researcher|band|group|athlete|politician|model|dancer|dj|mc|deejay|celebrity)$/i.test(
        normalized
      )
    ) {
      return true;
    }
    // Otherwise look for person-role words (or music-scene phrases) anywhere in the context.
    if (
      /\b(person|people|human|author|actor|actress|musicians?|rappers?|singers?|vocalists?|songwriters?|bandleaders?|entertainers?|composers?|artists?|director|writer|poet|playwright|drummers?|guitarists?|pianists?|bassists?|lyricists?|djs?|mcs?|vocalist|lyricist|bassist|orchestrators?|producers?|choreographer|dancers?|models?|athletes?|politicians?|disc jockey|scientist|mathematician|researcher|celebrity|celebrities|rap)\b/i.test(
        normalized
      ) ||
      /(hip[ -]hop|rap artist|grime artist|musical group|boy band|girl band)/i.test(normalized)
    ) {
      return true;
    }
    return false;
  };
  // `isScreenWork` is mutable: it may be upgraded later from the Wikipedia extract.
  let isScreenWork = looksLikeScreenWork(`${trimmedTitle} ${trimmedContext}`);
  const isPerson = isPersonContext(trimmedContext);

  /** Resolve a "File:..." title to a concrete thumb/full URL, trying Commons first, then enwiki. */
  const fetchImageInfo = async (fileTitle: string): Promise<string | null> => {
    const apis = [`https://commons.wikimedia.org/w/api.php`, `https://en.wikipedia.org/w/api.php`];
    for (const api of apis) {
      try {
        const url = `${api}?action=query&format=json&prop=imageinfo&titles=${encodeURIComponent(fileTitle)}&iiprop=url&iiurlwidth=800&origin=*`;
        const data = (await wikimediaGetJson(url)) as any;
        if (!data) continue;
        const pagesInfo = data?.query?.pages;
        const imgPage = pagesInfo ? (Object.values(pagesInfo)[0] as any) : null;
        const info = imgPage?.imageinfo?.[0];
        // Prefer the 800px thumb; fall back to the original file URL.
        if (info?.thumburl || info?.url) return info.thumburl || info.url;
      } catch { /* ignore */ }
    }
    return null;
  };

  /** Map the title to a Wikidata QID via enwiki pageprops, optionally falling back to wbsearchentities. Results are cached. */
  const resolveWikidataId = async (allowSearchFallback: boolean): Promise<string | null> => {
    const titleKey = trimmedTitle.toLowerCase();
    const fromCache = cacheGetQid(titleKey);
    if (fromCache !== undefined) {
      return fromCache;
    }
    try {
      const ppUrl = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=pageprops&titles=${encodeURIComponent(trimmedTitle)}&redirects=1&origin=*`;
      const ppData = (await wikimediaGetJson(ppUrl)) as any;
      const pages = ppData?.query?.pages;
      const page = pages ? (Object.values(pages)[0] as any) : null;
      const qid = page?.pageprops?.wikibase_item;
      if (qid && /^Q\d+$/.test(qid)) {
        cacheSetQid(titleKey, qid);
        return qid;
      }
      // No wikibase_item on enwiki (common) — fall through to search when allowed
    } catch (e) {
      console.warn("[Image][Wikidata] pageprops failed", trimmedTitle, e);
    }

    if (allowSearchFallback) {
      try {
        // Fuzzy Wikidata entity search; first hit may be wrong, hence callers gate this with a flag.
        const searchUrl = `https://www.wikidata.org/w/api.php?action=wbsearchentities&format=json&language=en&type=item&search=${encodeURIComponent(trimmedTitle)}&origin=*`;
        const data = (await wikimediaGetJson(searchUrl)) as any;
        const id = data?.search?.[0]?.id;
        if (id && /^Q\d+$/.test(id)) {
          cacheSetQid(titleKey, id);
          return id;
        }
      } catch (e) {
        console.warn("[Image][Wikidata] search failed", trimmedTitle, e);
      }
    }
    return null;
  };

  /** QID → Wikidata P18 ("image") claim → concrete file URL. */
  const fetchWikidataImageForTitle = async (allowSearchFallback: boolean): Promise<string | null> => {
    try {
      const qid = await resolveWikidataId(allowSearchFallback);
      if (!qid) return null;
      const wdUrl = `https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=claims&ids=${qid}&origin=*`;
      const wdData = (await wikimediaGetJson(wdUrl)) as any;
      if (!wdData) return null;
      const claims = wdData?.entities?.[qid]?.claims;
      const p18 = claims?.P18?.[0]?.mainsnak?.datavalue?.value as string | undefined;
      if (!p18) return null;
      const imgTitle = p18.startsWith("File:") ? p18 : `File:${p18}`;
      return fetchImageInfo(imgTitle);
    } catch (e) {
      console.warn("[Image][Wikidata] failed", trimmedTitle, e);
      return null;
    }
  };

  /** Lead ("page") image thumbnail from enwiki pageimages, if any. */
  const fetchWikipediaPageImage = async (): Promise<string | null> => {
    try {
      const url = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=pageimages&titles=${encodeURIComponent(trimmedTitle)}&pithumbsize=800&redirects=1&origin=*`;
      const data = (await wikimediaGetJson(url)) as any;
      if (!data) return null;
      const pages = data?.query?.pages;
      const page = pages ? (Object.values(pages)[0] as any) : null;
      return page?.thumbnail?.source || null;
    } catch {
      return null;
    }
  };

  /** Plain-text intro extract of the enwiki article (used only to re-classify as screen work). */
  const fetchWikipediaExtract = async (): Promise<string | null> => {
    try {
      const url = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=extracts&exintro=1&explaintext=1&titles=${encodeURIComponent(trimmedTitle)}&redirects=1&origin=*`;
      const data = (await wikimediaGetJson(url)) as any;
      if (!data) return null;
      const pages = data?.query?.pages;
      const page = pages ? (Object.values(pages)[0] as any) : null;
      return page?.extract || null;
    } catch {
      return null;
    }
  };

  /** Score the files embedded in the enwiki article and return the most poster-like one. */
  const fetchWikipediaPosterFromImages = async (): Promise<string | null> => {
    try {
      const url = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=images&titles=${encodeURIComponent(trimmedTitle)}&imlimit=50&redirects=1&origin=*`;
      const data = (await wikimediaGetJson(url)) as any;
      if (!data) return null;
      const pages = data?.query?.pages;
      const page = pages ? (Object.values(pages)[0] as any) : null;
      const images = page?.images || [];
      if (!images.length) return null;

      const normalizedTitle = trimmedTitle.toLowerCase();
      const candidates = images
        .map((img: any) => String(img?.title || ""))
        .filter((t: string) => t.toLowerCase().startsWith("file:"));

      if (!candidates.length) return null;

      // Heuristic scoring: boost poster/cover naming and title matches,
      // heavily penalize obviously unrelated or non-photo files.
      const scored = candidates
        .map((t: string) => {
          const lt = t.toLowerCase();
          let score = 0;
          if (lt.includes("poster")) score += 500;
          if (lt.includes("cover")) score += 200;
          if (lt.includes("film") || lt.includes("movie")) score += 150;
          if (lt.includes(normalizedTitle)) score += 200;

          const junk = ["museum", "car", "grill", "packard", "automobile", "vehicle", "display", "engine", "cockpit", "interior", "exterior", "restoration", "may_2017"];
          if (junk.some((j) => lt.includes(j))) score -= 1000;

          // Very long file names tend to be incidental article media, not posters.
          if (t.length > 100) score -= 400;

          if (lt.includes(".svg") || lt.includes(".webm") || lt.includes(".gif")) score -= 300;
          return { title: t, score };
        })
        .sort((a: any, b: any) => b.score - a.score);

      const best = scored[0];
      if (!best || best.score <= 0) {
        console.warn(`[Image][Wiki-Poster] No good poster found for "${trimmedTitle}". Best score: ${best?.score || 0}`);
        return null;
      }
      return fetchImageInfo(best.title);
    } catch {
      return null;
    }
  };

  /** Search Commons for "<title>" poster files; prefer tall (poster-ratio), reasonably large images. */
  const fetchCommonsPoster = async (): Promise<string | null> => {
    try {
      const searchQuery = `"${trimmedTitle}" poster`;
      const searchUrl = `https://commons.wikimedia.org/w/api.php?action=query&format=json&list=search&srsearch=${encodeURIComponent(searchQuery)}&srnamespace=6&srlimit=5&origin=*`;
      const data = (await wikimediaGetJson(searchUrl)) as any;
      if (!data) return null;
      const hits: any[] = (data as any)?.query?.search || [];
      let best: { url: string; score: number; title: string } | null = null;
      const normalizedTitle = trimmedTitle.toLowerCase();
      for (const h of hits) {
        const fileTitle = h?.title;
        if (!fileTitle) continue;
        const lowerTitle = String(fileTitle).toLowerCase();
        if (lowerTitle.endsWith(".pdf") || lowerTitle.includes(".pdf/")) continue;
        const infoUrl = `https://commons.wikimedia.org/w/api.php?action=query&format=json&prop=imageinfo&titles=${encodeURIComponent(fileTitle)}&iiprop=url|size&iiurlwidth=800&origin=*`;
        const infoData = (await wikimediaGetJson(infoUrl)) as any;
        if (!infoData) continue;
        const pages = (infoData as any)?.query?.pages;
        const page = pages ? (Object.values(pages)[0] as any) : null;
        const info = page?.imageinfo?.[0];
        const url = info?.thumburl || info?.url;
        if (!url) continue;
        const w = Number(info?.thumbwidth || info?.width || 0);
        const hgt = Number(info?.thumbheight || info?.height || 0);
        // Aspect ratio: posters are portrait-oriented (ratio > 1).
        const ratio = hgt > 0 && w > 0 ? hgt / w : 0;
        let score = 0;
        const lt = String(fileTitle).toLowerCase();
        if (lt.includes(normalizedTitle)) score += 180;
        if (lt.includes("poster")) score += 120;
        if (lt.includes("season")) score += 40;
        if (ratio > 1.2) score += 60;
        if (ratio < 0.9) score -= 150;
        if (w < 300 || hgt < 400) score -= 80;
        // Floor at 10 so any resolvable hit beats "nothing".
        if (score <= 0) score = 10;
        if (!best || score > best.score) best = { url, score, title: fileTitle };
      }
      return best?.url || null;
    } catch {
      return null;
    }
  };

  /** Search Commons for a solo portrait photo of the person; penalize group shots and posters. */
  const fetchCommonsPortrait = async (): Promise<string | null> => {
    try {
      const searchUrl = `https://commons.wikimedia.org/w/api.php?action=query&format=json&list=search&srsearch=${encodeURIComponent(trimmedTitle)}&srnamespace=6&srlimit=10&origin=*`;
      const data = (await wikimediaGetJson(searchUrl)) as any;
      if (!data) return null;
      const hits: any[] = data?.query?.search || [];
      if (!hits.length) return null;
      const baseWords = trimmedTitle.toLowerCase().split(/\s+/).filter((w) => w.length > 1);
      const scored = hits
        .map((h) => {
          const fileTitle = String(h?.title || "");
          const lt = fileTitle.toLowerCase();
          if (!lt.startsWith("file:")) return { title: fileTitle, score: -1000 };
          let score = 0;
          if (lt.includes("portrait") || lt.includes("photo") || lt.includes("headshot") || lt.includes("face")) score += 350;
          if (lt.includes("poster")) score -= 200;
          // "with"/"and"/"group" in the file name usually means multiple subjects.
          if (lt.includes("with") || lt.includes(" and ") || lt.includes(" group")) score -= 250;
          // Require most of the person's name words to appear in the file name.
          const matches = baseWords.filter((w) => lt.includes(w));
          if (matches.length < Math.min(2, baseWords.length)) score -= 400;
          score += (matches.length / Math.max(1, baseWords.length)) * 500;
          if (lt.includes(".jpg") || lt.includes(".jpeg")) score += 100;
          if (lt.includes(".png")) score -= 20;
          if (lt.includes(".svg") || lt.includes(".webm") || lt.includes(".gif")) score -= 300;
          // Wordy file names correlate with event/context photos rather than portraits.
          const wordCount = lt.split(/[^a-z]/).filter((w) => w.length > 2).length;
          score -= wordCount * 15;
          return { title: fileTitle, score };
        })
        .sort((a: any, b: any) => b.score - a.score);

      const best = scored[0];
      if (!best || best.score <= 0) return null;
      return fetchImageInfo(best.title);
    } catch {
      return null;
    }
  };

  // Literal file reference: resolve it directly, no classification needed.
  if (trimmedTitle.toLowerCase().startsWith("file:") || trimmedTitle.toLowerCase().startsWith("image:")) {
    const fileUrl = await fetchImageInfo(trimmedTitle);
    return { url: fileUrl, source: fileUrl ? "file" : "file-miss" };
  }

  // Ambiguous node: let the article's intro text upgrade it to screen work.
  if (!isScreenWork && !isPerson) {
    const extract = await fetchWikipediaExtract();
    if (extract && looksLikeScreenWork(extract)) {
      isScreenWork = true;
    }
  }

  // Person branch: portraits first (pageimage → Wikidata P18 → Commons portrait),
  // then the fuzzier fallbacks.
  if (isPerson) {
    const fromPageImage = await fetchWikipediaPageImage();
    if (fromPageImage) return { url: fromPageImage, source: "pageimage" };
    const fromWikidata = await fetchWikidataImageForTitle(false);
    if (fromWikidata) return { url: fromWikidata, source: "wikidata" };
    const fromCommons = await fetchCommonsPortrait();
    if (fromCommons) return { url: fromCommons, source: "commons-portrait" };
    // Match non-`isPerson` branch: wikidata search + image search, or biographies stay blank too often
    const fromWikidataSearch = await fetchWikidataImageForTitle(true);
    if (fromWikidataSearch) return { url: fromWikidataSearch, source: "wikidata-search" };
    const fromDdg = await fetchPosterFromDuckDuckGo(trimmedTitle);
    if (fromDdg) return { url: fromDdg, source: "ddg-person" };
    return { url: null };
  }

  // Screen-work branch: posters first (article images → Commons poster search),
  // then generic entity images, then DDG.
  if (isScreenWork) {
    const fromEnwikiPoster = await fetchWikipediaPosterFromImages();
    if (fromEnwikiPoster) return { url: fromEnwikiPoster, source: "enwiki-images" };
    const fromCommons = await fetchCommonsPoster();
    if (fromCommons) return { url: fromCommons, source: "commons" };
    const fromWikidata = await fetchWikidataImageForTitle(false);
    if (fromWikidata) return { url: fromWikidata, source: "wikidata" };
    const fromPageImage = await fetchWikipediaPageImage();
    if (fromPageImage) return { url: fromPageImage, source: "pageimage" };
    const fromDdg = await fetchPosterFromDuckDuckGo(trimmedTitle);
    if (fromDdg) return { url: fromDdg, source: "ddg" };
    return { url: null };
  }

  // Generic branch: Wikidata (with fuzzy search allowed) → pageimage → Commons → DDG.
  const fromWikidata = await fetchWikidataImageForTitle(true);
  if (fromWikidata) return { url: fromWikidata, source: "wikidata" };
  const fromPageImage = await fetchWikipediaPageImage();
  if (fromPageImage) return { url: fromPageImage, source: "pageimage" };
  const fromCommons = await fetchCommonsPortrait();
  if (fromCommons) return { url: fromCommons, source: "commons" };
  const fromDdg = await fetchPosterFromDuckDuckGo(trimmedTitle);
  if (fromDdg) return { url: fromDdg, source: "ddg" };
  return { url: null };
};
@@ -1,3 +1,4 @@
1
+ "use client";
1
2
  export async function fetchWikipediaImage(title: string): Promise<string | null> {
2
3
  if (!title) return null;
3
4
  try {