@johndimm/constellations 1.0.1 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/App.tsx +360 -66
  2. package/FullPageConstellations.tsx +7 -4
  3. package/components/AppConfirmDialog.tsx +1 -0
  4. package/components/AppHeader.tsx +67 -30
  5. package/components/AppNotifications.tsx +1 -0
  6. package/components/BrowsePeople.tsx +3 -0
  7. package/components/ControlPanel.tsx +229 -250
  8. package/components/Graph.tsx +251 -87
  9. package/components/HelpOverlay.tsx +2 -1
  10. package/components/NodeContextMenu.tsx +123 -3
  11. package/components/PeopleBrowserSidebar.tsx +15 -6
  12. package/components/Sidebar.tsx +46 -19
  13. package/components/TimelineView.tsx +1 -0
  14. package/hooks/useExpansion.ts +85 -230
  15. package/hooks/useGraphActions.ts +1 -0
  16. package/hooks/useGraphState.ts +75 -40
  17. package/hooks/useKioskMode.ts +1 -0
  18. package/hooks/useNodeClickHandler.ts +23 -15
  19. package/hooks/useSearchHandlers.ts +60 -21
  20. package/host.ts +1 -1
  21. package/index.css +17 -3
  22. package/index.tsx +5 -3
  23. package/package.json +4 -2
  24. package/services/aiService.ts +27 -0
  25. package/services/aiUtils.ts +285 -195
  26. package/services/cacheService.ts +1 -0
  27. package/services/crossrefService.ts +1 -0
  28. package/services/deepseekService.ts +479 -0
  29. package/services/geminiService.ts +543 -736
  30. package/services/graphUtils.ts +128 -18
  31. package/services/imageService.ts +18 -0
  32. package/services/openAlexService.ts +1 -0
  33. package/services/resolveImageForTitle.ts +458 -0
  34. package/services/wikipediaImage.ts +1 -0
  35. package/services/wikipediaService.ts +79 -49
  36. package/sessionHandoff.ts +26 -0
  37. package/types.ts +3 -0
  38. package/utils/evidenceUtils.ts +1 -0
  39. package/utils/graphLogicUtils.ts +1 -0
  40. package/utils/wikiUtils.ts +14 -2
@@ -1,7 +1,23 @@
1
- import { fetchWithTimeout } from "./aiUtils";
1
+ "use client";
2
+
3
+ import { jsonFromResponse } from "./aiUtils";
2
4
 
3
5
  type WikiImageCacheEntry = { url: string | null; pageId?: number; pageTitle?: string; misses?: number };
4
6
 
7
+ // Session-level rate-limit gate: after any 429, block all Wikipedia/Wikidata calls for 90s.
8
+ let _wikiRateLimitedUntil = 0;
9
+ function wikiIsRateLimited() { return Date.now() < _wikiRateLimitedUntil; }
10
+ function wikiSetRateLimited() {
11
+ _wikiRateLimitedUntil = Date.now() + 90_000;
12
+ console.warn('[Wiki] 429 received — pausing all Wikipedia/Wikidata calls for 90s');
13
+ }
14
+ async function wikiFetch(url: string, init?: RequestInit): Promise<Response | null> {
15
+ if (wikiIsRateLimited()) return null;
16
+ const res = await fetch(url, init);
17
+ if (res.status === 429) { wikiSetRateLimited(); return null; }
18
+ return res;
19
+ }
20
+
5
21
  // DuckDuckGo image search fallback (posters/cover art when Wikimedia lacks a usable image).
6
22
  export const fetchDuckDuckGoPoster = async (q: string): Promise<string | null> => {
7
23
  // Respect network sandbox: if running in a browser without CORS, skip.
@@ -59,7 +75,8 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
59
75
  try {
60
76
  const url = `${api}?action=query&format=json&prop=imageinfo&titles=${encodeURIComponent(fileTitle)}&iiprop=url&iiurlwidth=500&origin=*`;
61
77
  const res = await fetch(url, { signal });
62
- const data = await res.json();
78
+ const data = (await jsonFromResponse(res)) as { query?: { pages?: Record<string, unknown> } } | null;
79
+ if (!data) continue;
63
80
  const pages = data.query?.pages;
64
81
  if (pages) {
65
82
  const page = Object.values(pages)[0] as any;
@@ -78,7 +95,7 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
78
95
  try {
79
96
  const wdUrl = `https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=claims&ids=${qid}&origin=*`;
80
97
  const wdRes = await fetch(wdUrl, { signal });
81
- const wdData = await wdRes.json();
98
+ const wdData = (await jsonFromResponse(wdRes)) as { entities?: Record<string, { claims?: any }> } | null;
82
99
  const claims = wdData?.entities?.[qid]?.claims;
83
100
  const p18 = claims?.P18?.[0]?.mainsnak?.datavalue?.value as string | undefined;
84
101
  if (!p18) return null;
@@ -95,8 +112,8 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
95
112
  try {
96
113
  const ppUrl = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=pageprops&titles=${encodeURIComponent(title)}&redirects=1&origin=*`;
97
114
  const ppRes = await fetch(ppUrl, { signal });
98
- const ppData = await ppRes.json();
99
- const pages = ppData?.query?.pages;
115
+ const ppData = await jsonFromResponse(ppRes);
116
+ const pages = (ppData as { query?: { pages?: unknown } } | null)?.query?.pages;
100
117
  const page = pages ? (Object.values(pages)[0] as any) : null;
101
118
  const qid = page?.pageprops?.wikibase_item;
102
119
  if (!qid || !/^Q\d+$/.test(qid)) return null;
@@ -112,7 +129,8 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
112
129
  // 1. Get page info, thumbnail, and all images in one go
113
130
  const url = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=pageimages|pageprops|images&titles=${encodeURIComponent(title)}&pithumbsize=500&imlimit=50&redirects=1&origin=*`;
114
131
  const res = await fetch(url, { signal });
115
- const data = await res.json();
132
+ const data = (await jsonFromResponse(res)) as { query?: { pages?: Record<string, unknown> } } | null;
133
+ if (!data) return { url: null };
116
134
 
117
135
  const pages = data.query?.pages;
118
136
  if (!pages) return { url: null };
@@ -146,7 +164,7 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
146
164
  if (candidates.length === 0) return { url: null };
147
165
 
148
166
  const normalized = query.trim().toLowerCase();
149
- const queryWords = normalized.split(/\s+/).filter(w => w.length > 1);
167
+ const queryWords = normalized.split(/\s+/).filter((w: string) => w.length > 1);
150
168
  const isPerson = context?.toLowerCase() === 'person';
151
169
 
152
170
  const scoredCandidates = candidates.map(c => {
@@ -198,7 +216,7 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
198
216
  if (t.includes('.png')) s -= isPerson ? 20 : 50;
199
217
 
200
218
  // Prefer solo filenames
201
- const wordCount = t.split(/[^a-z]/).filter(w => w.length > 2).length;
219
+ const wordCount = t.split(/[^a-z]/).filter((w: string) => w.length > 2).length;
202
220
  s -= (wordCount * 15); // Stronger penalty for long, descriptive filenames
203
221
 
204
222
  return { ...c, score: s };
@@ -245,7 +263,8 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
245
263
  const url = `https://www.googleapis.com/books/v1/volumes?q=${encodeURIComponent(q)}&maxResults=1`;
246
264
  const res = await fetch(url, { signal });
247
265
  if (res.ok) {
248
- const data = await res.json();
266
+ const data = (await jsonFromResponse(res)) as { items?: { volumeInfo?: { imageLinks?: { thumbnail?: string } } }[] } | null;
267
+ if (!data) return null;
249
268
  const img = data.items?.[0]?.volumeInfo?.imageLinks?.thumbnail;
250
269
  return img ? img.replace('http://', 'https://') : null;
251
270
  }
@@ -279,10 +298,10 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
279
298
  // console.log(`🔍 [ImageSearch] Attempt 1 (Media-Aware): "${searchQuery}"`);
280
299
  const initialSearchUrl = `https://en.wikipedia.org/w/api.php?action=query&format=json&list=search&srsearch=${encodeURIComponent(searchQuery)}&srlimit=5&origin=*`;
281
300
  const initialSearchRes = await fetch(initialSearchUrl, { signal: controller.signal });
282
- const initialSearchData = await initialSearchRes.json();
301
+ const initialSearchData = (await jsonFromResponse(initialSearchRes)) as { query?: { search?: { title: string; snippet?: string }[] } } | null;
283
302
 
284
303
  let bestTitle = query;
285
- if (initialSearchData.query?.search?.length) {
304
+ if (initialSearchData?.query?.search?.length) {
286
305
  const results = initialSearchData.query.search;
287
306
  const normalized = baseTitle.toLowerCase();
288
307
  const avoidMedia = false; // For images, we generally allow media if it's the right title
@@ -305,7 +324,7 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
305
324
 
306
325
  // 2. Context matching
307
326
  if (context) {
308
- const words = context.toLowerCase().split(/\s+/).filter(w => w.length > 2);
327
+ const words = context.toLowerCase().split(/\s+/).filter((w: string) => w.length > 2);
309
328
  words.forEach(word => {
310
329
  if (title.includes(word)) s += 100;
311
330
  if (snippet.includes(word)) s += 50;
@@ -322,7 +341,9 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
322
341
  return s;
323
342
  };
324
343
 
325
- const scored = results.map((r: any) => ({ r, score: scoreResult(r) })).sort((a, b) => b.score - a.score);
344
+ const scored = results
345
+ .map((r: any) => ({ r, score: scoreResult(r) }))
346
+ .sort((a: { score: number }, b: { score: number }) => b.score - a.score);
326
347
  bestTitle = scored[0]?.r?.title || query;
327
348
  // console.log(`✅ [ImageSearch] Chosen result "${bestTitle}" with score ${scored[0]?.score ?? 'n/a'}`);
328
349
  }
@@ -336,9 +357,9 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
336
357
  // console.log(`🔍 [ImageSearch] Attempt 2 (Commons for Person): "${baseTitle}"`);
337
358
  const commonsUrl = `https://commons.wikimedia.org/w/api.php?action=query&format=json&list=search&srsearch=${encodeURIComponent(baseTitle)}&srnamespace=6&srlimit=10&origin=*`;
338
359
  const commonsRes = await fetch(commonsUrl, { signal: controller.signal });
339
- const commonsData = await commonsRes.json();
340
- if (commonsData.query?.search?.length) {
341
- const baseWords = baseTitle.toLowerCase().split(/\s+/).filter(w => w.length > 1);
360
+ const commonsData = (await jsonFromResponse(commonsRes)) as { query?: { search?: any[] } } | null;
361
+ if (commonsData?.query?.search?.length) {
362
+ const baseWords = baseTitle.toLowerCase().split(/\s+/).filter((w: string) => w.length > 1);
342
363
  const scoredResults = commonsData.query.search.map((res: any) => {
343
364
  const t = res.title.toLowerCase();
344
365
  if (excludePatterns.some(p => t.includes(p))) return { res, score: -1000 };
@@ -356,7 +377,7 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
356
377
  if (t.includes('.png')) s -= 20; // Reduced penalty for Person
357
378
  if (t.includes('.svg') || t.includes('.webm') || t.includes('.gif')) s -= 300;
358
379
 
359
- const wordCount = t.split(/[^a-z]/).filter(w => w.length > 2).length;
380
+ const wordCount = t.split(/[^a-z]/).filter((w: string) => w.length > 2).length;
360
381
  s -= (wordCount * 15);
361
382
 
362
383
  return { res, score: s };
@@ -386,9 +407,9 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
386
407
  // console.log(`🔍 [ImageSearch] Attempt 4 (Commons): "${baseTitle}"`);
387
408
  const commonsUrl = `https://commons.wikimedia.org/w/api.php?action=query&format=json&list=search&srsearch=${encodeURIComponent(baseTitle)}&srnamespace=6&srlimit=10&origin=*`;
388
409
  const commonsRes = await fetch(commonsUrl, { signal: controller.signal });
389
- const commonsData = await commonsRes.json();
390
- if (commonsData.query?.search?.length) {
391
- const baseWords = baseTitle.toLowerCase().split(/\s+/).filter(w => w.length > 1);
410
+ const commonsData = (await jsonFromResponse(commonsRes)) as { query?: { search?: any[] } } | null;
411
+ if (commonsData?.query?.search?.length) {
412
+ const baseWords = baseTitle.toLowerCase().split(/\s+/).filter((w: string) => w.length > 1);
392
413
  const scoredResults = commonsData.query.search.map((res: any) => {
393
414
  const t = res.title.toLowerCase();
394
415
  if (excludePatterns.some(p => t.includes(p))) return { res, score: -1000 };
@@ -408,7 +429,7 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
408
429
  if (t.includes('.png')) s -= 50;
409
430
  if (t.includes('.svg') || t.includes('.webm') || t.includes('.gif')) s -= 300;
410
431
 
411
- const wordCount = t.split(/[^a-z]/).filter(w => w.length > 2).length;
432
+ const wordCount = t.split(/[^a-z]/).filter((w: string) => w.length > 2).length;
412
433
  s -= (wordCount * 15);
413
434
 
414
435
  return { res, score: s };
@@ -426,8 +447,8 @@ export const fetchWikipediaImage = async (query: string, context?: string): Prom
426
447
  // console.log(`🔍 [ImageSearch] Attempt 5 (Search): "${baseTitle}"`);
427
448
  const searchUrl = `https://en.wikipedia.org/w/api.php?action=query&format=json&list=search&srsearch=${encodeURIComponent(baseTitle)}&srlimit=5&origin=*`;
428
449
  const searchRes = await fetch(searchUrl, { signal: controller.signal });
429
- const searchData = await searchRes.json();
430
- if (searchData.query?.search?.length) {
450
+ const searchData = (await jsonFromResponse(searchRes)) as { query?: { search?: { title: string }[] } } | null;
451
+ if (searchData?.query?.search?.length) {
431
452
  for (const result of searchData.query.search) {
432
453
  const img = await fetchPageImage(result.title, controller.signal);
433
454
  if (img.url) return img;
@@ -486,6 +507,7 @@ export const fetchWikipediaSummary = async (
486
507
  depth: number = 0,
487
508
  triedNoContext = false
488
509
  ): Promise<{ extract: string | null; pageid: number | null; title: string | null; year?: number | null; mentioningPageTitles?: string[] | null; searchContext?: string | null }> => {
510
+ if (wikiIsRateLimited()) return { extract: null, pageid: null, title: null };
489
511
  const normKey = `${query.trim().toLowerCase()}|${context || ''}`;
490
512
  if (visited.has(normKey) || depth > 2) {
491
513
  return { extract: null, pageid: null, title: null };
@@ -497,8 +519,10 @@ export const fetchWikipediaSummary = async (
497
519
  const tryDirectLookup = async (titleToFetch: string) => {
498
520
  try {
499
521
  const directUrl = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=extracts|pageprops&exintro&explaintext&titles=${encodeURIComponent(titleToFetch)}&redirects=1&origin=*`;
500
- const directRes = await fetch(directUrl);
501
- const directData = await directRes.json();
522
+ const directRes = await wikiFetch(directUrl);
523
+ if (!directRes) return null;
524
+ const directData = (await jsonFromResponse(directRes)) as { query?: { pages?: unknown; redirects?: unknown } } | null;
525
+ if (!directData) return null;
502
526
  const directPages = directData.query?.pages;
503
527
 
504
528
  if (directPages) {
@@ -541,7 +565,7 @@ export const fetchWikipediaSummary = async (
541
565
  // for disambiguation (e.g., "Republic (book)" vs "Republic").
542
566
  const cleanQuery = query.trim();
543
567
  const normalized = cleanQuery.toLowerCase();
544
- const queryNameParts = normalized.split(/[\s-]+/).filter(w => w.length > 2);
568
+ const queryNameParts = normalized.split(/[\s-]+/).filter((w: string) => w.length > 2);
545
569
  const looksLikePersonName = queryNameParts.length >= 2 && !/\d/.test(cleanQuery);
546
570
  const queryLastName = looksLikePersonName ? queryNameParts[queryNameParts.length - 1].toLowerCase() : null;
547
571
 
@@ -564,7 +588,7 @@ export const fetchWikipediaSummary = async (
564
588
  const directExact = await tryDirectLookup(cleanQuery);
565
589
  if (directExact?.extract) {
566
590
  if (queryLastName) {
567
- const titleParts = String(directExact.title || "").toLowerCase().split(/[\s-]+/).filter(w => w.length > 2);
591
+ const titleParts = String(directExact.title || "").toLowerCase().split(/[\s-]+/).filter((w: string) => w.length > 2);
568
592
  // If it's a redirect, we are MUCH more lenient. Napoleon Bonaparte -> Napoleon is a classic case.
569
593
  if (!titleParts.includes(queryLastName) && !directExact.redirected) {
570
594
  // console.log(`⚠️ [Wiki] Ignoring direct match "${directExact.title}" for "${cleanQuery}" (missing last-name match and no redirect).`);
@@ -638,11 +662,12 @@ export const fetchWikipediaSummary = async (
638
662
 
639
663
  const avoidMedia = /\b(project|program|programme|operation|war|battle|campaign|treaty|scandal|scientist)\b/i.test(baseQuery);
640
664
  const searchUrl = `https://en.wikipedia.org/w/api.php?action=query&format=json&list=search&srsearch=${encodeURIComponent(searchQuery)}&srlimit=5&origin=*`;
641
- const searchRes = await fetch(searchUrl);
642
- const searchData = await searchRes.json();
665
+ const searchRes = await wikiFetch(searchUrl);
666
+ if (!searchRes) return { extract: null, pageid: null, title: null };
667
+ const searchData = (await jsonFromResponse(searchRes)) as { query?: { search?: any[] } } | null;
643
668
 
644
669
  let bestTitle = query;
645
- if (searchData.query?.search?.length) {
670
+ if (searchData?.query?.search?.length) {
646
671
  const results = searchData.query.search;
647
672
  const scoreResult = (r: any, index: number) => {
648
673
  const title = r.title.toLowerCase();
@@ -697,7 +722,7 @@ export const fetchWikipediaSummary = async (
697
722
 
698
723
  // 2. Context matching
699
724
  if (context) {
700
- const words = context.toLowerCase().split(/\s+/).filter(w => w.length > 2);
725
+ const words = context.toLowerCase().split(/\s+/).filter((w: string) => w.length > 2);
701
726
  words.forEach(word => {
702
727
  if (title.includes(word)) s += 100;
703
728
  if (snippet.includes(word)) s += 50;
@@ -754,7 +779,7 @@ export const fetchWikipediaSummary = async (
754
779
  bestTitle = scored[0]?.r?.title || query;
755
780
 
756
781
 
757
- const titleNameParts = bestTitle.toLowerCase().split(/[\s-]+/).filter(w => w.length > 2);
782
+ const titleNameParts = bestTitle.toLowerCase().split(/[\s-]+/).filter((w: string) => w.length > 2);
758
783
  // Require at least one full word match, not just a substring overlap
759
784
  const hasFullWordMatch = queryNameParts.some(q => titleNameParts.includes(q));
760
785
  const hasOverlap = queryNameParts.some(q => titleNameParts.some(t => t.includes(q) || q.includes(t)));
@@ -764,7 +789,7 @@ export const fetchWikipediaSummary = async (
764
789
 
765
790
  for (const titleToTry of candidates) {
766
791
  if (queryNameParts.length > 0) {
767
- const candidateParts = titleToTry.toLowerCase().split(/[\s-]+/).filter(w => w.length > 2);
792
+ const candidateParts = titleToTry.toLowerCase().split(/[\s-]+/).filter((w: string) => w.length > 2);
768
793
 
769
794
  // STRICT PERSON MATCHING:
770
795
  // If we are looking for a person (query has 2+ name parts),
@@ -788,8 +813,10 @@ export const fetchWikipediaSummary = async (
788
813
  }
789
814
  }
790
815
  const summaryUrl = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=extracts|pageprops&exintro&explaintext&titles=${encodeURIComponent(titleToTry)}&redirects=1&origin=*`;
791
- const summaryRes = await fetch(summaryUrl);
792
- const summaryData = await summaryRes.json();
816
+ const summaryRes = await wikiFetch(summaryUrl);
817
+ if (!summaryRes) break;
818
+ const summaryData = (await jsonFromResponse(summaryRes)) as { query?: { pages?: unknown } } | null;
819
+ if (!summaryData) continue;
793
820
  const pages = summaryData.query?.pages;
794
821
 
795
822
  if (pages) {
@@ -831,14 +858,14 @@ export const fetchWikipediaSummary = async (
831
858
  }
832
859
 
833
860
  if (queryNameParts.length >= 2) {
834
- const pageParts = String(page.title || "").toLowerCase().split(/[\s-]+/).filter(w => w.length > 2);
861
+ const pageParts = String(page.title || "").toLowerCase().split(/[\s-]+/).filter((w: string) => w.length > 2);
835
862
  const allMatch = queryNameParts.every(q => pageParts.includes(q));
836
863
  if (!allMatch) {
837
864
  // console.log(`⚠️ [Wiki] Skipping resolved title "${page.title}" for "${cleanQuery}" (not all name parts match).`);
838
865
  continue;
839
866
  }
840
867
  } else if (queryLastName) {
841
- const pageParts = String(page.title || "").toLowerCase().split(/[\s-]+/).filter(w => w.length > 2);
868
+ const pageParts = String(page.title || "").toLowerCase().split(/[\s-]+/).filter((w: string) => w.length > 2);
842
869
  if (!pageParts.includes(queryLastName)) {
843
870
  // console.log(`⚠️ [Wiki] Skipping resolved title "${page.title}" for "${cleanQuery}" (missing last-name match).`);
844
871
  continue;
@@ -936,9 +963,9 @@ export const fetchWikipediaExtract = async (
936
963
  // when exchars is set (returns fewer chars than the article actually contains). We fetch
937
964
  // the full extract and truncate client-side instead.
938
965
  const url = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=extracts|pageprops&explaintext&titles=${encodeURIComponent(title)}&redirects=1&origin=*`;
939
- // Hard cap: a hung Wikipedia response must not strand graph expansion spinners indefinitely.
940
- const res = await fetchWithTimeout(url, {}, 25_000);
941
- const data = await res.json();
966
+ const res = await fetch(url);
967
+ const data = (await jsonFromResponse(res)) as { query?: { pages?: unknown } } | null;
968
+ if (!data) return { extract: null, pageid: null, title: null };
942
969
  const pages = data.query?.pages;
943
970
  if (!pages) return { extract: null, pageid: null, title: null };
944
971
  const page = Object.values(pages)[0] as any;
@@ -988,8 +1015,8 @@ export const fetchWikidataCastForTitle = async (title: string, limit: number = 1
988
1015
  try {
989
1016
  const pagepropsUrl = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=pageprops&titles=${encodeURIComponent(title)}&redirects=1&origin=*`;
990
1017
  const ppRes = await fetch(pagepropsUrl, { signal });
991
- const ppData = await ppRes.json();
992
- const pages = ppData?.query?.pages;
1018
+ const ppData = await jsonFromResponse(ppRes);
1019
+ const pages = (ppData as { query?: { pages?: unknown } } | null)?.query?.pages;
993
1020
  if (pages) {
994
1021
  const page = Object.values(pages)[0] as any;
995
1022
  const candidate = page?.pageprops?.wikibase_item;
@@ -1008,8 +1035,8 @@ export const fetchWikidataCastForTitle = async (title: string, limit: number = 1
1008
1035
 
1009
1036
  const entityUrl = `https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=claims&ids=${encodeURIComponent(wikidataId)}&origin=*`;
1010
1037
  const entRes = await fetch(entityUrl, { signal });
1011
- const entData = await entRes.json();
1012
- const claims = entData?.entities?.[wikidataId]?.claims;
1038
+ const entData = await jsonFromResponse(entRes);
1039
+ const claims = (entData as { entities?: Record<string, { claims?: unknown }> } | null)?.entities?.[wikidataId]?.claims;
1013
1040
  if (!claims) return [];
1014
1041
 
1015
1042
  const castIds = extractWikidataItemIds(claims, "P161");
@@ -1036,7 +1063,8 @@ const fetchWikidataLabels = async (ids: string[], signal: AbortSignal): Promise<
1036
1063
  try {
1037
1064
  const url = `https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=labels&languages=en&ids=${encodeURIComponent(chunk.join("|"))}&origin=*`;
1038
1065
  const res = await fetch(url, { signal });
1039
- const data = await res.json();
1066
+ const data = (await jsonFromResponse(res)) as { entities?: Record<string, { labels?: { en?: { value?: string } } }> } | null;
1067
+ if (!data) continue;
1040
1068
  const entities = data?.entities || {};
1041
1069
  for (const [id, ent] of Object.entries<any>(entities)) {
1042
1070
  const label = ent?.labels?.en?.value;
@@ -1051,9 +1079,11 @@ const fetchWikidataLabels = async (ids: string[], signal: AbortSignal): Promise<
1051
1079
 
1052
1080
  const resolveWikidataIdBySearch = async (label: string, signal: AbortSignal): Promise<string | null> => {
1053
1081
  try {
1082
+ if (wikiIsRateLimited()) return null;
1054
1083
  const url = `https://www.wikidata.org/w/api.php?action=wbsearchentities&format=json&language=en&limit=8&search=${encodeURIComponent(label)}&origin=*`;
1055
1084
  const res = await fetch(url, { signal });
1056
- const data = await res.json();
1085
+ if (res.status === 429) { wikiSetRateLimited(); return null; }
1086
+ const data = (await jsonFromResponse(res)) as { search?: any[] } | null;
1057
1087
  const results: any[] = data?.search || [];
1058
1088
  if (!results.length) return null;
1059
1089
 
@@ -1095,8 +1125,8 @@ export const fetchWikidataKeyPeopleForTitle = async (title: string): Promise<Wik
1095
1125
  try {
1096
1126
  const pagepropsUrl = `https://en.wikipedia.org/w/api.php?action=query&format=json&prop=pageprops&titles=${encodeURIComponent(title)}&redirects=1&origin=*`;
1097
1127
  const ppRes = await fetch(pagepropsUrl, { signal });
1098
- const ppData = await ppRes.json();
1099
- const pages = ppData?.query?.pages;
1128
+ const ppData = await jsonFromResponse(ppRes);
1129
+ const pages = (ppData as { query?: { pages?: unknown } } | null)?.query?.pages;
1100
1130
  if (pages) {
1101
1131
  const page = Object.values(pages)[0] as any;
1102
1132
  const resolvedTitle = String(page?.title || "");
@@ -1127,7 +1157,7 @@ export const fetchWikidataKeyPeopleForTitle = async (title: string): Promise<Wik
1127
1157
  // 2) Pull key-people claims.
1128
1158
  const entityUrl = `https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=claims&ids=${encodeURIComponent(wikidataId)}&origin=*`;
1129
1159
  const entRes = await fetch(entityUrl, { signal });
1130
- const entData = await entRes.json();
1160
+ const entData = (await jsonFromResponse(entRes)) as { entities?: Record<string, { claims?: unknown }> } | null;
1131
1161
  const entity = entData?.entities?.[wikidataId];
1132
1162
  const claims = entity?.claims;
1133
1163
  if (!claims) {
package/sessionHandoff.ts CHANGED
@@ -130,3 +130,29 @@ export function takeEmbedHandoffForInitialState(): ConstellationsSessionHandoffV
130
130
  embedHandoffMem = null;
131
131
  return null;
132
132
  }
133
+
134
+ declare global {
135
+ interface Window {
136
+ __soundingsConstellationsGetHandoff?: () => unknown;
137
+ }
138
+ }
139
+
140
+ /** Serialize current embedded graph (`__soundingsConstellationsGetHandoff`) before navigating away. */
141
+ export function persistWindowConstellationsHandoffToSession(): void {
142
+ if (typeof window === 'undefined') return;
143
+ try {
144
+ const fn = window.__soundingsConstellationsGetHandoff;
145
+ if (typeof fn !== 'function') return;
146
+ const payload = fn();
147
+ if (!payload || typeof payload !== 'object') return;
148
+ const p = payload as { v?: number; graph?: { nodes?: unknown[] } };
149
+ if (p.v !== 1 || !p.graph?.nodes?.length) return;
150
+ try {
151
+ sessionStorage.setItem(SOUNDINGS_CONSTELLATIONS_HANDOFF_KEY, JSON.stringify(payload));
152
+ } catch (e) {
153
+ console.warn('[constellations] handoff too large for sessionStorage', e);
154
+ }
155
+ } catch (e) {
156
+ console.warn('[constellations] handoff persist', e);
157
+ }
158
+ }
package/types.ts CHANGED
@@ -1,3 +1,4 @@
1
+ "use client";
1
2
  import { SimulationNodeDatum, SimulationLinkDatum } from 'd3';
2
3
 
3
4
  export interface GraphNode extends SimulationNodeDatum {
@@ -20,6 +21,8 @@ export interface GraphNode extends SimulationNodeDatum {
20
21
  atomic_type?: string; // e.g. "Symptom"
21
22
  composite_type?: string; // e.g. "Disease"
22
23
  mentioningPageTitles?: string[]; // Titles of articles mentioning this entity (for non-article fallback)
24
+ /** Measured card height in timeline view (set by Graph layout). */
25
+ h?: number;
23
26
  // D3 Simulation properties explicitly defined to ensure access
24
27
  x?: number;
25
28
  y?: number;
@@ -1,3 +1,4 @@
1
+ "use client";
1
2
  export const normalizeForEvidence = (s: unknown) =>
2
3
  String(s || '')
3
4
  .toLowerCase()
@@ -1,3 +1,4 @@
1
+ "use client";
1
2
  export const getLinkKey = (a: number | string, b: number | string) => {
2
3
  const s = String(a);
3
4
  const t = String(b);
@@ -1,3 +1,4 @@
1
+ "use client";
1
2
  import { fetchWikipediaExtract } from '../services/wikipediaService';
2
3
 
3
4
  export const buildWikiUrl = (title: string, wikipediaId?: string | number) => {
@@ -21,10 +22,21 @@ export const looksLikeWikipediaTitle = (t: unknown) => {
21
22
  return true;
22
23
  };
23
24
 
24
- const extractCache: Map<string, string | null> =
25
- ((window as any).__wikiExtractCache ||= new Map<string, string | null>());
25
+ const serverExtractCache = new Map<string, string | null>();
26
+
27
+ function getExtractCacheMap(): Map<string, string | null> {
28
+ if (typeof window === 'undefined') {
29
+ return serverExtractCache;
30
+ }
31
+ const w = window as unknown as { __wikiExtractCache?: Map<string, string | null> };
32
+ if (!w.__wikiExtractCache) {
33
+ w.__wikiExtractCache = new Map();
34
+ }
35
+ return w.__wikiExtractCache;
36
+ }
26
37
 
27
38
  export const getExtractCached = async (title: string) => {
39
+ const extractCache = getExtractCacheMap();
28
40
  const key = String(title || '').trim();
29
41
  if (!key) return null;
30
42
  if (extractCache.has(key)) return extractCache.get(key) || null;