@mcptoolshop/registry-stats 3.0.0 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,7 +1,7 @@
1
- <p align="center">
2
- <a href="README.ja.md">日本語</a> | <a href="README.zh.md">中文</a> | <a href="README.es.md">Español</a> | <a href="README.fr.md">Français</a> | <a href="README.hi.md">हिन्दी</a> | <a href="README.it.md">Italiano</a> | <a href="README.pt-BR.md">Português (BR)</a>
3
- </p>
4
-
1
+ <p align="center">
2
+ <a href="README.ja.md">日本語</a> | <a href="README.zh.md">中文</a> | <a href="README.es.md">Español</a> | <a href="README.fr.md">Français</a> | <a href="README.hi.md">हिन्दी</a> | <a href="README.it.md">Italiano</a> | <a href="README.pt-BR.md">Português (BR)</a>
3
+ </p>
4
+
5
5
  <p align="center">
6
6
  <img src="https://raw.githubusercontent.com/mcp-tool-shop-org/brand/main/logos/registry-stats/readme.png" alt="registry-stats logo" width="400" />
7
7
  </p>
@@ -39,8 +39,8 @@ Zero runtime dependencies. Uses native `fetch()`. Node 18+.
39
39
 
40
40
  | Layer | What it does |
41
41
  |-------|-------------|
42
- | **Engine** | TypeScript library + CLI + REST server. Query five registries with one interface. Published to npm as `@mcptoolshop/registry-stats`. |
43
- | **Dashboard** | Astro-powered web app with Pulse AI co-pilot (streaming voice, web search, fullscreen, GitHub data connectors), six interactive charts, live refresh, export reports (PDF / JSONL / Markdown), and tabbed Help guide. Rebuilt weekly by CI; refreshable on demand. |
42
+ | **Engine** | TypeScript library + CLI + REST server + AI inference. Query five registries with one interface. Published to npm as `@mcptoolshop/registry-stats`. |
43
+ | **Dashboard** | Astro-powered web app with AI inference panel, Pulse AI co-pilot (streaming voice, web search, fullscreen, GitHub data connectors), six interactive charts, live refresh, export reports (PDF / JSONL / Markdown), and tabbed Help guide. Rebuilt daily by CI; refreshable on demand. |
44
44
  | **Desktop** | WinUI 3 + WebView2 native Windows app. Bundles the dashboard offline, fetches live stats on demand. |
45
45
 
46
46
  ## Dashboard
@@ -52,16 +52,56 @@ A self-updating stats dashboard lives at [`/dashboard/`](https://mcp-tool-shop-o
52
52
  - **Executive snapshot** — health score (0–100), diversity index, weekly change, total downloads across all registries
53
53
  - **Six interactive charts** — 30-day trend (aggregate / per-registry / top-5 toggles), registry share (polar area), portfolio risk (histogram + Gini & P90), top-10 momentum, velocity tracker with sparklines, and 30-day heatmap with spike detection (>2σ)
54
54
  - **Smart growth engine** — handles small-denominator distortion with baseline threshold, percentage cap, and damped velocity formula
55
+ - **AI Inference Panel** — portfolio momentum (-100 to +100), risk score, 7-day forecast with confidence intervals, and automated recommendations (growth, risk, opportunity, attention)
55
56
  - **Actionable insights** — auto-generated recommendations and attention alerts for declining packages
56
57
  - **Pulse panel** — split view of Established Movers (≥ 50 downloads/wk) and Emerging & New packages, with inline 7-day sparklines, absolute + percentage deltas, baseline context, and a one-line executive summary
57
58
  - **Live refresh** — on-demand client-side fetch from npm and PyPI APIs with progress indicator; results cached in sessionStorage (5 min TTL) so tab switches are instant
58
59
  - **Export reports** — dropdown next to the Refresh button offering three formats: **Exec PDF** (via jsPDF), **LLM JSONL** (typed records for AI ingestion), and **Dev Markdown** (GFM tables)
59
60
  - **Leaderboard** — 132 packages ranked by weekly downloads with inline 30-day sparklines and smart trend badges
60
61
  - **Setup page** — portfolio editor with validation, registry-sync companion section, and pipeline overview
61
- - **Help tab** — human-friendly guide covering every tab, key concepts, AI assistant tips, data pipeline, and useful links
62
+ - **Leaderboard search** — instant text filter for finding packages by name or registry
63
+ - **Keyboard navigation** — arrow keys to cycle between tabs
64
+ - **Help tab** — human-friendly guide covering every tab, key concepts, AI inference engine, data pipeline, and useful links
62
65
  - **Dark / light theme** — follows system preference
66
+ - **Mobile responsive** — hamburger menu for small screens
67
+
68
+ Data is fetched at build time and rebuilt daily by CI (06:00 UTC). Live refresh pulls the latest numbers directly from registry APIs. Configure tracked packages in `site/src/data/packages.json`.
69
+
70
+ ## AI Inference Engine
71
+
72
+ Zero-dependency, pure-math inference that runs at build time — no ML runtime, no external APIs.
73
+
74
+ ```typescript
75
+ import {
76
+ forecast, detectAnomalies, segmentTrends,
77
+ detectSeasonality, computeMomentum,
78
+ generateRecommendations, inferPortfolio,
79
+ } from '@mcptoolshop/registry-stats';
80
+
81
+ // 7-day forecast with 80% confidence intervals
82
+ const predictions = forecast(dailySeries, 7);
83
+ // → [{ day: 1, predicted: 142, lower: 98, upper: 186 }, ...]
84
+
85
+ // Anomaly detection (adaptive rolling z-score, 14-day window)
86
+ const anomalies = detectAnomalies(dailySeries);
87
+ // → [{ day: 20, value: 1500, expected: 120, zscore: 4.2, type: 'spike' }]
88
+
89
+ // Composite momentum score (-100 to +100)
90
+ const momentum = computeMomentum(dailySeries);
91
+
92
+ // Full portfolio analysis
93
+ const result = inferPortfolio(leaderboard, { gini: 0.6, npmPct: 85 });
94
+ // → { packages, forecastTotal7, riskScore, portfolioMomentum, recommendations }
95
+ ```
63
96
 
64
- Data is fetched at build time and rebuilt weekly by CI (Mondays 06:00 UTC). Live refresh pulls the latest numbers directly from registry APIs. Configure tracked packages in `site/src/data/packages.json` (132 packages across 5 registries).
97
+ | Capability | Method | What it does |
98
+ |-----------|--------|-------------|
99
+ | **Forecast** | Weighted linear regression | Exponential recency bias, 80% CI that widens over time |
100
+ | **Anomaly detection** | Adaptive rolling z-score | 14-day baseline window, detects spikes and drops |
101
+ | **Trend segmentation** | Piecewise linear | Identifies up/down/flat segments in time series |
102
+ | **Seasonality** | Day-of-week decomposition | Detects weekly patterns, reports peak day |
103
+ | **Momentum** | Composite score | Direction + acceleration + consistency + volume |
104
+ | **Recommendations** | Rule engine | Growth, risk, opportunity, and attention categories |
65
105
 
66
106
  ## Desktop App
67
107
 
@@ -212,8 +252,10 @@ await stats('npm', 'express', { cache }); // cache hit
212
252
 
213
253
  - Automatic retry with exponential backoff on 429/5xx errors
214
254
  - Respects `Retry-After` headers
255
+ - 30-second request timeouts via `AbortSignal.timeout`
215
256
  - Concurrency limiting for bulk requests
216
257
  - Optional TTL cache (pluggable — bring your own Redis/file backend via `StatsCache` interface)
258
+ - SHA-pinned GitHub Actions for supply chain security
217
259
 
218
260
  ## REST API Server
219
261
 
package/dist/cli.js CHANGED
@@ -40,7 +40,7 @@ async function fetchWithRetry(url, registry, init) {
40
40
  let lastError;
41
41
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
42
42
  await acquireSlot(registry);
43
- const res = await fetch(url, init);
43
+ const res = await fetch(url, { signal: AbortSignal.timeout(3e4), ...init });
44
44
  if (res.status === 404) return null;
45
45
  if (res.ok) return res.json();
46
46
  const retryAfter = res.headers.get("retry-after");
@@ -62,7 +62,7 @@ async function fetchWithRetry(url, registry, init) {
62
62
  async function fetchDirect(url, registry, init) {
63
63
  let lastError;
64
64
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
65
- const res = await fetch(url, init);
65
+ const res = await fetch(url, { signal: AbortSignal.timeout(3e4), ...init });
66
66
  if (res.status === 404) return null;
67
67
  if (res.ok) return res.json();
68
68
  const retryAfter = res.headers.get("retry-after");
@@ -284,7 +284,8 @@ var docker = {
284
284
  if (options?.dockerToken) {
285
285
  headers["Authorization"] = `Bearer ${options.dockerToken}`;
286
286
  }
287
- const json2 = await fetchWithRetry(`${API4}/${pkg}`, "docker", { headers });
287
+ const safePkg = pkg.split("/").map((s) => encodeURIComponent(s)).join("/");
288
+ const json2 = await fetchWithRetry(`${API4}/${safePkg}`, "docker", { headers });
288
289
  if (!json2 || !json2.name || !json2.namespace) return null;
289
290
  return {
290
291
  registry: "docker",
package/dist/index.cjs CHANGED
@@ -22,11 +22,18 @@ var index_exports = {};
22
22
  __export(index_exports, {
23
23
  RegistryError: () => RegistryError,
24
24
  calc: () => calc,
25
+ computeMomentum: () => computeMomentum,
25
26
  createCache: () => createCache,
26
27
  createHandler: () => createHandler,
27
28
  defaultConfig: () => defaultConfig,
29
+ detectAnomalies: () => detectAnomalies,
30
+ detectSeasonality: () => detectSeasonality,
31
+ forecast: () => forecast,
32
+ generateRecommendations: () => generateRecommendations,
33
+ inferPortfolio: () => inferPortfolio,
28
34
  loadConfig: () => loadConfig,
29
35
  registerProvider: () => registerProvider,
36
+ segmentTrends: () => segmentTrends,
30
37
  serve: () => serve,
31
38
  starterConfig: () => starterConfig,
32
39
  stats: () => stats
@@ -69,7 +76,7 @@ async function fetchWithRetry(url, registry, init) {
69
76
  let lastError;
70
77
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
71
78
  await acquireSlot(registry);
72
- const res = await fetch(url, init);
79
+ const res = await fetch(url, { signal: AbortSignal.timeout(3e4), ...init });
73
80
  if (res.status === 404) return null;
74
81
  if (res.ok) return res.json();
75
82
  const retryAfter = res.headers.get("retry-after");
@@ -91,7 +98,7 @@ async function fetchWithRetry(url, registry, init) {
91
98
  async function fetchDirect(url, registry, init) {
92
99
  let lastError;
93
100
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
94
- const res = await fetch(url, init);
101
+ const res = await fetch(url, { signal: AbortSignal.timeout(3e4), ...init });
95
102
  if (res.status === 404) return null;
96
103
  if (res.ok) return res.json();
97
104
  const retryAfter = res.headers.get("retry-after");
@@ -313,7 +320,8 @@ var docker = {
313
320
  if (options?.dockerToken) {
314
321
  headers["Authorization"] = `Bearer ${options.dockerToken}`;
315
322
  }
316
- const json2 = await fetchWithRetry(`${API4}/${pkg}`, "docker", { headers });
323
+ const safePkg = pkg.split("/").map((s) => encodeURIComponent(s)).join("/");
324
+ const json2 = await fetchWithRetry(`${API4}/${safePkg}`, "docker", { headers });
317
325
  if (!json2 || !json2.name || !json2.namespace) return null;
318
326
  return {
319
327
  registry: "docker",
@@ -600,6 +608,299 @@ Endpoints:`);
600
608
  return server;
601
609
  }
602
610
 
611
+ // src/inference.ts
612
+ function mean(arr) {
613
+ if (arr.length === 0) return 0;
614
+ return arr.reduce((a, b) => a + b, 0) / arr.length;
615
+ }
616
+ function stddev(arr) {
617
+ if (arr.length < 2) return 0;
618
+ const m = mean(arr);
619
+ const variance = arr.reduce((s, v) => s + (v - m) ** 2, 0) / arr.length;
620
+ return Math.sqrt(variance);
621
+ }
622
+ function linearRegression(ys) {
623
+ const n = ys.length;
624
+ if (n < 2) return { slope: 0, intercept: ys[0] ?? 0, r2: 0 };
625
+ let sumX = 0, sumY = 0, sumXY = 0, sumX2 = 0;
626
+ for (let i = 0; i < n; i++) {
627
+ sumX += i;
628
+ sumY += ys[i];
629
+ sumXY += i * ys[i];
630
+ sumX2 += i * i;
631
+ }
632
+ const denom = n * sumX2 - sumX * sumX;
633
+ if (denom === 0) return { slope: 0, intercept: sumY / n, r2: 0 };
634
+ const slope = (n * sumXY - sumX * sumY) / denom;
635
+ const intercept = (sumY - slope * sumX) / n;
636
+ const meanY = sumY / n;
637
+ let ssTot = 0, ssRes = 0;
638
+ for (let i = 0; i < n; i++) {
639
+ ssTot += (ys[i] - meanY) ** 2;
640
+ ssRes += (ys[i] - (intercept + slope * i)) ** 2;
641
+ }
642
+ const r2 = ssTot > 0 ? 1 - ssRes / ssTot : 0;
643
+ return { slope, intercept, r2 };
644
+ }
645
+ var DAY_NAMES = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
646
+ function forecast(series, days = 7) {
647
+ if (series.length < 7) return [];
648
+ const window = series.slice(-Math.min(14, series.length));
649
+ const n = window.length;
650
+ const weights = window.map((_, i) => Math.exp(0.1 * (i - n + 1)));
651
+ const totalW = weights.reduce((a, b) => a + b, 0);
652
+ let wSumX = 0, wSumY = 0, wSumXY = 0, wSumX2 = 0;
653
+ for (let i = 0; i < n; i++) {
654
+ const w = weights[i];
655
+ wSumX += w * i;
656
+ wSumY += w * window[i];
657
+ wSumXY += w * i * window[i];
658
+ wSumX2 += w * i * i;
659
+ }
660
+ const denom = totalW * wSumX2 - wSumX * wSumX;
661
+ let slope, intercept;
662
+ if (Math.abs(denom) < 1e-10) {
663
+ slope = 0;
664
+ intercept = wSumY / totalW;
665
+ } else {
666
+ slope = (totalW * wSumXY - wSumX * wSumY) / denom;
667
+ intercept = (wSumY - slope * wSumX) / totalW;
668
+ }
669
+ let ssRes = 0;
670
+ for (let i = 0; i < n; i++) {
671
+ ssRes += weights[i] * (window[i] - (intercept + slope * i)) ** 2;
672
+ }
673
+ const rse = Math.sqrt(ssRes / Math.max(1, totalW - 2));
674
+ const z80 = 1.28;
675
+ const results = [];
676
+ for (let d = 1; d <= days; d++) {
677
+ const x = n - 1 + d;
678
+ const predicted = Math.max(0, Math.round(intercept + slope * x));
679
+ const margin = Math.round(z80 * rse * Math.sqrt(1 + 1 / n + (x - n / 2) ** 2 / (n * n / 12)));
680
+ results.push({
681
+ day: d,
682
+ predicted,
683
+ lower: Math.max(0, predicted - margin),
684
+ upper: predicted + margin
685
+ });
686
+ }
687
+ return results;
688
+ }
689
+ function detectAnomalies(series, threshold = 2) {
690
+ if (series.length < 7) return [];
691
+ const anomalies = [];
692
+ const windowSize = Math.min(14, Math.floor(series.length * 0.7));
693
+ for (let i = windowSize; i < series.length; i++) {
694
+ const window = series.slice(i - windowSize, i);
695
+ const m = mean(window);
696
+ const s = stddev(window);
697
+ if (s < 1) continue;
698
+ const zscore = (series[i] - m) / s;
699
+ if (Math.abs(zscore) >= threshold) {
700
+ anomalies.push({
701
+ day: i,
702
+ value: series[i],
703
+ expected: Math.round(m),
704
+ zscore: Math.round(zscore * 10) / 10,
705
+ type: zscore > 0 ? "spike" : "drop"
706
+ });
707
+ }
708
+ }
709
+ return anomalies;
710
+ }
711
+ function segmentTrends(series, minSegmentLength = 5) {
712
+ if (series.length < minSegmentLength) return [];
713
+ const segments = [];
714
+ let segStart = 0;
715
+ while (segStart < series.length - minSegmentLength + 1) {
716
+ let bestEnd = segStart + minSegmentLength - 1;
717
+ const initialSlope = linearRegression(series.slice(segStart, segStart + minSegmentLength)).slope;
718
+ const initialDir = initialSlope > 0.5 ? "up" : initialSlope < -0.5 ? "down" : "flat";
719
+ for (let end = segStart + minSegmentLength; end < series.length; end++) {
720
+ const seg2 = series.slice(segStart, end + 1);
721
+ const { slope: slope2 } = linearRegression(seg2);
722
+ const dir = slope2 > 0.5 ? "up" : slope2 < -0.5 ? "down" : "flat";
723
+ if (dir !== initialDir) break;
724
+ bestEnd = end;
725
+ }
726
+ const seg = series.slice(segStart, bestEnd + 1);
727
+ const { slope } = linearRegression(seg);
728
+ const direction = slope > 0.5 ? "up" : slope < -0.5 ? "down" : "flat";
729
+ segments.push({
730
+ start: segStart,
731
+ end: bestEnd,
732
+ direction,
733
+ slope: Math.round(slope * 10) / 10,
734
+ magnitude: Math.round(seg[seg.length - 1] - seg[0])
735
+ });
736
+ segStart = bestEnd + 1;
737
+ }
738
+ return segments;
739
+ }
740
+ function detectSeasonality(series, startDaysAgo) {
741
+ if (series.length < 14) return null;
742
+ const buckets = [[], [], [], [], [], [], []];
743
+ const today = /* @__PURE__ */ new Date();
744
+ for (let i = 0; i < series.length; i++) {
745
+ const date = new Date(today);
746
+ date.setDate(date.getDate() - (startDaysAgo - i));
747
+ const dow = date.getDay();
748
+ buckets[dow].push(series[i]);
749
+ }
750
+ const dayAvgs = buckets.map((b) => Math.round(mean(b)));
751
+ const overallMean = mean(dayAvgs);
752
+ const maxAvg = Math.max(...dayAvgs);
753
+ const minAvg = Math.min(...dayAvgs);
754
+ if (overallMean < 1 || (maxAvg - minAvg) / overallMean < 0.15) return null;
755
+ const peakIdx = dayAvgs.indexOf(maxAvg);
756
+ return {
757
+ dayOfWeek: dayAvgs,
758
+ peakDay: DAY_NAMES[peakIdx]
759
+ };
760
+ }
761
+ function computeMomentum(series) {
762
+ if (series.length < 14) return 0;
763
+ const last7 = series.slice(-7);
764
+ const prev7 = series.slice(-14, -7);
765
+ const last7Sum = last7.reduce((a, b) => a + b, 0);
766
+ const prev7Sum = prev7.reduce((a, b) => a + b, 0);
767
+ const dampK = 10;
768
+ const dirScore = prev7Sum > dampK ? Math.max(-40, Math.min(40, (last7Sum - prev7Sum) / Math.sqrt(prev7Sum + dampK) * 4)) : last7Sum > 0 ? 20 : 0;
769
+ const { slope: recentSlope } = linearRegression(last7);
770
+ const { slope: prevSlope } = linearRegression(prev7);
771
+ const accelScore = Math.max(-20, Math.min(20, (recentSlope - prevSlope) * 2));
772
+ const cv = last7Sum > 0 ? stddev(last7) / mean(last7) : 1;
773
+ const consistencyScore = Math.max(0, 20 - cv * 20);
774
+ const volumeScore = last7Sum > 0 ? Math.min(20, Math.log10(last7Sum + 1) * 5) : 0;
775
+ return Math.round(Math.max(-100, Math.min(100, dirScore + accelScore + consistencyScore + volumeScore)));
776
+ }
777
+ function generateRecommendations(packages, opts = {}) {
778
+ const recs = [];
779
+ const declining = packages.filter((p) => p.momentum < -30);
780
+ if (declining.length > 0) {
781
+ const names = declining.slice(0, 3).map((p) => p.name).join(", ");
782
+ recs.push({
783
+ type: "attention",
784
+ priority: declining.some((p) => p.momentum < -60) ? "high" : "medium",
785
+ title: `${declining.length} package${declining.length > 1 ? "s" : ""} losing momentum`,
786
+ detail: `${names}${declining.length > 3 ? ` and ${declining.length - 3} more` : ""} show sustained decline. Consider: release updates, fix open issues, or update documentation.`,
787
+ metric: `Worst momentum: ${Math.min(...declining.map((p) => p.momentum))}`
788
+ });
789
+ }
790
+ if (opts.gini !== void 0 && opts.gini > 0.7) {
791
+ recs.push({
792
+ type: "risk",
793
+ priority: opts.gini > 0.85 ? "high" : "medium",
794
+ title: "High portfolio concentration",
795
+ detail: `Gini coefficient ${opts.gini.toFixed(2)} indicates downloads are heavily concentrated in a few packages. Diversify promotion efforts across the portfolio.`,
796
+ metric: `Gini: ${opts.gini.toFixed(2)}`
797
+ });
798
+ }
799
+ if (opts.npmPct !== void 0 && opts.npmPct > 75) {
800
+ recs.push({
801
+ type: "risk",
802
+ priority: "medium",
803
+ title: "Heavy npm dependency",
804
+ detail: `${opts.npmPct}% of downloads come from npm. Consider cross-publishing to PyPI and NuGet to reduce single-registry risk.`,
805
+ metric: `npm share: ${opts.npmPct}%`
806
+ });
807
+ }
808
+ const growing = packages.filter((p) => p.momentum > 40 && p.anomalies.some((a) => a.type === "spike"));
809
+ if (growing.length > 0) {
810
+ const names = growing.slice(0, 3).map((p) => p.name).join(", ");
811
+ recs.push({
812
+ type: "opportunity",
813
+ priority: "medium",
814
+ title: `${growing.length} package${growing.length > 1 ? "s" : ""} gaining traction`,
815
+ detail: `${names} show organic growth with download spikes. Capitalize with blog posts, social media, or conference talks.`,
816
+ metric: `Best momentum: ${Math.max(...growing.map((p) => p.momentum))}`
817
+ });
818
+ }
819
+ const forecastGrowing = packages.filter((p) => {
820
+ if (p.forecast7.length < 7) return false;
821
+ const lastActual = p.forecast7[0]?.predicted ?? 0;
822
+ const lastForecast = p.forecast7[6]?.predicted ?? 0;
823
+ return lastForecast > lastActual * 1.2;
824
+ });
825
+ if (forecastGrowing.length > 0) {
826
+ recs.push({
827
+ type: "growth",
828
+ priority: "low",
829
+ title: `${forecastGrowing.length} package${forecastGrowing.length > 1 ? "s" : ""} predicted to grow`,
830
+ detail: `Statistical models predict >20% growth in the next 7 days for ${forecastGrowing.slice(0, 3).map((p) => p.name).join(", ")}.`
831
+ });
832
+ }
833
+ const spiked = packages.filter((p) => p.anomalies.filter((a) => a.type === "spike").length >= 2);
834
+ if (spiked.length > 0) {
835
+ recs.push({
836
+ type: "attention",
837
+ priority: "low",
838
+ title: `${spiked.length} package${spiked.length > 1 ? "s" : ""} with repeated spikes`,
839
+ detail: `Multiple download spikes detected for ${spiked.slice(0, 3).map((p) => p.name).join(", ")}. Could indicate bot activity, viral posts, or dependency adoption.`
840
+ });
841
+ }
842
+ const priorityOrder = { high: 0, medium: 1, low: 2 };
843
+ recs.sort((a, b) => priorityOrder[a.priority] - priorityOrder[b.priority]);
844
+ return recs;
845
+ }
846
+ function inferPortfolio(leaderboard, opts = {}) {
847
+ const packages = [];
848
+ for (const row of leaderboard) {
849
+ const series = row.range30;
850
+ if (!series || series.length < 7) {
851
+ packages.push({
852
+ name: row.name,
853
+ registry: row.registry,
854
+ forecast7: [],
855
+ anomalies: [],
856
+ trendSegments: [],
857
+ seasonality: null,
858
+ momentum: 0
859
+ });
860
+ continue;
861
+ }
862
+ packages.push({
863
+ name: row.name,
864
+ registry: row.registry,
865
+ forecast7: forecast(series, 7),
866
+ anomalies: detectAnomalies(series),
867
+ trendSegments: segmentTrends(series),
868
+ seasonality: detectSeasonality(series, 30),
869
+ momentum: computeMomentum(series)
870
+ });
871
+ }
872
+ const forecastTotal7 = new Array(7).fill(0);
873
+ for (const pkg of packages) {
874
+ for (const pt of pkg.forecast7) {
875
+ forecastTotal7[pt.day - 1] += pt.predicted;
876
+ }
877
+ }
878
+ const totalWeek = leaderboard.reduce((s, r) => s + (r.week ?? 0), 0);
879
+ let weightedMomentum = 0;
880
+ for (const pkg of packages) {
881
+ const row = leaderboard.find((r) => r.name === pkg.name);
882
+ const weight = totalWeek > 0 ? (row?.week ?? 0) / totalWeek : 1 / packages.length;
883
+ weightedMomentum += pkg.momentum * weight;
884
+ }
885
+ const decliningPct = packages.filter((p) => p.momentum < -20).length / Math.max(1, packages.length);
886
+ const totalAnomalies = packages.reduce((s, p) => s + p.anomalies.length, 0);
887
+ const anomalyDensity = totalAnomalies / Math.max(1, packages.length);
888
+ const giniRisk = (opts.gini ?? 0) * 30;
889
+ const declineRisk = decliningPct * 40;
890
+ const anomalyRisk = Math.min(30, anomalyDensity * 10);
891
+ const riskScore = Math.round(Math.max(0, Math.min(100, giniRisk + declineRisk + anomalyRisk)));
892
+ const diversityTrend = "stable";
893
+ const recommendations = generateRecommendations(packages, opts);
894
+ return {
895
+ packages,
896
+ recommendations,
897
+ forecastTotal7,
898
+ riskScore,
899
+ diversityTrend,
900
+ portfolioMomentum: Math.round(weightedMomentum)
901
+ };
902
+ }
903
+
603
904
  // src/index.ts
604
905
  function createCache() {
605
906
  const store = /* @__PURE__ */ new Map();
@@ -797,11 +1098,18 @@ stats.mine = async function mine(maintainer, options) {
797
1098
  0 && (module.exports = {
798
1099
  RegistryError,
799
1100
  calc,
1101
+ computeMomentum,
800
1102
  createCache,
801
1103
  createHandler,
802
1104
  defaultConfig,
1105
+ detectAnomalies,
1106
+ detectSeasonality,
1107
+ forecast,
1108
+ generateRecommendations,
1109
+ inferPortfolio,
803
1110
  loadConfig,
804
1111
  registerProvider,
1112
+ segmentTrends,
805
1113
  serve,
806
1114
  starterConfig,
807
1115
  stats
package/dist/index.d.cts CHANGED
@@ -116,6 +116,113 @@ declare function createHandler(opts?: StatsOptions): Handler;
116
116
  /** Starts an HTTP server. Returns the server instance. */
117
117
  declare function serve(opts?: ServerOptions): node_http.Server<typeof IncomingMessage, typeof ServerResponse>;
118
118
 
119
+ /**
120
+ * AI Inference Module — statistical forecasting, anomaly detection, and recommendations.
121
+ *
122
+ * Zero dependencies. Pure math on time-series data arrays.
123
+ * Designed to run at build-time in the fetch-stats pipeline.
124
+ */
125
+ interface ForecastPoint {
126
+ day: number;
127
+ predicted: number;
128
+ lower: number;
129
+ upper: number;
130
+ }
131
+ interface Anomaly {
132
+ day: number;
133
+ value: number;
134
+ expected: number;
135
+ zscore: number;
136
+ type: 'spike' | 'drop';
137
+ }
138
+ interface TrendSegment {
139
+ start: number;
140
+ end: number;
141
+ direction: 'up' | 'down' | 'flat';
142
+ slope: number;
143
+ magnitude: number;
144
+ }
145
+ interface Recommendation {
146
+ type: 'growth' | 'risk' | 'opportunity' | 'attention';
147
+ priority: 'high' | 'medium' | 'low';
148
+ title: string;
149
+ detail: string;
150
+ metric?: string;
151
+ }
152
+ interface PackageInference {
153
+ name: string;
154
+ registry: string;
155
+ forecast7: ForecastPoint[];
156
+ anomalies: Anomaly[];
157
+ trendSegments: TrendSegment[];
158
+ seasonality: {
159
+ dayOfWeek: number[];
160
+ peakDay: string;
161
+ } | null;
162
+ momentum: number;
163
+ }
164
+ interface PortfolioInference {
165
+ packages: PackageInference[];
166
+ recommendations: Recommendation[];
167
+ forecastTotal7: number[];
168
+ riskScore: number;
169
+ diversityTrend: 'improving' | 'stable' | 'declining';
170
+ portfolioMomentum: number;
171
+ }
172
+ /**
173
+ * Forecast next N days using weighted linear regression on recent data.
174
+ * Uses the last 14 days with exponential weighting (recent days matter more).
175
+ * Returns predictions with 80% confidence intervals.
176
+ */
177
+ declare function forecast(series: number[], days?: number): ForecastPoint[];
178
+ /**
179
+ * Detect anomalies using adaptive z-score with rolling baseline.
180
+ * More sophisticated than simple global z-score — uses a 14-day rolling
181
+ * window so seasonal patterns don't trigger false positives.
182
+ */
183
+ declare function detectAnomalies(series: number[], threshold?: number): Anomaly[];
184
+ /**
185
+ * Segment a time series into directional trend segments.
186
+ * Uses a simple piecewise linear approach with minimum segment length.
187
+ */
188
+ declare function segmentTrends(series: number[], minSegmentLength?: number): TrendSegment[];
189
+ /**
190
+ * Detect day-of-week seasonality patterns.
191
+ * Requires at least 14 days of data to identify weekly cycles.
192
+ */
193
+ declare function detectSeasonality(series: number[], startDaysAgo: number): {
194
+ dayOfWeek: number[];
195
+ peakDay: string;
196
+ } | null;
197
+ /**
198
+ * Compute a composite momentum score (-100 to +100).
199
+ * Combines: short-term trend, acceleration, volume, and consistency.
200
+ */
201
+ declare function computeMomentum(series: number[]): number;
202
+ /**
203
+ * Generate automated recommendations based on portfolio analysis.
204
+ */
205
+ declare function generateRecommendations(packages: PackageInference[], opts?: {
206
+ gini?: number;
207
+ npmPct?: number;
208
+ totalWeekly?: number;
209
+ }): Recommendation[];
210
+ /**
211
+ * Run full inference pipeline on a leaderboard dataset.
212
+ * This is the main entry point called from fetch-stats.mjs.
213
+ */
214
+ declare function inferPortfolio(leaderboard: Array<{
215
+ name: string;
216
+ registry: string;
217
+ week: number;
218
+ range30?: number[] | null;
219
+ trendPct?: number | null;
220
+ }>, opts?: {
221
+ gini?: number;
222
+ npmPct?: number;
223
+ totalWeekly?: number;
224
+ }): PortfolioInference;
225
+
119
226
  declare function createCache(): StatsCache;
120
227
 
121
228
  declare function registerProvider(provider: RegistryProvider): void;
@@ -130,4 +237,4 @@ declare namespace stats {
130
237
  }) => Promise<PackageStats[]>;
131
238
  }
132
239
 
133
- export { type ChartData, type ComparisonResult, type Config, type DailyDownloads, type PackageConfig, type PackageStats, type RateLimitConfig, RegistryError, type RegistryName, type RegistryProvider, type ServerOptions, type StatsCache, type StatsOptions, calc, createCache, createHandler, defaultConfig, loadConfig, registerProvider, serve, starterConfig, stats };
240
+ export { type Anomaly, type ChartData, type ComparisonResult, type Config, type DailyDownloads, type ForecastPoint, type PackageConfig, type PackageInference, type PackageStats, type PortfolioInference, type RateLimitConfig, type Recommendation, RegistryError, type RegistryName, type RegistryProvider, type ServerOptions, type StatsCache, type StatsOptions, type TrendSegment, calc, computeMomentum, createCache, createHandler, defaultConfig, detectAnomalies, detectSeasonality, forecast, generateRecommendations, inferPortfolio, loadConfig, registerProvider, segmentTrends, serve, starterConfig, stats };
package/dist/index.d.ts CHANGED
@@ -116,6 +116,113 @@ declare function createHandler(opts?: StatsOptions): Handler;
116
116
  /** Starts an HTTP server. Returns the server instance. */
117
117
  declare function serve(opts?: ServerOptions): node_http.Server<typeof IncomingMessage, typeof ServerResponse>;
118
118
 
119
+ /**
120
+ * AI Inference Module — statistical forecasting, anomaly detection, and recommendations.
121
+ *
122
+ * Zero dependencies. Pure math on time-series data arrays.
123
+ * Designed to run at build-time in the fetch-stats pipeline.
124
+ */
125
+ interface ForecastPoint {
126
+ day: number;
127
+ predicted: number;
128
+ lower: number;
129
+ upper: number;
130
+ }
131
+ interface Anomaly {
132
+ day: number;
133
+ value: number;
134
+ expected: number;
135
+ zscore: number;
136
+ type: 'spike' | 'drop';
137
+ }
138
+ interface TrendSegment {
139
+ start: number;
140
+ end: number;
141
+ direction: 'up' | 'down' | 'flat';
142
+ slope: number;
143
+ magnitude: number;
144
+ }
145
+ interface Recommendation {
146
+ type: 'growth' | 'risk' | 'opportunity' | 'attention';
147
+ priority: 'high' | 'medium' | 'low';
148
+ title: string;
149
+ detail: string;
150
+ metric?: string;
151
+ }
152
+ interface PackageInference {
153
+ name: string;
154
+ registry: string;
155
+ forecast7: ForecastPoint[];
156
+ anomalies: Anomaly[];
157
+ trendSegments: TrendSegment[];
158
+ seasonality: {
159
+ dayOfWeek: number[];
160
+ peakDay: string;
161
+ } | null;
162
+ momentum: number;
163
+ }
164
+ interface PortfolioInference {
165
+ packages: PackageInference[];
166
+ recommendations: Recommendation[];
167
+ forecastTotal7: number[];
168
+ riskScore: number;
169
+ diversityTrend: 'improving' | 'stable' | 'declining';
170
+ portfolioMomentum: number;
171
+ }
172
+ /**
173
+ * Forecast next N days using weighted linear regression on recent data.
174
+ * Uses the last 14 days with exponential weighting (recent days matter more).
175
+ * Returns predictions with 80% confidence intervals.
176
+ */
177
+ declare function forecast(series: number[], days?: number): ForecastPoint[];
178
+ /**
179
+ * Detect anomalies using adaptive z-score with rolling baseline.
180
+ * More sophisticated than simple global z-score — uses a 14-day rolling
181
+ * window so seasonal patterns don't trigger false positives.
182
+ */
183
+ declare function detectAnomalies(series: number[], threshold?: number): Anomaly[];
184
+ /**
185
+ * Segment a time series into directional trend segments.
186
+ * Uses a simple piecewise linear approach with minimum segment length.
187
+ */
188
+ declare function segmentTrends(series: number[], minSegmentLength?: number): TrendSegment[];
189
+ /**
190
+ * Detect day-of-week seasonality patterns.
191
+ * Requires at least 14 days of data to identify weekly cycles.
192
+ */
193
+ declare function detectSeasonality(series: number[], startDaysAgo: number): {
194
+ dayOfWeek: number[];
195
+ peakDay: string;
196
+ } | null;
197
+ /**
198
+ * Compute a composite momentum score (-100 to +100).
199
+ * Combines: short-term trend, acceleration, volume, and consistency.
200
+ */
201
+ declare function computeMomentum(series: number[]): number;
202
+ /**
203
+ * Generate automated recommendations based on portfolio analysis.
204
+ */
205
+ declare function generateRecommendations(packages: PackageInference[], opts?: {
206
+ gini?: number;
207
+ npmPct?: number;
208
+ totalWeekly?: number;
209
+ }): Recommendation[];
210
+ /**
211
+ * Run full inference pipeline on a leaderboard dataset.
212
+ * This is the main entry point called from fetch-stats.mjs.
213
+ */
214
+ declare function inferPortfolio(leaderboard: Array<{
215
+ name: string;
216
+ registry: string;
217
+ week: number;
218
+ range30?: number[] | null;
219
+ trendPct?: number | null;
220
+ }>, opts?: {
221
+ gini?: number;
222
+ npmPct?: number;
223
+ totalWeekly?: number;
224
+ }): PortfolioInference;
225
+
119
226
  declare function createCache(): StatsCache;
120
227
 
121
228
  declare function registerProvider(provider: RegistryProvider): void;
@@ -130,4 +237,4 @@ declare namespace stats {
130
237
  }) => Promise<PackageStats[]>;
131
238
  }
132
239
 
133
- export { type ChartData, type ComparisonResult, type Config, type DailyDownloads, type PackageConfig, type PackageStats, type RateLimitConfig, RegistryError, type RegistryName, type RegistryProvider, type ServerOptions, type StatsCache, type StatsOptions, calc, createCache, createHandler, defaultConfig, loadConfig, registerProvider, serve, starterConfig, stats };
240
+ export { type Anomaly, type ChartData, type ComparisonResult, type Config, type DailyDownloads, type ForecastPoint, type PackageConfig, type PackageInference, type PackageStats, type PortfolioInference, type RateLimitConfig, type Recommendation, RegistryError, type RegistryName, type RegistryProvider, type ServerOptions, type StatsCache, type StatsOptions, type TrendSegment, calc, computeMomentum, createCache, createHandler, defaultConfig, detectAnomalies, detectSeasonality, forecast, generateRecommendations, inferPortfolio, loadConfig, registerProvider, segmentTrends, serve, starterConfig, stats };
package/dist/index.js CHANGED
@@ -34,7 +34,7 @@ async function fetchWithRetry(url, registry, init) {
34
34
  let lastError;
35
35
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
36
36
  await acquireSlot(registry);
37
- const res = await fetch(url, init);
37
+ const res = await fetch(url, { signal: AbortSignal.timeout(3e4), ...init });
38
38
  if (res.status === 404) return null;
39
39
  if (res.ok) return res.json();
40
40
  const retryAfter = res.headers.get("retry-after");
@@ -56,7 +56,7 @@ async function fetchWithRetry(url, registry, init) {
56
56
  async function fetchDirect(url, registry, init) {
57
57
  let lastError;
58
58
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
59
- const res = await fetch(url, init);
59
+ const res = await fetch(url, { signal: AbortSignal.timeout(3e4), ...init });
60
60
  if (res.status === 404) return null;
61
61
  if (res.ok) return res.json();
62
62
  const retryAfter = res.headers.get("retry-after");
@@ -278,7 +278,8 @@ var docker = {
278
278
  if (options?.dockerToken) {
279
279
  headers["Authorization"] = `Bearer ${options.dockerToken}`;
280
280
  }
281
- const json2 = await fetchWithRetry(`${API4}/${pkg}`, "docker", { headers });
281
+ const safePkg = pkg.split("/").map((s) => encodeURIComponent(s)).join("/");
282
+ const json2 = await fetchWithRetry(`${API4}/${safePkg}`, "docker", { headers });
282
283
  if (!json2 || !json2.name || !json2.namespace) return null;
283
284
  return {
284
285
  registry: "docker",
@@ -565,6 +566,299 @@ Endpoints:`);
565
566
  return server;
566
567
  }
567
568
 
569
+ // src/inference.ts
570
// Arithmetic mean of a numeric array. An empty array yields 0 (not NaN)
// so downstream rolling-window math stays finite.
function mean(arr) {
  const count = arr.length;
  if (count === 0) return 0;
  let total = 0;
  for (const value of arr) total += value;
  return total / count;
}
574
// Population standard deviation (divides by N, not N-1).
// Fewer than two samples have no measurable spread, so return 0.
function stddev(arr) {
  const n = arr.length;
  if (n < 2) return 0;
  let total = 0;
  for (const value of arr) total += value;
  const avg = total / n;
  let squaredDiffs = 0;
  for (const value of arr) squaredDiffs += (value - avg) ** 2;
  return Math.sqrt(squaredDiffs / n);
}
580
// Ordinary least-squares fit of ys against x = 0..n-1.
// Returns { slope, intercept, r2 }. Degenerate inputs (fewer than two
// points, or a singular normal equation) fall back to slope 0 with a
// best-effort intercept and r2 of 0.
function linearRegression(ys) {
  const n = ys.length;
  if (n < 2) return { slope: 0, intercept: ys[0] ?? 0, r2: 0 };
  let sx = 0;
  let sy = 0;
  let sxy = 0;
  let sxx = 0;
  ys.forEach((y, x) => {
    sx += x;
    sy += y;
    sxy += x * y;
    sxx += x * x;
  });
  const det = n * sxx - sx * sx;
  if (det === 0) return { slope: 0, intercept: sy / n, r2: 0 };
  const slope = (n * sxy - sx * sy) / det;
  const intercept = (sy - slope * sx) / n;
  const yBar = sy / n;
  let totalSS = 0;
  let residSS = 0;
  ys.forEach((y, x) => {
    totalSS += (y - yBar) ** 2;
    residSS += (y - (intercept + slope * x)) ** 2;
  });
  // Coefficient of determination; guarded so a flat series reports 0.
  const r2 = totalSS > 0 ? 1 - residSS / totalSS : 0;
  return { slope, intercept, r2 };
}
603
// Weekday labels indexed by Date.prototype.getDay() (0 = Sunday).
var DAY_NAMES = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
604
// Forecast the next `days` values of a daily series via exponentially
// weighted linear regression over the most recent (up to 14) observations.
// Returns [] when fewer than 7 data points exist. Each point carries an
// 80% prediction interval (lower/upper), floored at 0 since downloads
// cannot be negative.
function forecast(series, days = 7) {
  if (series.length < 7) return [];
  const recent = series.slice(-Math.min(14, series.length));
  const n = recent.length;
  // Exponential weights: the newest sample gets weight 1, older samples decay.
  const w = recent.map((_, i) => Math.exp(0.1 * (i - n + 1)));
  const wTotal = w.reduce((a, b) => a + b, 0);
  let sx = 0;
  let sy = 0;
  let sxy = 0;
  let sxx = 0;
  for (let i = 0; i < n; i++) {
    const wi = w[i];
    sx += wi * i;
    sy += wi * recent[i];
    sxy += wi * i * recent[i];
    sxx += wi * i * i;
  }
  const det = wTotal * sxx - sx * sx;
  let slope;
  let intercept;
  if (Math.abs(det) < 1e-10) {
    // Singular system: fall back to the weighted mean as a flat forecast.
    slope = 0;
    intercept = sy / wTotal;
  } else {
    slope = (wTotal * sxy - sx * sy) / det;
    intercept = (sy - slope * sx) / wTotal;
  }
  // Weighted residual standard error for the interval width.
  let residual = 0;
  for (let i = 0; i < n; i++) {
    residual += w[i] * (recent[i] - (intercept + slope * i)) ** 2;
  }
  const rse = Math.sqrt(residual / Math.max(1, wTotal - 2));
  const z80 = 1.28; // z-value for an 80% interval
  const points = [];
  for (let d = 1; d <= days; d++) {
    const x = n - 1 + d;
    const predicted = Math.max(0, Math.round(intercept + slope * x));
    // Interval widens with forecast distance from the window centre.
    const margin = Math.round(z80 * rse * Math.sqrt(1 + 1 / n + (x - n / 2) ** 2 / (n * n / 12)));
    points.push({
      day: d,
      predicted,
      lower: Math.max(0, predicted - margin),
      upper: predicted + margin
    });
  }
  return points;
}
647
// Flag spikes/drops whose z-score against a rolling baseline exceeds
// `threshold`. The baseline is the preceding window (up to 14 days, at
// least 70% of the series length), so each point is compared only with
// data that came before it. Needs at least 7 observations.
function detectAnomalies(series, threshold = 2) {
  if (series.length < 7) return [];
  const found = [];
  const span = Math.min(14, Math.floor(series.length * 0.7));
  for (let i = span; i < series.length; i++) {
    const baseline = series.slice(i - span, i);
    // Baseline mean.
    let sum = 0;
    for (const v of baseline) sum += v;
    const m = sum / baseline.length;
    // Population standard deviation of the baseline (window is always >= 2).
    let sq = 0;
    for (const v of baseline) sq += (v - m) ** 2;
    const s = baseline.length < 2 ? 0 : Math.sqrt(sq / baseline.length);
    // Near-zero variance would make every wiggle "anomalous"; skip quiet windows.
    if (s < 1) continue;
    const zscore = (series[i] - m) / s;
    if (Math.abs(zscore) >= threshold) {
      found.push({
        day: i,
        value: series[i],
        expected: Math.round(m),
        zscore: Math.round(zscore * 10) / 10,
        type: zscore > 0 ? "spike" : "drop"
      });
    }
  }
  return found;
}
669
// Partition a series into contiguous directional segments ("up", "down",
// "flat") by greedily growing each segment while its least-squares slope
// keeps the direction established by its first `minSegmentLength` points.
function segmentTrends(series, minSegmentLength = 5) {
  if (series.length < minSegmentLength) return [];
  // Least-squares slope over x = 0..n-1 — same math as linearRegression's
  // slope, so segment directions agree with the rest of the module.
  const fitSlope = (ys) => {
    const n = ys.length;
    if (n < 2) return 0;
    let sx = 0;
    let sy = 0;
    let sxy = 0;
    let sxx = 0;
    for (let i = 0; i < n; i++) {
      sx += i;
      sy += ys[i];
      sxy += i * ys[i];
      sxx += i * i;
    }
    const det = n * sxx - sx * sx;
    if (det === 0) return 0;
    return (n * sxy - sx * sy) / det;
  };
  const toDirection = (s) => (s > 0.5 ? "up" : s < -0.5 ? "down" : "flat");
  const segments = [];
  let start = 0;
  while (start < series.length - minSegmentLength + 1) {
    const headDir = toDirection(fitSlope(series.slice(start, start + minSegmentLength)));
    let end = start + minSegmentLength - 1;
    // Extend the segment while the growing window keeps the same direction.
    for (let probe = start + minSegmentLength; probe < series.length; probe++) {
      const probeDir = toDirection(fitSlope(series.slice(start, probe + 1)));
      if (probeDir !== headDir) break;
      end = probe;
    }
    const chunk = series.slice(start, end + 1);
    const slope = fitSlope(chunk);
    segments.push({
      start,
      end,
      direction: toDirection(slope),
      slope: Math.round(slope * 10) / 10,
      magnitude: Math.round(chunk[chunk.length - 1] - chunk[0])
    });
    start = end + 1;
  }
  return segments;
}
698
// Detect a weekly (day-of-week) cycle in a daily series. `startDaysAgo`
// anchors series[0] relative to today so each value lands in the right
// weekday bucket. Returns per-weekday averages and the peak day name, or
// null when data is too short (< 14 days), too small (mean < 1), or the
// day-to-day spread is below 15% of the mean.
function detectSeasonality(series, startDaysAgo) {
  if (series.length < 14) return null;
  const names = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];
  const byDow = [[], [], [], [], [], [], []];
  const now = new Date();
  series.forEach((value, i) => {
    const d = new Date(now);
    d.setDate(d.getDate() - (startDaysAgo - i));
    byDow[d.getDay()].push(value);
  });
  // Rounded average per weekday; an empty bucket averages to 0.
  const dayAvgs = byDow.map((bucket) => {
    if (bucket.length === 0) return 0;
    let sum = 0;
    for (const v of bucket) sum += v;
    return Math.round(sum / bucket.length);
  });
  let totalAvg = 0;
  for (const v of dayAvgs) totalAvg += v;
  const overallMean = totalAvg / dayAvgs.length;
  const hi = Math.max(...dayAvgs);
  const lo = Math.min(...dayAvgs);
  if (overallMean < 1 || (hi - lo) / overallMean < 0.15) return null;
  return {
    dayOfWeek: dayAvgs,
    peakDay: names[dayAvgs.indexOf(hi)]
  };
}
719
// Composite momentum score in [-100, 100] for a daily series, combining:
// week-over-week direction (±40), slope acceleration (±20), consistency
// of the last week (0-20, lower variance scores higher), and log-scaled
// volume (0-20). Requires at least 14 days; shorter series score 0.
function computeMomentum(series) {
  if (series.length < 14) return 0;
  const sumOf = (xs) => xs.reduce((a, b) => a + b, 0);
  const avgOf = (xs) => (xs.length === 0 ? 0 : sumOf(xs) / xs.length);
  const sdOf = (xs) => {
    if (xs.length < 2) return 0;
    const m = avgOf(xs);
    return Math.sqrt(xs.reduce((s, v) => s + (v - m) ** 2, 0) / xs.length);
  };
  // Least-squares slope over x = 0..n-1 (same math as linearRegression).
  const slopeOf = (xs) => {
    const n = xs.length;
    if (n < 2) return 0;
    let sx = 0;
    let sy = 0;
    let sxy = 0;
    let sxx = 0;
    for (let i = 0; i < n; i++) {
      sx += i;
      sy += xs[i];
      sxy += i * xs[i];
      sxx += i * i;
    }
    const det = n * sxx - sx * sx;
    return det === 0 ? 0 : (n * sxy - sx * sy) / det;
  };
  const clamp = (lo, hi, v) => Math.max(lo, Math.min(hi, v));
  const last7 = series.slice(-7);
  const prev7 = series.slice(-14, -7);
  const last7Sum = sumOf(last7);
  const prev7Sum = sumOf(prev7);
  const dampK = 10; // damping so tiny baselines don't explode the ratio
  const dirScore = prev7Sum > dampK
    ? clamp(-40, 40, (last7Sum - prev7Sum) / Math.sqrt(prev7Sum + dampK) * 4)
    : last7Sum > 0 ? 20 : 0;
  const accelScore = clamp(-20, 20, (slopeOf(last7) - slopeOf(prev7)) * 2);
  // Coefficient of variation; a dead series counts as maximally inconsistent.
  const cv = last7Sum > 0 ? sdOf(last7) / avgOf(last7) : 1;
  const consistencyScore = Math.max(0, 20 - cv * 20);
  const volumeScore = last7Sum > 0 ? Math.min(20, Math.log10(last7Sum + 1) * 5) : 0;
  return Math.round(clamp(-100, 100, dirScore + accelScore + consistencyScore + volumeScore));
}
735
/**
 * Turn per-package inference results (plus optional portfolio metrics)
 * into prioritized, human-readable recommendations. Pure function:
 * inspects momentum, anomalies, and forecasts; `opts.gini` and
 * `opts.npmPct` drive portfolio-level risk items. The result is sorted
 * high -> medium -> low priority.
 */
function generateRecommendations(packages, opts = {}) {
  const out = [];
  const pkgLabel = (n) => `${n} package${n > 1 ? "s" : ""}`;
  const firstNames = (list) => list.slice(0, 3).map((p) => p.name).join(", ");

  // Sustained decline -> actionable attention item.
  const declining = packages.filter((p) => p.momentum < -30);
  if (declining.length > 0) {
    out.push({
      type: "attention",
      priority: declining.some((p) => p.momentum < -60) ? "high" : "medium",
      title: `${pkgLabel(declining.length)} losing momentum`,
      detail: `${firstNames(declining)}${declining.length > 3 ? ` and ${declining.length - 3} more` : ""} show sustained decline. Consider: release updates, fix open issues, or update documentation.`,
      metric: `Worst momentum: ${Math.min(...declining.map((p) => p.momentum))}`
    });
  }

  // Concentration risk from the Gini coefficient.
  if (opts.gini !== void 0 && opts.gini > 0.7) {
    out.push({
      type: "risk",
      priority: opts.gini > 0.85 ? "high" : "medium",
      title: "High portfolio concentration",
      detail: `Gini coefficient ${opts.gini.toFixed(2)} indicates downloads are heavily concentrated in a few packages. Diversify promotion efforts across the portfolio.`,
      metric: `Gini: ${opts.gini.toFixed(2)}`
    });
  }

  // Single-registry exposure.
  if (opts.npmPct !== void 0 && opts.npmPct > 75) {
    out.push({
      type: "risk",
      priority: "medium",
      title: "Heavy npm dependency",
      detail: `${opts.npmPct}% of downloads come from npm. Consider cross-publishing to PyPI and NuGet to reduce single-registry risk.`,
      metric: `npm share: ${opts.npmPct}%`
    });
  }

  // Organic growth with spikes -> promotion opportunity.
  const growing = packages.filter((p) => p.momentum > 40 && p.anomalies.some((a) => a.type === "spike"));
  if (growing.length > 0) {
    out.push({
      type: "opportunity",
      priority: "medium",
      title: `${pkgLabel(growing.length)} gaining traction`,
      detail: `${firstNames(growing)} show organic growth with download spikes. Capitalize with blog posts, social media, or conference talks.`,
      metric: `Best momentum: ${Math.max(...growing.map((p) => p.momentum))}`
    });
  }

  // Forecast-driven growth: day-7 prediction > 120% of day-1 prediction.
  const forecastGrowing = packages.filter((p) => {
    if (p.forecast7.length < 7) return false;
    const dayOne = p.forecast7[0]?.predicted ?? 0;
    const daySeven = p.forecast7[6]?.predicted ?? 0;
    return daySeven > dayOne * 1.2;
  });
  if (forecastGrowing.length > 0) {
    out.push({
      type: "growth",
      priority: "low",
      title: `${pkgLabel(forecastGrowing.length)} predicted to grow`,
      detail: `Statistical models predict >20% growth in the next 7 days for ${firstNames(forecastGrowing)}.`
    });
  }

  // Repeated spikes may be bots, virality, or new dependents — flag them.
  const spiked = packages.filter((p) => p.anomalies.filter((a) => a.type === "spike").length >= 2);
  if (spiked.length > 0) {
    out.push({
      type: "attention",
      priority: "low",
      title: `${pkgLabel(spiked.length)} with repeated spikes`,
      detail: `Multiple download spikes detected for ${firstNames(spiked)}. Could indicate bot activity, viral posts, or dependency adoption.`
    });
  }

  // Stable sort keeps insertion order within each priority tier.
  const rank = { high: 0, medium: 1, low: 2 };
  out.sort((a, b) => rank[a.priority] - rank[b.priority]);
  return out;
}
804
/**
 * Run the full inference pipeline on a leaderboard dataset: per-package
 * forecast, anomaly detection, trend segmentation, seasonality, and
 * momentum, plus portfolio-level aggregates (summed 7-day forecast,
 * download-weighted momentum, composite risk score, recommendations).
 *
 * @param leaderboard rows of { name, registry, week, range30?, trendPct? }
 * @param opts optional portfolio metrics { gini?, npmPct?, totalWeekly? }
 * @returns PortfolioInference; `packages` is 1:1 with `leaderboard`.
 */
function inferPortfolio(leaderboard, opts = {}) {
  const packages = [];
  for (const row of leaderboard) {
    const series = row.range30;
    if (!series || series.length < 7) {
      // Not enough history: emit a neutral placeholder so the output
      // stays aligned 1:1 with the input rows.
      packages.push({
        name: row.name,
        registry: row.registry,
        forecast7: [],
        anomalies: [],
        trendSegments: [],
        seasonality: null,
        momentum: 0
      });
      continue;
    }
    packages.push({
      name: row.name,
      registry: row.registry,
      forecast7: forecast(series, 7),
      anomalies: detectAnomalies(series),
      trendSegments: segmentTrends(series),
      seasonality: detectSeasonality(series, 30),
      momentum: computeMomentum(series)
    });
  }
  // Portfolio-level 7-day forecast: sum of per-package predictions.
  const forecastTotal7 = new Array(7).fill(0);
  for (const pkg of packages) {
    for (const pt of pkg.forecast7) {
      forecastTotal7[pt.day - 1] += pt.predicted;
    }
  }
  // Momentum weighted by weekly downloads. packages[i] corresponds to
  // leaderboard[i] by construction, so pair by index rather than a
  // find()-by-name scan: that was O(n^2) and mis-attributed weight when
  // the same package name exists in more than one registry.
  const totalWeek = leaderboard.reduce((s, r) => s + (r.week ?? 0), 0);
  let weightedMomentum = 0;
  for (let i = 0; i < packages.length; i++) {
    const weight = totalWeek > 0 ? (leaderboard[i].week ?? 0) / totalWeek : 1 / packages.length;
    weightedMomentum += packages[i].momentum * weight;
  }
  // Composite risk score (0-100): concentration + decline breadth + anomaly density.
  const decliningPct = packages.filter((p) => p.momentum < -20).length / Math.max(1, packages.length);
  const totalAnomalies = packages.reduce((s, p) => s + p.anomalies.length, 0);
  const anomalyDensity = totalAnomalies / Math.max(1, packages.length);
  const giniRisk = (opts.gini ?? 0) * 30;
  const declineRisk = decliningPct * 40;
  const anomalyRisk = Math.min(30, anomalyDensity * 10);
  const riskScore = Math.round(Math.max(0, Math.min(100, giniRisk + declineRisk + anomalyRisk)));
  const diversityTrend = "stable"; // placeholder until registry-mix history is tracked
  const recommendations = generateRecommendations(packages, opts);
  return {
    packages,
    recommendations,
    forecastTotal7,
    riskScore,
    diversityTrend,
    portfolioMomentum: Math.round(weightedMomentum)
  };
}
861
+
568
862
  // src/index.ts
569
863
  function createCache() {
570
864
  const store = /* @__PURE__ */ new Map();
@@ -761,11 +1055,18 @@ stats.mine = async function mine(maintainer, options) {
761
1055
  export {
762
1056
  RegistryError,
763
1057
  calc,
1058
+ computeMomentum,
764
1059
  createCache,
765
1060
  createHandler,
766
1061
  defaultConfig,
1062
+ detectAnomalies,
1063
+ detectSeasonality,
1064
+ forecast,
1065
+ generateRecommendations,
1066
+ inferPortfolio,
767
1067
  loadConfig,
768
1068
  registerProvider,
1069
+ segmentTrends,
769
1070
  serve,
770
1071
  starterConfig,
771
1072
  stats
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mcptoolshop/registry-stats",
3
- "version": "3.0.0",
3
+ "version": "3.1.0",
4
4
  "description": "Multi-registry download stats — engine, AI-powered dashboard, and desktop app for npm, PyPI, NuGet, VS Code Marketplace, and Docker Hub",
5
5
  "type": "module",
6
6
  "main": "./dist/index.cjs",