ghscout 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66) hide show
  1. package/README.md +195 -0
  2. package/dist/analysis/ai-scorer.d.ts +34 -0
  3. package/dist/analysis/ai-scorer.d.ts.map +1 -0
  4. package/dist/analysis/ai-scorer.js +139 -0
  5. package/dist/analysis/ai-scorer.js.map +1 -0
  6. package/dist/analysis/cluster.d.ts +14 -0
  7. package/dist/analysis/cluster.d.ts.map +1 -0
  8. package/dist/analysis/cluster.js +223 -0
  9. package/dist/analysis/cluster.js.map +1 -0
  10. package/dist/analysis/scorer.d.ts +27 -0
  11. package/dist/analysis/scorer.d.ts.map +1 -0
  12. package/dist/analysis/scorer.js +196 -0
  13. package/dist/analysis/scorer.js.map +1 -0
  14. package/dist/analysis/signals.d.ts +30 -0
  15. package/dist/analysis/signals.d.ts.map +1 -0
  16. package/dist/analysis/signals.js +59 -0
  17. package/dist/analysis/signals.js.map +1 -0
  18. package/dist/analysis/tokenizer.d.ts +15 -0
  19. package/dist/analysis/tokenizer.d.ts.map +1 -0
  20. package/dist/analysis/tokenizer.js +64 -0
  21. package/dist/analysis/tokenizer.js.map +1 -0
  22. package/dist/commands/evidence.d.ts +9 -0
  23. package/dist/commands/evidence.d.ts.map +1 -0
  24. package/dist/commands/evidence.js +229 -0
  25. package/dist/commands/evidence.js.map +1 -0
  26. package/dist/commands/scan-org.d.ts +3 -0
  27. package/dist/commands/scan-org.d.ts.map +1 -0
  28. package/dist/commands/scan-org.js +88 -0
  29. package/dist/commands/scan-org.js.map +1 -0
  30. package/dist/commands/scan.d.ts +32 -0
  31. package/dist/commands/scan.d.ts.map +1 -0
  32. package/dist/commands/scan.js +197 -0
  33. package/dist/commands/scan.js.map +1 -0
  34. package/dist/commands/trending.d.ts +14 -0
  35. package/dist/commands/trending.d.ts.map +1 -0
  36. package/dist/commands/trending.js +145 -0
  37. package/dist/commands/trending.js.map +1 -0
  38. package/dist/github/auth.d.ts +3 -0
  39. package/dist/github/auth.d.ts.map +1 -0
  40. package/dist/github/auth.js +33 -0
  41. package/dist/github/auth.js.map +1 -0
  42. package/dist/github/cache.d.ts +18 -0
  43. package/dist/github/cache.d.ts.map +1 -0
  44. package/dist/github/cache.js +51 -0
  45. package/dist/github/cache.js.map +1 -0
  46. package/dist/github/client.d.ts +24 -0
  47. package/dist/github/client.d.ts.map +1 -0
  48. package/dist/github/client.js +140 -0
  49. package/dist/github/client.js.map +1 -0
  50. package/dist/github/fetchers.d.ts +13 -0
  51. package/dist/github/fetchers.d.ts.map +1 -0
  52. package/dist/github/fetchers.js +142 -0
  53. package/dist/github/fetchers.js.map +1 -0
  54. package/dist/github/types.d.ts +46 -0
  55. package/dist/github/types.d.ts.map +1 -0
  56. package/dist/github/types.js +2 -0
  57. package/dist/github/types.js.map +1 -0
  58. package/dist/index.d.ts +3 -0
  59. package/dist/index.d.ts.map +1 -0
  60. package/dist/index.js +116 -0
  61. package/dist/index.js.map +1 -0
  62. package/dist/output/formatters.d.ts +35 -0
  63. package/dist/output/formatters.d.ts.map +1 -0
  64. package/dist/output/formatters.js +195 -0
  65. package/dist/output/formatters.js.map +1 -0
  66. package/package.json +50 -0
package/README.md ADDED
@@ -0,0 +1,195 @@
1
+ # ghscout — evidence engine for product discovery from GitHub issues
2
+
3
+ Every idea tool mines Reddit. Nobody mines GitHub issues. **ghscout** does.
4
+
5
+ GitHub issues are the richest source of developer pain: structured reactions, labels, linked PRs, repo stars, and code context. ghscout scans them at scale, clusters recurring pain patterns, and scores each opportunity.
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ npx ghscout scan vercel/next.js
11
+
12
+ # or install globally
13
+ npm install -g ghscout
14
+ ```
15
+
16
+ Requires Node.js 18+. Works without a GitHub token (60 req/h), but set `GITHUB_TOKEN` or use `gh auth` for 5,000 req/h.
17
+
18
+ ## Scan a repo
19
+
20
+ ```
21
+ $ ghscout scan vercel/next.js --top 5
22
+
23
+ #1 [100/100] server actions
24
+ Issues: 25 | Reactions: 740 | Labels: bug
25
+ Demand: 100 Frequency: 100 Frustration: 100 Market: 100 Gap: 100
26
+ → [65 👍] Unable to import react-dom/server in a server component (1200d)
27
+ https://github.com/vercel/next.js/issues/43810
28
+ → [57 👍] Turbopack dev server uses too much RAM and CPU (263d)
29
+ https://github.com/vercel/next.js/issues/81161
30
+
31
+ #2 [80/100] pages router
32
+ Issues: 13 | Reactions: 663 | Labels: none
33
+ Demand: 92 Frequency: 48 Frustration: 67 Market: 100 Gap: 100
34
+ → [325 👍] App router issue with Framer Motion shared layout animatio... (1051d)
35
+ https://github.com/vercel/next.js/issues/49279
36
+
37
+ #3 [58/100] compress false
38
+ Issues: 10 | Reactions: 258 | Labels: Runtime, bug
39
+ → [49 👍] middleware matcher should support template literals (899d)
40
+
41
+ #4 [54/100] parallel routes
42
+ Issues: 8 | Reactions: 217 | Labels: bug
43
+ → [54 👍] Parallel routes are rendered unnecessarily (967d)
44
+
45
+ #5 [48/100] image
46
+ Issues: 8 | Reactions: 203 | Labels: Image (next/image), bug
47
+ → [46 👍] next/image not properly sizing images (1186d)
48
+ ```
49
+
50
+ ## Scan across repos (org or topic)
51
+
52
+ ```
53
+ $ ghscout scan --topic developer-tools --min-stars 1000 --top 5
54
+
55
+ Scanning 1/10: puppeteer/puppeteer...
56
+ Scanning 2/10: hoppscotch/hoppscotch...
57
+ ...
58
+
59
+ #1 [97/100] st dataframe
60
+ Issues: 71 | Reactions: 2312 | Labels: type:enhancement
61
+ → [99 👍] st.tabs & st.expander - Improve handling of frontend... (746d)
62
+
63
+ #2 [61/100] bruno cli
64
+ Issues: 37 | Reactions: 461 | Labels: enhancement
65
+ → [131 👍] Import Open API / Swagger 2.x Spec into Bruno (1147d)
66
+
67
+ #3 [60/100] sandbox creation
68
+ Issues: 44 | Reactions: 13
69
+ → [2 👍] Live File Synchronization (daytona sandbox sync) (72d)
70
+ ```
71
+
72
+ ## AI scoring (via Claude Code)
73
+
74
+ Uses your existing Claude Code subscription. Zero extra cost, zero API key.
75
+
76
+ ```
77
+ $ ghscout scan vercel/next.js --top 3 --ai-score
78
+
79
+ #1 [3/10] server actions SKIP
80
+ Issues: 25 | Reactions: 740 | Heuristic: 100/100
81
+ AI: "These are Next.js framework bugs that need fixes from the
82
+ core team, not standalone products. No indie tool can meaningfully
83
+ solve server action cookie reloads or Turbopack memory leaks."
84
+
85
+ #2 [3/10] pages router SKIP
86
+ AI: "Framework-level routing bugs tightly coupled to Next.js
87
+ internals. Best resolved upstream via PRs to the library itself."
88
+
89
+ #3 [3/10] parallel routes SKIP
90
+ AI: "Parallel routes issues are framework-level bugs in Next.js's
91
+ routing engine that require fixes within Next.js itself."
92
+ ```
93
+
94
+ The AI separates "real pain but not a product opportunity" from "pain you can build a product around." Heuristics gave server actions 100/100. AI gave it 3/10 with a clear rationale.
95
+
96
+ ## Deep-dive with evidence
97
+
98
+ ```
99
+ $ ghscout evidence vercel/next.js "middleware"
100
+
101
+ # Evidence: "middleware" in vercel/next.js
102
+
103
+ ## Summary
104
+ - **17 open issues** across 16 unique authors
105
+ - **1,111 total 👍 reactions** — strong demand signal
106
+ - **20 related PRs**
107
+
108
+ ## Top Issues by Demand
109
+ 1. [195 👍] [RFC] Dynamic Routes (#7607)
110
+ Opened 2468 days ago | 90 comments
111
+ 2. [136 👍] trailing slash in link for legit page works for client side...
112
+ Opened 2740 days ago | 124 comments
113
+ ```
114
+
115
+ Output is plain markdown — paste it into a pitch deck, README, or blog post.
116
+
117
+ ## Trending pain
118
+
119
+ ```
120
+ $ ghscout trending --top 3
121
+
122
+ #1 [67/100] claude code
123
+ Issues: 7 | Reactions: 513
124
+ → [119 👍] Support multiple Connector accounts (29d)
125
+ → [79 👍] remote-control shows misleading error (25d)
126
+
127
+ #2 [58/100] option
128
+ Issues: 5 | Reactions: 583
129
+ → [283 👍] Add collision presets (godot-proposals) (25d)
130
+
131
+ #3 [28/100] code
132
+ Issues: 3 | Reactions: 263
133
+ → [118 👍] Add a Shader Code variable previewer (18d)
134
+ ```
135
+
136
+ ## Scoring model
137
+
138
+ Scores are **relative within each scan** — the top cluster always receives the highest demand score, so clusters are ranked against one another rather than on an absolute scale.
139
+
140
+ | Signal | Weight (single-repo) | Weight (cross-repo) | What it measures |
141
+ |---|---|---|---|
142
+ | **Demand** | 35% | 30% | Total 👍 reactions across issues |
143
+ | **Frequency** | 30% | 25% | Number of separate issues about the same pain |
144
+ | **Frustration** | 20% | 15% | Negative reactions, frustration keywords in title+body, issue age |
145
+ | **Market size** | 0% | 15% | Repo stars (constant in single-repo, varies in cross-repo) |
146
+ | **Gap** | 15% | 15% | Percentage of issues still open (no solution yet) |
147
+
148
+ ## Autoresearch
149
+
150
+ ghscout ships with a `program.md` — a structured instruction file (inspired by [karpathy/autoresearch](https://github.com/karpathy/autoresearch)) that teaches AI coding agents to run autonomous product discovery sessions.
151
+
152
+ ```bash
153
+ # In Claude Code, just say:
154
+ "Read program.md and run a research session"
155
+ ```
156
+
157
+ The agent scans topics, evaluates clusters, and writes findings to `discoveries/YYYY-MM-DD.md` with BUILD/SKIP/WATCH verdicts.
158
+
159
+ ## CLI reference
160
+
161
+ ```
162
+ ghscout scan <repo> Scan a repo for opportunity clusters
163
+ ghscout scan --org <org> Scan an org's repos
164
+ ghscout scan --topic <topic> Scan repos by GitHub topic
165
+ ghscout evidence <repo> <query> Deep-dive on a specific pain topic
166
+ ghscout trending Top pain clusters across GitHub
167
+
168
+ Options:
169
+ --output <format> json | table | pretty (default: pretty)
170
+ --json Shorthand for --output json
171
+ --ai-score Score with AI via Claude Code CLI
172
+ --limit <n> Max issues per repo (default: 200)
173
+ --period <duration> Time window: 7d, 30d, 90d (default: all open)
174
+ --min-stars <n> Min stars to include (default: 100)
175
+ --top <n> Show top N clusters
176
+ --min-reactions <n> Min reactions per cluster
177
+ --verbose Show API calls and rate limits
178
+ --no-cache Fetch fresh data
179
+ ```
180
+
181
+ ## Comparison
182
+
183
+ | | **ghscout** | SaasFinder | GummySearch |
184
+ |---|---|---|---|
185
+ | Data source | GitHub issues | Reddit | Reddit |
186
+ | AI scoring | Yes (Claude Code) | No | No |
187
+ | Structured signals | Reactions, labels, PRs | Upvotes, comments | Upvotes, comments |
188
+ | Cross-repo analysis | Yes | N/A | N/A |
189
+ | Rejected PR detection | Yes | N/A | N/A |
190
+ | Evidence packages | Yes (markdown) | No | No |
191
+ | Price | Free / open source | $29/mo+ | $48/mo+ |
192
+
193
+ ## License
194
+
195
+ MIT
@@ -0,0 +1,34 @@
1
import type { ScoredCluster } from "./scorer.js";
import type { RepoMeta } from "../github/types.js";
/** A scored cluster augmented with an AI verdict (or heuristic fallback). */
export interface AIScoredCluster extends ScoredCluster {
    /** Opportunity score on a 0-10 scale (heuristic score / 10 when AI scoring is skipped). */
    aiScore: number;
    /** Recommendation for this opportunity. */
    verdict: "BUILD" | "SKIP" | "WATCH";
    /** Short explanation of the verdict (from the model, or a fixed fallback note). */
    rationale: string;
}
/** Shape of the JSON object the claude CLI is asked to return. */
interface AIResponse {
    score: number;
    verdict: "BUILD" | "SKIP" | "WATCH";
    rationale: string;
}
/**
 * Check if the claude CLI is available.
 */
export declare function isClaudeAvailable(): boolean;
/**
 * Build the scoring prompt for a cluster.
 */
export declare function buildPrompt(cluster: ScoredCluster, repoMeta: RepoMeta): string;
/**
 * Call claude CLI to score a single cluster.
 * Returns null when the call fails or the reply cannot be parsed/validated.
 */
export declare function callClaude(prompt: string, verbose: boolean): AIResponse | null;
/**
 * Score clusters using AI via the user's Claude Code CLI.
 * Falls back to heuristic score if claude is not available or call fails.
 */
export declare function aiScoreClusters(clusters: ScoredCluster[], repoMeta: RepoMeta, opts: {
    verbose: boolean;
    minHeuristicScore?: number;
}): Promise<AIScoredCluster[]>;
export {};
//# sourceMappingURL=ai-scorer.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ai-scorer.d.ts","sourceRoot":"","sources":["../../src/analysis/ai-scorer.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AACjD,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,oBAAoB,CAAC;AAEnD,MAAM,WAAW,eAAgB,SAAQ,aAAa;IACpD,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,OAAO,GAAG,MAAM,GAAG,OAAO,CAAC;IACpC,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,UAAU,UAAU;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,OAAO,GAAG,MAAM,GAAG,OAAO,CAAC;IACpC,SAAS,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,wBAAgB,iBAAiB,IAAI,OAAO,CAO3C;AAED;;GAEG;AACH,wBAAgB,WAAW,CAAC,OAAO,EAAE,aAAa,EAAE,QAAQ,EAAE,QAAQ,GAAG,MAAM,CAmC9E;AAED;;GAEG;AACH,wBAAgB,UAAU,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,GAAG,UAAU,GAAG,IAAI,CAkD9E;AAED;;;GAGG;AACH,wBAAsB,eAAe,CACnC,QAAQ,EAAE,aAAa,EAAE,EACzB,QAAQ,EAAE,QAAQ,EAClB,IAAI,EAAE;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,iBAAiB,CAAC,EAAE,MAAM,CAAA;CAAE,GACrD,OAAO,CAAC,eAAe,EAAE,CAAC,CA0C5B"}
@@ -0,0 +1,139 @@
1
+ import { execSync } from "node:child_process";
2
/**
 * Probe for the `claude` CLI by running `claude --version`.
 * Returns true when the command exits successfully within 5 seconds.
 */
export function isClaudeAvailable() {
    let available = true;
    try {
        execSync("claude --version", { stdio: "pipe", timeout: 5000 });
    }
    catch {
        available = false;
    }
    return available;
}
14
/**
 * Build the scoring prompt for a cluster.
 *
 * Summarizes the cluster (name, repository, issue/reaction counts, labels,
 * heuristic score) plus its top-5 issues by total reactions, then instructs
 * the model to reply with strict JSON ({score, verdict, rationale}) that
 * callClaude() can extract and validate.
 */
export function buildPrompt(cluster, repoMeta) {
    // Top 5 issues by reaction count; .slice() first so sorting does not
    // mutate the cluster's issue array.
    const topIssues = cluster.issues
        .slice()
        .sort((a, b) => b.reactions.total - a.reactions.total)
        .slice(0, 5)
        .map((i) => {
            // Issue age in whole days, derived from createdAt.
            const ageDays = Math.floor((Date.now() - new Date(i.createdAt).getTime()) / (1000 * 60 * 60 * 24));
            return `- [${i.reactions.total} 👍] ${i.title} (${ageDays}d old)`;
        })
        .join("\n");
    const labels = cluster.labels.length > 0 ? cluster.labels.join(", ") : "none";
    return `You are evaluating a product opportunity found by analyzing GitHub issues.

Cluster: "${cluster.name}"
Repository: ${repoMeta.fullName} (${repoMeta.stars.toLocaleString()} stars)
Issues: ${cluster.issueCount} open issues
Total reactions: ${cluster.totalReactions} 👍
Labels: ${labels}
Heuristic score: ${cluster.score}/100

Top issues in this cluster:
${topIssues}

Score this opportunity on a scale of 0-10 based on these criteria:
1. Product viability: Could someone build a standalone tool, extension, or service for this?
2. Market demand: Is this pain widespread beyond just this repo?
3. Solution gap: Do adequate solutions already exist, or are people stuck?
4. Indie feasibility: Could one developer build an MVP in 2-4 weeks?

Respond ONLY with valid JSON, no markdown, no explanation outside the JSON:
{"score": <0-10>, "verdict": "<BUILD|SKIP|WATCH>", "rationale": "<1-2 sentences explaining your assessment>"}`;
}
49
/**
 * Call claude CLI to score a single cluster.
 *
 * The prompt is delivered on stdin via execSync's `input` option instead of
 * being interpolated into a shell command (`echo '<prompt>' | claude`).
 * This removes the shell-quoting/injection hazard of building the command
 * string, and avoids `echo` mangling backslashes or long prompts hitting
 * the OS argument-length limit.
 *
 * @param prompt  Prompt text produced by buildPrompt().
 * @param verbose When true, write progress and parse warnings to stderr.
 * @returns Parsed {score, verdict, rationale} object, or null when the CLI
 *          call fails, no JSON is found in the reply, or validation fails.
 */
export function callClaude(prompt, verbose) {
    try {
        if (verbose) {
            process.stderr.write(" Calling Claude for AI scoring...\n");
        }
        // Send the prompt on stdin — no shell escaping required.
        const result = execSync("claude --print --output-format text", {
            encoding: "utf-8",
            timeout: 60000,
            input: prompt,
            stdio: ["pipe", "pipe", "pipe"],
        });
        // Extract JSON from response (claude might add extra text)
        const jsonMatch = result.match(/\{[\s\S]*?"score"[\s\S]*?"verdict"[\s\S]*?"rationale"[\s\S]*?\}/);
        if (!jsonMatch) {
            if (verbose) {
                process.stderr.write(` Warning: Could not parse AI response\n`);
            }
            return null;
        }
        const parsed = JSON.parse(jsonMatch[0]);
        // Validate shape and ranges before trusting model output.
        if (typeof parsed.score !== "number" ||
            parsed.score < 0 ||
            parsed.score > 10 ||
            !["BUILD", "SKIP", "WATCH"].includes(parsed.verdict) ||
            typeof parsed.rationale !== "string") {
            if (verbose) {
                process.stderr.write(` Warning: Invalid AI response format\n`);
            }
            return null;
        }
        return parsed;
    }
    catch (err) {
        if (verbose) {
            const msg = err instanceof Error ? err.message : String(err);
            process.stderr.write(` Warning: Claude call failed (${msg})\n`);
        }
        return null;
    }
}
94
/**
 * Score clusters using AI via the user's Claude Code CLI.
 * Falls back to heuristic score if claude is not available or call fails.
 *
 * Clusters below opts.minHeuristicScore (default 30) and the catch-all
 * "other" bucket are never sent to the model; they get the fallback score.
 * The returned array is ordered by aiScore, highest first.
 */
export async function aiScoreClusters(clusters, repoMeta, opts) {
    if (!isClaudeAvailable()) {
        process.stderr.write("Warning: claude CLI not found. Install Claude Code to use --ai-score.\n" +
            "Falling back to heuristic scoring.\n");
        return clusters.map(fallback);
    }
    const minScore = opts.minHeuristicScore ?? 30;
    const scored = [];
    for (const [idx, cluster] of clusters.entries()) {
        // Low-signal clusters and the "other" bucket are not worth an AI call.
        if (cluster.score < minScore || cluster.name === "other") {
            scored.push(fallback(cluster));
            continue;
        }
        process.stderr.write(`AI scoring ${idx + 1}/${clusters.length}: "${cluster.name}"...\n`);
        const response = callClaude(buildPrompt(cluster, repoMeta), opts.verbose);
        scored.push(response
            ? {
                ...cluster,
                aiScore: response.score,
                verdict: response.verdict,
                rationale: response.rationale,
            }
            : fallback(cluster));
    }
    // Highest AI score first (fallback entries carry heuristic/10).
    return scored.sort((a, b) => b.aiScore - a.aiScore);
}
131
/**
 * Heuristic stand-in used when AI scoring is skipped or fails: maps the
 * 0-100 heuristic score onto the 0-10 AI scale and assigns WATCH at >= 70,
 * otherwise SKIP.
 */
function fallback(cluster) {
    const verdict = cluster.score >= 70 ? "WATCH" : "SKIP";
    return {
        ...cluster,
        aiScore: Math.round(cluster.score / 10),
        verdict,
        rationale: "Heuristic score (AI scoring skipped)",
    };
}
139
+ //# sourceMappingURL=ai-scorer.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ai-scorer.js","sourceRoot":"","sources":["../../src/analysis/ai-scorer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,oBAAoB,CAAC;AAgB9C;;GAEG;AACH,MAAM,UAAU,iBAAiB;IAC/B,IAAI,CAAC;QACH,QAAQ,CAAC,kBAAkB,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC;QAC/D,OAAO,IAAI,CAAC;IACd,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,KAAK,CAAC;IACf,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW,CAAC,OAAsB,EAAE,QAAkB;IACpE,MAAM,SAAS,GAAG,OAAO,CAAC,MAAM;SAC7B,KAAK,EAAE;SACP,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,KAAK,GAAG,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC;SACrD,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC;SACX,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE;QACT,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CACxB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,OAAO,EAAE,CAAC,GAAG,CAAC,IAAI,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CACvE,CAAC;QACF,OAAO,MAAM,CAAC,CAAC,SAAS,CAAC,KAAK,QAAQ,CAAC,CAAC,KAAK,KAAK,OAAO,QAAQ,CAAC;IACpE,CAAC,CAAC;SACD,IAAI,CAAC,IAAI,CAAC,CAAC;IAEd,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;IAE9E,OAAO;;YAEG,OAAO,CAAC,IAAI;cACV,QAAQ,CAAC,QAAQ,KAAK,QAAQ,CAAC,KAAK,CAAC,cAAc,EAAE;UACzD,OAAO,CAAC,UAAU;mBACT,OAAO,CAAC,cAAc;UAC/B,MAAM;mBACG,OAAO,CAAC,KAAK;;;EAG9B,SAAS;;;;;;;;;8GASmG,CAAC;AAC/G,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,UAAU,CAAC,MAAc,EAAE,OAAgB;IACzD,IAAI,CAAC;QACH,IAAI,OAAO,EAAE,CAAC;YACZ,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,sCAAsC,CAAC,CAAC;QAC/D,CAAC;QAED,qCAAqC;QACrC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC9C,MAAM,MAAM,GAAG,QAAQ,CACrB,SAAS,OAAO,yCAAyC,EACzD;YACE,QAAQ,EAAE,OAAO;YACjB,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC;SAChC,CACF,CAAC;QAEF,2DAA2D;QAC3D,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC,iEAAiE,CAAC,CAAC;QAClG,IAAI,CAAC,SAAS,EAAE,CAAC;YACf,IAAI,OAAO,EAAE,CAAC;gBACZ,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,0CAA0C,CAAC,CAAC;YACnE,CAAC;YACD,OAAO,IAAI,CAAC;QACd,CAAC;QAED,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,CA
Ae,CAAC;QAEtD,WAAW;QACX,IACE,OAAO,MAAM,CAAC,KAAK,KAAK,QAAQ;YAChC,MAAM,CAAC,KAAK,GAAG,CAAC;YAChB,MAAM,CAAC,KAAK,GAAG,EAAE;YACjB,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,OAAO,CAAC;YACpD,OAAO,MAAM,CAAC,SAAS,KAAK,QAAQ,EACpC,CAAC;YACD,IAAI,OAAO,EAAE,CAAC;gBACZ,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;YAClE,CAAC;YACD,OAAO,IAAI,CAAC;QACd,CAAC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,IAAI,OAAO,EAAE,CAAC;YACZ,MAAM,GAAG,GAAG,GAAG,YAAY,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YAC7D,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,kCAAkC,GAAG,KAAK,CAAC,CAAC;QACnE,CAAC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;AACH,CAAC;AAED;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,QAAyB,EACzB,QAAkB,EAClB,IAAsD;IAEtD,IAAI,CAAC,iBAAiB,EAAE,EAAE,CAAC;QACzB,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,yEAAyE;YACvE,sCAAsC,CACzC,CAAC;QACF,OAAO,QAAQ,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;IAChC,CAAC;IAED,MAAM,QAAQ,GAAG,IAAI,CAAC,iBAAiB,IAAI,EAAE,CAAC;IAC9C,MAAM,OAAO,GAAsB,EAAE,CAAC;IAEtC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACzC,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;QAE5B,+CAA+C;QAC/C,IAAI,OAAO,CAAC,KAAK,GAAG,QAAQ,IAAI,OAAO,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;YACzD,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;YAChC,SAAS;QACX,CAAC;QAED,OAAO,CAAC,MAAM,CAAC,KAAK,CAClB,cAAc,CAAC,GAAG,CAAC,IAAI,QAAQ,CAAC,MAAM,MAAM,OAAO,CAAC,IAAI,QAAQ,CACjE,CAAC;QAEF,MAAM,MAAM,GAAG,WAAW,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;QAC9C,MAAM,QAAQ,GAAG,UAAU,CAAC,MAAM,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAElD,IAAI,QAAQ,EAAE,CAAC;YACb,OAAO,CAAC,IAAI,CAAC;gBACX,GAAG,OAAO;gBACV,OAAO,EAAE,QAAQ,CAAC,KAAK;gBACvB,OAAO,EAAE,QAAQ,CAAC,OAAO;gBACzB,SAAS,EAAE,QAAQ,CAAC,SAAS;aAC9B,CAAC,CAAC;QACL,CAAC;aAAM,CAAC;YACN,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC;QAClC,CAAC;IACH,CAAC;IAED,2DAA2D;IAC3D,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC;AACvD,CAAC;AAED,SAAS,QAAQ,CAAC,OAAsB;IACtC,OAAO;QACL,GAAG,OAAO;QACV,OAAO,EAAE,IA
AI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,GAAG,EAAE,CAAC;QACvC,OAAO,EAAE,OAAO,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM;QAC/C,SAAS,EAAE,sCAAsC;KAClD,CAAC;AACJ,CAAC"}
@@ -0,0 +1,14 @@
1
import type { Issue } from "../github/types.js";
/** A group of issues that appear to describe the same recurring pain point. */
export interface Cluster {
    /** Human-readable label (the cluster's most common title bigram when one repeats). */
    name: string;
    /** Member issues of the cluster. */
    issues: Issue[];
    /** Same as issues.length, denormalized for sorting/display. */
    issueCount: number;
    /** Sum of reactions.total across all member issues. */
    totalReactions: number;
    /** Labels that appear on at least half of the member issues, sorted. */
    labels: string[];
}
/**
 * Cluster issues by shared title tokens (bigrams preferred) with label overlap
 * as a secondary signal.
 */
export declare function clusterIssues(issues: Issue[]): Cluster[];
//# sourceMappingURL=cluster.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cluster.d.ts","sourceRoot":"","sources":["../../src/analysis/cluster.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,oBAAoB,CAAC;AAGhD,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,KAAK,EAAE,CAAC;IAChB,UAAU,EAAE,MAAM,CAAC;IACnB,cAAc,EAAE,MAAM,CAAC;IACvB,MAAM,EAAE,MAAM,EAAE,CAAC;CAClB;AAWD;;;GAGG;AACH,wBAAgB,aAAa,CAAC,MAAM,EAAE,KAAK,EAAE,GAAG,OAAO,EAAE,CAuKxD"}
@@ -0,0 +1,223 @@
1
+ import { tokenizeTitle, extractBigrams } from "./tokenizer.js";
2
// Labels too generic to identify a specific pain topic; these are ignored
// when grouping issues by shared labels (labels are compared lowercased).
const GENERIC_LABELS = new Set([
    "bug",
    "feature",
    "enhancement",
    "question",
    "help wanted",
    "good first issue",
]);
10
/**
 * Cluster issues by shared title tokens (bigrams preferred) with label overlap
 * as a secondary signal.
 *
 * Pipeline: tokenize titles -> count bigram/unigram frequencies -> assign each
 * issue its highest-frequency token (bigram favored) -> use shared non-generic
 * labels to pull weakly-assigned issues into a dominant group -> group by
 * token, sweep singleton groups into "other", relabel with a representative
 * bigram, and sort by issue count then reactions.
 */
export function clusterIssues(issues) {
    if (issues.length === 0)
        return [];
    // Step 1: Tokenize all issues and extract bigrams
    const issueTokens = issues.map((issue) => {
        const tokens = tokenizeTitle(issue.title);
        const bigrams = extractBigrams(tokens);
        return { issue, tokens, bigrams };
    });
    // Step 2: Count frequency of each bigram and unigram across ALL issues
    const bigramFreq = new Map();
    const unigramFreq = new Map();
    for (const { tokens, bigrams } of issueTokens) {
        // Count each bigram once per issue (deduplicate within a single issue)
        const seenBigrams = new Set();
        for (const bigram of bigrams) {
            if (!seenBigrams.has(bigram)) {
                seenBigrams.add(bigram);
                bigramFreq.set(bigram, (bigramFreq.get(bigram) ?? 0) + 1);
            }
        }
        const seenUnigrams = new Set();
        for (const token of tokens) {
            if (!seenUnigrams.has(token)) {
                seenUnigrams.add(token);
                unigramFreq.set(token, (unigramFreq.get(token) ?? 0) + 1);
            }
        }
    }
    // Step 3 & 6: For each issue, find its "top token" considering label overlap
    // Build label-based affinity: map non-generic labels to issue indices
    const labelToIssues = new Map();
    for (let i = 0; i < issues.length; i++) {
        for (const label of issues[i].labels) {
            const lower = label.toLowerCase();
            if (!GENERIC_LABELS.has(lower)) {
                if (!labelToIssues.has(lower)) {
                    labelToIssues.set(lower, new Set());
                }
                labelToIssues.get(lower).add(i);
            }
        }
    }
    // For each issue, compute the best token (bigram preferred)
    const issueTopToken = [];
    for (const { tokens, bigrams } of issueTokens) {
        let bestToken = "";
        let bestScore = -1;
        let bestIsBigram = false;
        // Check bigrams first
        for (const bigram of bigrams) {
            const freq = bigramFreq.get(bigram) ?? 0;
            // On a frequency tie, a bigram beats a unigram (more specific).
            if (freq > bestScore || (freq === bestScore && !bestIsBigram)) {
                bestScore = freq;
                bestToken = bigram;
                bestIsBigram = true;
            }
        }
        // Check unigrams — only prefer unigram if its frequency is notably higher
        for (const token of tokens) {
            const freq = unigramFreq.get(token) ?? 0;
            // Prefer bigrams when frequency is similar (bigram needs freq * 1.5 < unigram freq to lose)
            if (bestIsBigram) {
                if (freq > bestScore * 1.5) {
                    bestScore = freq;
                    bestToken = token;
                    bestIsBigram = false;
                }
            }
            else {
                if (freq > bestScore) {
                    bestScore = freq;
                    bestToken = token;
                    bestIsBigram = false;
                }
            }
        }
        // Issues with no usable token land in the catch-all "other" group.
        issueTopToken.push(bestToken || "other");
    }
    // Step 6 (continued): Boost grouping via shared non-generic labels
    // If two issues share a non-generic label and one of them has a top token with freq < 2,
    // try to reassign it to match the other issue's top token
    for (const [, issueIndices] of labelToIssues) {
        if (issueIndices.size < 2)
            continue;
        const indices = Array.from(issueIndices);
        // Find the most common top token among issues sharing this label
        const tokenCounts = new Map();
        for (const idx of indices) {
            const token = issueTopToken[idx];
            if (token !== "other") {
                tokenCounts.set(token, (tokenCounts.get(token) ?? 0) + 1);
            }
        }
        if (tokenCounts.size === 0)
            continue;
        let dominantToken = "";
        let dominantCount = 0;
        for (const [token, count] of tokenCounts) {
            if (count > dominantCount) {
                dominantCount = count;
                dominantToken = token;
            }
        }
        // Reassign issues with weak tokens to the dominant one in this label group
        if (dominantCount >= 2) {
            for (const idx of indices) {
                const currentToken = issueTopToken[idx];
                if (currentToken === "other") {
                    issueTopToken[idx] = dominantToken;
                    continue;
                }
                // Check if current token is weak (only appears once in global freq)
                const currentFreq = bigramFreq.get(currentToken) ?? unigramFreq.get(currentToken) ?? 0;
                if (currentFreq < 2 && currentToken !== dominantToken) {
                    issueTopToken[idx] = dominantToken;
                }
            }
        }
    }
    // Step 4: Group issues by their top token
    const groups = new Map();
    for (let i = 0; i < issues.length; i++) {
        const token = issueTopToken[i];
        if (!groups.has(token)) {
            groups.set(token, []);
        }
        groups.get(token).push(issues[i]);
    }
    // Step 5: Merge small clusters (<2 issues) into "other"
    // Then relabel each cluster with its most representative bigram
    const clusters = [];
    const otherIssues = [];
    for (const [name, groupIssues] of groups) {
        if (groupIssues.length < 2) {
            otherIssues.push(...groupIssues);
        }
        else {
            const relabeledName = pickBigramLabel(name, groupIssues);
            clusters.push(buildCluster(relabeledName, groupIssues));
        }
    }
    if (otherIssues.length > 0) {
        clusters.push(buildCluster("other", otherIssues));
    }
    // Step 9: Sort by issueCount descending, then totalReactions descending
    clusters.sort((a, b) => {
        if (b.issueCount !== a.issueCount)
            return b.issueCount - a.issueCount;
        return b.totalReactions - a.totalReactions;
    });
    return clusters;
}
167
/**
 * Pick the best bigram label for a cluster.
 * Collects all bigrams from every issue in the cluster, counts how many
 * distinct issues contain each one, and returns the most common bigram if
 * it appears >= 2 times. Otherwise keeps the original (unigram) name.
 */
function pickBigramLabel(currentName, issues) {
    // Names containing a space are already bigrams; keep them as-is.
    if (currentName.includes(" "))
        return currentName;
    // Tally each bigram once per issue (Set dedupes within one title).
    const counts = new Map();
    for (const issue of issues) {
        const uniqueBigrams = new Set(extractBigrams(tokenizeTitle(issue.title)));
        for (const bigram of uniqueBigrams) {
            counts.set(bigram, (counts.get(bigram) ?? 0) + 1);
        }
    }
    let winner = "";
    let winnerCount = 0;
    for (const [bigram, count] of counts) {
        if (count > winnerCount) {
            winnerCount = count;
            winner = bigram;
        }
    }
    return winnerCount >= 2 ? winner : currentName;
}
199
+ function buildCluster(name, issues) {
200
+ // Step 7: Calculate totalReactions
201
+ const totalReactions = issues.reduce((sum, issue) => sum + issue.reactions.total, 0);
202
+ // Step 8: Collect common labels
203
+ const labelCounts = new Map();
204
+ for (const issue of issues) {
205
+ for (const label of issue.labels) {
206
+ labelCounts.set(label, (labelCounts.get(label) ?? 0) + 1);
207
+ }
208
+ }
209
+ // Labels that appear in at least half the issues in the cluster
210
+ const threshold = Math.ceil(issues.length / 2);
211
+ const labels = Array.from(labelCounts.entries())
212
+ .filter(([, count]) => count >= threshold)
213
+ .map(([label]) => label)
214
+ .sort();
215
+ return {
216
+ name,
217
+ issues,
218
+ issueCount: issues.length,
219
+ totalReactions,
220
+ labels,
221
+ };
222
+ }
223
+ //# sourceMappingURL=cluster.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cluster.js","sourceRoot":"","sources":["../../src/analysis/cluster.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAC;AAU/D,MAAM,cAAc,GAAG,IAAI,GAAG,CAAC;IAC7B,KAAK;IACL,SAAS;IACT,aAAa;IACb,UAAU;IACV,aAAa;IACb,kBAAkB;CACnB,CAAC,CAAC;AAEH;;;GAGG;AACH,MAAM,UAAU,aAAa,CAAC,MAAe;IAC3C,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,EAAE,CAAC;IAEnC,kDAAkD;IAClD,MAAM,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;QACvC,MAAM,MAAM,GAAG,aAAa,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1C,MAAM,OAAO,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC;QACvC,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;IACpC,CAAC,CAAC,CAAC;IAEH,uEAAuE;IACvE,MAAM,UAAU,GAAG,IAAI,GAAG,EAAkB,CAAC;IAC7C,MAAM,WAAW,GAAG,IAAI,GAAG,EAAkB,CAAC;IAE9C,KAAK,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,WAAW,EAAE,CAAC;QAC9C,uEAAuE;QACvE,MAAM,WAAW,GAAG,IAAI,GAAG,EAAU,CAAC;QACtC,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;YAC7B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;gBAC7B,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;gBACxB,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;YAC5D,CAAC;QACH,CAAC;QAED,MAAM,YAAY,GAAG,IAAI,GAAG,EAAU,CAAC;QACvC,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YAC3B,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;gBAC7B,YAAY,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;gBACxB,WAAW,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;YAC5D,CAAC;QACH,CAAC;IACH,CAAC;IAED,6EAA6E;IAC7E,sEAAsE;IACtE,MAAM,aAAa,GAAG,IAAI,GAAG,EAAuB,CAAC;IACrD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACvC,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC;YACrC,MAAM,KAAK,GAAG,KAAK,CAAC,WAAW,EAAE,CAAC;YAClC,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;gBAC/B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;oBAC9B,aAAa,CAAC,GAAG,CAAC,KAAK,EAAE,IAAI,GAAG,EAAE,CAAC,CAAC;gBACtC,CAAC;gBACD,aAAa,CAAC,GAAG,CAAC,KAAK,CAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACnC,CAAC;QACH,CAAC;IACH,CAAC;IAED,4DAA4D;IAC5D,MAAM,aA
Aa,GAAa,EAAE,CAAC;IAEnC,KAAK,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,WAAW,EAAE,CAAC;QAC9C,IAAI,SAAS,GAAG,EAAE,CAAC;QACnB,IAAI,SAAS,GAAG,CAAC,CAAC,CAAC;QACnB,IAAI,YAAY,GAAG,KAAK,CAAC;QAEzB,sBAAsB;QACtB,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;YAC7B,MAAM,IAAI,GAAG,UAAU,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;YACzC,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,IAAI,KAAK,SAAS,IAAI,CAAC,YAAY,CAAC,EAAE,CAAC;gBAC9D,SAAS,GAAG,IAAI,CAAC;gBACjB,SAAS,GAAG,MAAM,CAAC;gBACnB,YAAY,GAAG,IAAI,CAAC;YACtB,CAAC;QACH,CAAC;QAED,0EAA0E;QAC1E,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;YAC3B,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;YACzC,4FAA4F;YAC5F,IAAI,YAAY,EAAE,CAAC;gBACjB,IAAI,IAAI,GAAG,SAAS,GAAG,GAAG,EAAE,CAAC;oBAC3B,SAAS,GAAG,IAAI,CAAC;oBACjB,SAAS,GAAG,KAAK,CAAC;oBAClB,YAAY,GAAG,KAAK,CAAC;gBACvB,CAAC;YACH,CAAC;iBAAM,CAAC;gBACN,IAAI,IAAI,GAAG,SAAS,EAAE,CAAC;oBACrB,SAAS,GAAG,IAAI,CAAC;oBACjB,SAAS,GAAG,KAAK,CAAC;oBAClB,YAAY,GAAG,KAAK,CAAC;gBACvB,CAAC;YACH,CAAC;QACH,CAAC;QAED,aAAa,CAAC,IAAI,CAAC,SAAS,IAAI,OAAO,CAAC,CAAC;IAC3C,CAAC;IAED,mEAAmE;IACnE,yFAAyF;IACzF,0DAA0D;IAC1D,KAAK,MAAM,CAAC,EAAE,YAAY,CAAC,IAAI,aAAa,EAAE,CAAC;QAC7C,IAAI,YAAY,CAAC,IAAI,GAAG,CAAC;YAAE,SAAS;QACpC,MAAM,OAAO,GAAG,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAEzC,iEAAiE;QACjE,MAAM,WAAW,GAAG,IAAI,GAAG,EAAkB,CAAC;QAC9C,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;YAC1B,MAAM,KAAK,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;YACjC,IAAI,KAAK,KAAK,OAAO,EAAE,CAAC;gBACtB,WAAW,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;YAC5D,CAAC;QACH,CAAC;QAED,IAAI,WAAW,CAAC,IAAI,KAAK,CAAC;YAAE,SAAS;QAErC,IAAI,aAAa,GAAG,EAAE,CAAC;QACvB,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,KAAK,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,WAAW,EAAE,CAAC;YACzC,IAAI,KAAK,GAAG,aAAa,EAAE,CAAC;gBAC1B,aAAa,GAAG,KAAK,CAAC;gBACtB,aAAa,GAAG,KAAK,CAAC;YACxB,CAAC;QACH,CAAC;QAED,2EAA2E;QAC3E,IAAI,aAAa,IAAI,CAAC,EAAE,CAAC;YACvB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;gBAC1B,MAAM,YAAY,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;gBACxC,IAAI,YAAY,KAAK,OAAO,EAAE,CAAC;oBAC7B,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,
CAAC;oBACnC,SAAS;gBACX,CAAC;gBACD,oEAAoE;gBACpE,MAAM,WAAW,GAAG,UAAU,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,WAAW,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;gBACvF,IAAI,WAAW,GAAG,CAAC,IAAI,YAAY,KAAK,aAAa,EAAE,CAAC;oBACtD,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC;gBACrC,CAAC;YACH,CAAC;QACH,CAAC;IACH,CAAC;IAED,0CAA0C;IAC1C,MAAM,MAAM,GAAG,IAAI,GAAG,EAAmB,CAAC;IAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACvC,MAAM,KAAK,GAAG,aAAa,CAAC,CAAC,CAAC,CAAC;QAC/B,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;YACvB,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;QACxB,CAAC;QACD,MAAM,CAAC,GAAG,CAAC,KAAK,CAAE,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;IACrC,CAAC;IAED,wDAAwD;IACxD,gEAAgE;IAChE,MAAM,QAAQ,GAAc,EAAE,CAAC;IAC/B,MAAM,WAAW,GAAY,EAAE,CAAC;IAEhC,KAAK,MAAM,CAAC,IAAI,EAAE,WAAW,CAAC,IAAI,MAAM,EAAE,CAAC;QACzC,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC3B,WAAW,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,CAAC;QACnC,CAAC;aAAM,CAAC;YACN,MAAM,aAAa,GAAG,eAAe,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;YACzD,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC,CAAC;QAC1D,CAAC;IACH,CAAC;IAED,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3B,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC,CAAC;IACpD,CAAC;IAED,wEAAwE;IACxE,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QACrB,IAAI,CAAC,CAAC,UAAU,KAAK,CAAC,CAAC,UAAU;YAAE,OAAO,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,UAAU,CAAC;QACtE,OAAO,CAAC,CAAC,cAAc,GAAG,CAAC,CAAC,cAAc,CAAC;IAC7C,CAAC,CAAC,CAAC;IAEH,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED;;;;;GAKG;AACH,SAAS,eAAe,CAAC,WAAmB,EAAE,MAAe;IAC3D,sEAAsE;IACtE,IAAI,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,WAAW,CAAC;IAElD,MAAM,YAAY,GAAG,IAAI,GAAG,EAAkB,CAAC;IAC/C,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;QAC3B,MAAM,MAAM,GAAG,aAAa,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1C,MAAM,OAAO,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC;QACvC,MAAM,IAAI,GAAG,IAAI,GAAG,EAAU,CAAC;QAC/B,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;YAC7B,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;gBACtB,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;gBACjB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,CAA
C,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;YAChE,CAAC;QACH,CAAC;IACH,CAAC;IAED,IAAI,UAAU,GAAG,EAAE,CAAC;IACpB,IAAI,SAAS,GAAG,CAAC,CAAC;IAClB,KAAK,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,IAAI,YAAY,EAAE,CAAC;QAC3C,IAAI,KAAK,GAAG,SAAS,EAAE,CAAC;YACtB,SAAS,GAAG,KAAK,CAAC;YAClB,UAAU,GAAG,MAAM,CAAC;QACtB,CAAC;IACH,CAAC;IAED,OAAO,SAAS,IAAI,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,WAAW,CAAC;AACnD,CAAC;AAED,SAAS,YAAY,CAAC,IAAY,EAAE,MAAe;IACjD,mCAAmC;IACnC,MAAM,cAAc,GAAG,MAAM,CAAC,MAAM,CAClC,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,GAAG,GAAG,KAAK,CAAC,SAAS,CAAC,KAAK,EAC3C,CAAC,CACF,CAAC;IAEF,gCAAgC;IAChC,MAAM,WAAW,GAAG,IAAI,GAAG,EAAkB,CAAC;IAC9C,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;QAC3B,KAAK,MAAM,KAAK,IAAI,KAAK,CAAC,MAAM,EAAE,CAAC;YACjC,WAAW,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;QAC5D,CAAC;IACH,CAAC;IAED,gEAAgE;IAChE,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IAC/C,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE,CAAC;SAC7C,MAAM,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,KAAK,IAAI,SAAS,CAAC;SACzC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC;SACvB,IAAI,EAAE,CAAC;IAEV,OAAO;QACL,IAAI;QACJ,MAAM;QACN,UAAU,EAAE,MAAM,CAAC,MAAM;QACzB,cAAc;QACd,MAAM;KACP,CAAC;AACJ,CAAC"}
@@ -0,0 +1,27 @@
1
+ import type { Cluster } from "./cluster.js";
2
+ import type { RepoMeta } from "../github/types.js";
3
/**
 * A {@link Cluster} augmented with a composite score and the per-signal
 * breakdown behind it. Produced by scoreCluster() and scoreClusters().
 */
export interface ScoredCluster extends Cluster {
    /** Composite score used to rank clusters. */
    score: number;
    /** Individual signal contributions that make up `score`. */
    breakdown: {
        demand: number;
        frequency: number;
        frustration: number;
        marketSize: number;
        gap: number;
    };
}
13
/**
 * Score a single cluster. Used only when scoring one cluster in isolation.
 * For proper relative scoring, use scoreClusters().
 *
 * @param cluster - The cluster to score.
 * @param repoMeta - Metadata of the repository the cluster was built from.
 * @returns The cluster extended with `score` and its `breakdown`.
 */
export declare function scoreCluster(cluster: Cluster, repoMeta: RepoMeta): ScoredCluster;
18
/**
 * Score all clusters with relative normalization.
 * Demand, frequency, and frustration are normalized against the min/max
 * of the current dataset so they always spread 0-100.
 *
 * @param clusters - The clusters to score together, so normalization is
 *   relative to this dataset.
 * @param repoMeta - Metadata of the repository the clusters were built from.
 * @param mode - "single" redistributes market size weight (constant in single-repo scans),
 *               "cross" keeps original weights (market size varies across repos).
 * @returns The scored clusters.
 */
export declare function scoreClusters(clusters: Cluster[], repoMeta: RepoMeta, mode?: "single" | "cross"): ScoredCluster[];
27
+ //# sourceMappingURL=scorer.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scorer.d.ts","sourceRoot":"","sources":["../../src/analysis/scorer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AAC5C,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,oBAAoB,CAAC;AAEnD,MAAM,WAAW,aAAc,SAAQ,OAAO;IAC5C,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,EAAE;QACT,MAAM,EAAE,MAAM,CAAC;QACf,SAAS,EAAE,MAAM,CAAC;QAClB,WAAW,EAAE,MAAM,CAAC;QACpB,UAAU,EAAE,MAAM,CAAC;QACnB,GAAG,EAAE,MAAM,CAAC;KACb,CAAC;CACH;AA0GD;;;GAGG;AACH,wBAAgB,YAAY,CAC1B,OAAO,EAAE,OAAO,EAChB,QAAQ,EAAE,QAAQ,GACjB,aAAa,CA+Bf;AAQD;;;;;;;GAOG;AACH,wBAAgB,aAAa,CAC3B,QAAQ,EAAE,OAAO,EAAE,EACnB,QAAQ,EAAE,QAAQ,EAClB,IAAI,GAAE,QAAQ,GAAG,OAAkB,GAClC,aAAa,EAAE,CAyEjB"}