@doccov/api 0.4.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,251 @@
+ /**
+  * Remote repository analyzer - runs DocCov analysis on GitHub repos via webhooks.
+  * Uses shallow clone to temp dir for full TypeScript resolution.
+  */
+
+ import { rm } from 'node:fs/promises';
+ import { tmpdir } from 'node:os';
+ import { join } from 'node:path';
+ import { DocCov, enrichSpec, type OpenPkgSpec } from '@doccov/sdk';
+ import { getTokenByInstallationId } from './github-app';
+
+ /**
+  * Result from remote analysis.
+  */
+ export interface RemoteAnalysisResult {
+   coverageScore: number;
+   documentedExports: number;
+   totalExports: number;
+   driftCount: number;
+   qualityErrors: number;
+   qualityWarnings: number;
+   /** Full spec for detailed reports */
+   spec?: OpenPkgSpec;
+ }
+
+ /**
+  * Shallow clone a GitHub repo to a temp directory.
+  */
+ async function cloneRepo(
+   owner: string,
+   repo: string,
+   ref: string,
+   authToken: string,
+ ): Promise<string> {
+   const tmpDir = join(tmpdir(), `doccov-${owner}-${repo}-${Date.now()}`);
+
+   // Use authenticated HTTPS URL for private repos
+   const cloneUrl = `https://x-access-token:${authToken}@github.com/${owner}/${repo}.git`;
+
+   const proc = Bun.spawn(['git', 'clone', '--depth', '1', '--branch', ref, cloneUrl, tmpDir], {
+     stdout: 'pipe',
+     stderr: 'pipe',
+   });
+
+   const exitCode = await proc.exited;
+
+   if (exitCode !== 0) {
+     const stderr = await new Response(proc.stderr).text();
+
+     // Try fetching specific SHA if branch clone fails
+     if (ref.length === 40) {
+       // Looks like a SHA
+       const shallowProc = Bun.spawn(['git', 'clone', '--depth', '1', cloneUrl, tmpDir], {
+         stdout: 'pipe',
+         stderr: 'pipe',
+       });
+       await shallowProc.exited;
+
+       const fetchProc = Bun.spawn(['git', '-C', tmpDir, 'fetch', 'origin', ref], {
+         stdout: 'pipe',
+         stderr: 'pipe',
+       });
+       await fetchProc.exited;
+
+       const checkoutProc = Bun.spawn(['git', '-C', tmpDir, 'checkout', ref], {
+         stdout: 'pipe',
+         stderr: 'pipe',
+       });
+       const checkoutExit = await checkoutProc.exited;
+
+       if (checkoutExit !== 0) {
+         throw new Error(`Failed to checkout ${ref}: ${stderr}`);
+       }
+     } else {
+       throw new Error(`Failed to clone ${owner}/${repo}@${ref}: ${stderr}`);
+     }
+   }
+
+   return tmpDir;
+ }
+
+ /**
+  * Detect the entry point for a package.
+  */
+ async function detectEntryPoint(repoDir: string): Promise<string | null> {
+   try {
+     const packageJsonPath = join(repoDir, 'package.json');
+     const packageJson = await Bun.file(packageJsonPath).json();
+
+     // Check exports first (modern packages)
+     if (packageJson.exports) {
+       const mainExport = packageJson.exports['.'];
+       if (typeof mainExport === 'string') {
+         return mainExport.replace(/^\.\//, '');
+       }
+       if (mainExport?.import) {
+         const importPath =
+           typeof mainExport.import === 'string' ? mainExport.import : mainExport.import.default;
+         if (importPath) return importPath.replace(/^\.\//, '');
+       }
+       if (mainExport?.types) {
+         return mainExport.types.replace(/^\.\//, '');
+       }
+     }
+
+     // Check types field
+     if (packageJson.types) {
+       return packageJson.types.replace(/^\.\//, '');
+     }
+
+     // Check main field
+     if (packageJson.main) {
+       // Convert .js to .ts if applicable
+       const main = packageJson.main.replace(/^\.\//, '');
+       const tsMain = main.replace(/\.js$/, '.ts');
+       const tsxMain = main.replace(/\.js$/, '.tsx');
+
+       // Check if TS version exists
+       const tsFile = Bun.file(join(repoDir, tsMain));
+       if (await tsFile.exists()) return tsMain;
+
+       const tsxFile = Bun.file(join(repoDir, tsxMain));
+       if (await tsxFile.exists()) return tsxMain;
+
+       return main;
+     }
+
+     // Common fallbacks
+     const fallbacks = ['src/index.ts', 'src/index.tsx', 'index.ts', 'lib/index.ts'];
+     for (const fallback of fallbacks) {
+       const file = Bun.file(join(repoDir, fallback));
+       if (await file.exists()) return fallback;
+     }
+
+     return null;
+   } catch {
+     return null;
+   }
+ }
+
+ /**
+  * Analyze a remote GitHub repository.
+  *
+  * @param installationId - GitHub App installation ID
+  * @param owner - Repository owner
+  * @param repo - Repository name
+  * @param ref - Git ref (branch, tag, or SHA)
+  * @param includeSpec - Whether to include full spec in result
+  * @returns Analysis result or null if failed
+  */
+ export async function analyzeRemoteRepo(
+   installationId: string,
+   owner: string,
+   repo: string,
+   ref: string,
+   includeSpec = false,
+ ): Promise<RemoteAnalysisResult | null> {
+   // Get installation token
+   const token = await getTokenByInstallationId(installationId);
+   if (!token) {
+     console.error(`No token for installation ${installationId}`);
+     return null;
+   }
+
+   let tmpDir: string | null = null;
+
+   try {
+     // Clone repo to temp dir
+     tmpDir = await cloneRepo(owner, repo, ref, token);
+
+     // Detect entry point
+     const entryPoint = await detectEntryPoint(tmpDir);
+     if (!entryPoint) {
+       console.error(`No entry point found for ${owner}/${repo}`);
+       return null;
+     }
+
+     const entryPath = join(tmpDir, entryPoint);
+
+     // Run analysis
+     const doccov = new DocCov({
+       resolveExternalTypes: false, // Skip for speed
+       useCache: false, // No caching for webhook analysis
+     });
+
+     const result = await doccov.analyzeFileWithDiagnostics(entryPath);
+
+     // Enrich with coverage metrics
+     const enriched = enrichSpec(result.spec);
+
+     // Extract metrics
+     const docs = enriched.docs;
+     const coverageScore = docs?.coverageScore ?? 0;
+     const documentedExports = docs?.documented ?? 0;
+     const totalExports = docs?.total ?? 0;
+     const driftCount = docs?.drift?.length ?? 0;
+
+     // Count quality issues
+     let qualityErrors = 0;
+     let qualityWarnings = 0;
+
+     if (docs?.quality) {
+       for (const item of docs.quality) {
+         if (item.severity === 'error') qualityErrors++;
+         else if (item.severity === 'warning') qualityWarnings++;
+       }
+     }
+
+     return {
+       coverageScore,
+       documentedExports,
+       totalExports,
+       driftCount,
+       qualityErrors,
+       qualityWarnings,
+       spec: includeSpec ? enriched : undefined,
+     };
+   } catch (err) {
+     console.error(`Analysis failed for ${owner}/${repo}@${ref}:`, err);
+     return null;
+   } finally {
+     // Cleanup temp dir
+     if (tmpDir) {
+       try {
+         await rm(tmpDir, { recursive: true, force: true });
+       } catch {
+         // Ignore cleanup errors
+       }
+     }
+   }
+ }
+
+ /**
+  * Compute diff between two analysis results.
+  */
+ export function computeAnalysisDiff(
+   base: RemoteAnalysisResult,
+   head: RemoteAnalysisResult,
+ ): {
+   coverageDelta: number;
+   documentedDelta: number;
+   totalDelta: number;
+   driftDelta: number;
+ } {
+   return {
+     coverageDelta: Number((head.coverageScore - base.coverageScore).toFixed(1)),
+     documentedDelta: head.documentedExports - base.documentedExports,
+     totalDelta: head.totalExports - base.totalExports,
+     driftDelta: head.driftCount - base.driftCount,
+   };
+ }
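
Usage sketch (not part of the package diff): a minimal example of how the two exports above might be wired together in a pull-request webhook handler, comparing docs coverage between the base and head commits. The handler name, the './remote-analysis' import path, and the return shape are assumptions for illustration; the diff does not show file names or callers.

import { analyzeRemoteRepo, computeAnalysisDiff } from './remote-analysis'; // path assumed

// Hypothetical pull_request webhook: analyze both commits, then diff the metrics.
export async function handlePullRequestEvent(
  installationId: string,
  owner: string,
  repo: string,
  baseSha: string,
  headSha: string,
) {
  const base = await analyzeRemoteRepo(installationId, owner, repo, baseSha);
  const head = await analyzeRemoteRepo(installationId, owner, repo, headSha);
  if (!base || !head) return null; // null means token lookup, clone, or analysis failed

  const delta = computeAnalysisDiff(base, head);
  // coverageDelta is rounded to one decimal place by computeAnalysisDiff
  return {
    regressed: delta.coverageDelta < 0 || delta.driftDelta > 0,
    ...delta,
  };
}
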
@@ -0,0 +1,131 @@
+ /**
+  * In-memory LRU cache for specs and diffs.
+  * TTL: 1 hour, Max entries: 100 per cache
+  */
+
+ import type { SpecDiffWithDocs } from '@doccov/sdk';
+ import type { OpenPkg } from '@openpkg-ts/spec';
+
+ interface CacheEntry<T> {
+   value: T;
+   createdAt: number;
+ }
+
+ const CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour
+ const MAX_ENTRIES = 100;
+
+ // Spec cache: key = `${owner}/${repo}/${sha}`
+ const specCache = new Map<string, CacheEntry<OpenPkg>>();
+
+ // Diff cache: key = `${baseSha}_${headSha}`
+ const diffCache = new Map<string, CacheEntry<SpecDiffWithDocs>>();
+
+ /**
+  * Evict oldest entries if cache exceeds max size
+  */
+ function evictOldest<T>(cache: Map<string, CacheEntry<T>>): void {
+   if (cache.size <= MAX_ENTRIES) return;
+
+   // Find and delete oldest entry
+   let oldestKey: string | null = null;
+   let oldestTime = Infinity;
+
+   for (const [key, entry] of cache) {
+     if (entry.createdAt < oldestTime) {
+       oldestTime = entry.createdAt;
+       oldestKey = key;
+     }
+   }
+
+   if (oldestKey) {
+     cache.delete(oldestKey);
+   }
+ }
+
+ /**
+  * Check if entry is still valid (not expired)
+  */
+ function isValid<T>(entry: CacheEntry<T> | undefined): entry is CacheEntry<T> {
+   if (!entry) return false;
+   return Date.now() - entry.createdAt < CACHE_TTL_MS;
+ }
+
+ /**
+  * Generate cache key for spec
+  */
+ export function specCacheKey(owner: string, repo: string, sha: string): string {
+   return `${owner}/${repo}/${sha}`;
+ }
+
+ /**
+  * Generate cache key for diff
+  */
+ export function diffCacheKey(baseSha: string, headSha: string): string {
+   return `${baseSha}_${headSha}`;
+ }
+
+ /**
+  * Get cached spec if available and not expired
+  */
+ export function getCachedSpec(owner: string, repo: string, sha: string): OpenPkg | null {
+   const key = specCacheKey(owner, repo, sha);
+   const entry = specCache.get(key);
+
+   if (!isValid(entry)) {
+     if (entry) specCache.delete(key);
+     return null;
+   }
+
+   return entry.value;
+ }
+
+ /**
+  * Cache a spec
+  */
+ export function setCachedSpec(owner: string, repo: string, sha: string, spec: OpenPkg): void {
+   const key = specCacheKey(owner, repo, sha);
+   specCache.set(key, { value: spec, createdAt: Date.now() });
+   evictOldest(specCache);
+ }
+
+ /**
+  * Get cached diff if available and not expired
+  */
+ export function getCachedDiff(baseSha: string, headSha: string): SpecDiffWithDocs | null {
+   const key = diffCacheKey(baseSha, headSha);
+   const entry = diffCache.get(key);
+
+   if (!isValid(entry)) {
+     if (entry) diffCache.delete(key);
+     return null;
+   }
+
+   return entry.value;
+ }
+
+ /**
+  * Cache a diff result
+  */
+ export function setCachedDiff(baseSha: string, headSha: string, diff: SpecDiffWithDocs): void {
+   const key = diffCacheKey(baseSha, headSha);
+   diffCache.set(key, { value: diff, createdAt: Date.now() });
+   evictOldest(diffCache);
+ }
+
+ /**
+  * Clear all caches (for testing)
+  */
+ export function clearCaches(): void {
+   specCache.clear();
+   diffCache.clear();
+ }
+
+ /**
+  * Get cache stats (for monitoring)
+  */
+ export function getCacheStats(): { specCount: number; diffCount: number } {
+   return {
+     specCount: specCache.size,
+     diffCount: diffCache.size,
+   };
+ }
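
Usage sketch (not part of the package diff): how the spec cache above might wrap an expensive spec build so repeated webhook deliveries for the same commit are served from memory instead of re-analyzing. The buildSpec callback, the getSpecWithCache wrapper, and the './cache' import path are placeholders, not exports shown in this diff.

import type { OpenPkg } from '@openpkg-ts/spec';
import { getCachedSpec, setCachedSpec } from './cache'; // path assumed

// Return the cached spec when present and younger than the 1-hour TTL;
// otherwise build it, store it (evicting the oldest entry past 100), and return it.
async function getSpecWithCache(
  owner: string,
  repo: string,
  sha: string,
  buildSpec: () => Promise<OpenPkg>, // placeholder for the real spec generator
): Promise<OpenPkg> {
  const cached = getCachedSpec(owner, repo, sha);
  if (cached) return cached;

  const spec = await buildSpec();
  setCachedSpec(owner, repo, sha, spec);
  return spec;
}

Keying entries by commit SHA means a cached spec never goes stale for its commit; the TTL and max-size eviction only bound memory use.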