@doccov/sdk 0.3.2 → 0.3.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +466 -2
- package/dist/index.js +817 -14
- package/package.json +8 -2
package/dist/index.d.ts
CHANGED
|
@@ -69,6 +69,214 @@ declare function hasNonAssertionComments(code: string): boolean;
|
|
|
69
69
|
* Detect assertion failures by comparing stdout to expected values.
|
|
70
70
|
*/
|
|
71
71
|
declare function detectExampleAssertionFailures(entry: SpecExport, runtimeResults: Map<number, ExampleRunResult>): SpecDocDrift[];
|
|
72
|
+
/**
|
|
73
|
+
* Markdown/MDX documentation analysis types
|
|
74
|
+
*/
|
|
75
|
+
/**
|
|
76
|
+
* A code block extracted from a markdown file
|
|
77
|
+
*/
|
|
78
|
+
interface MarkdownCodeBlock {
|
|
79
|
+
/** Language tag (ts, typescript, js, javascript, tsx, jsx) */
|
|
80
|
+
lang: string;
|
|
81
|
+
/** The code content */
|
|
82
|
+
code: string;
|
|
83
|
+
/** Raw meta string from code fence (e.g., "title=example.ts") */
|
|
84
|
+
meta?: string;
|
|
85
|
+
/** Starting line number in the markdown file */
|
|
86
|
+
lineStart: number;
|
|
87
|
+
/** Ending line number in the markdown file */
|
|
88
|
+
lineEnd: number;
|
|
89
|
+
}
|
|
90
|
+
/**
|
|
91
|
+
* A parsed markdown documentation file
|
|
92
|
+
*/
|
|
93
|
+
interface MarkdownDocFile {
|
|
94
|
+
/** File path relative to project root */
|
|
95
|
+
path: string;
|
|
96
|
+
/** All executable code blocks found */
|
|
97
|
+
codeBlocks: MarkdownCodeBlock[];
|
|
98
|
+
}
|
|
99
|
+
/**
|
|
100
|
+
* A reference to an found in markdown
|
|
101
|
+
*/
|
|
102
|
+
interface ExportReference {
|
|
103
|
+
/** The name of the being referenced */
|
|
104
|
+
exportName: string;
|
|
105
|
+
/** File path where the reference was found */
|
|
106
|
+
file: string;
|
|
107
|
+
/** Line number in the file */
|
|
108
|
+
line: number;
|
|
109
|
+
/** Surrounding code/text for context */
|
|
110
|
+
context: string;
|
|
111
|
+
/** Whether this reference is inside a code block */
|
|
112
|
+
inCodeBlock: boolean;
|
|
113
|
+
/** The code block index if inside a code block */
|
|
114
|
+
blockIndex?: number;
|
|
115
|
+
}
|
|
116
|
+
/**
|
|
117
|
+
* Change type for an impacted reference
|
|
118
|
+
*/
|
|
119
|
+
type DocsChangeType = "signature-changed" | "removed" | "deprecated";
|
|
120
|
+
/**
|
|
121
|
+
* An impacted reference in a documentation file
|
|
122
|
+
*/
|
|
123
|
+
interface DocsImpactReference {
|
|
124
|
+
/** The name that was changed */
|
|
125
|
+
exportName: string;
|
|
126
|
+
/** Line number in the file */
|
|
127
|
+
line: number;
|
|
128
|
+
/** Type of change affecting this reference */
|
|
129
|
+
changeType: DocsChangeType;
|
|
130
|
+
/** Suggested fix (AI-generated or deterministic) */
|
|
131
|
+
suggestion?: string;
|
|
132
|
+
/** Context around the reference */
|
|
133
|
+
context?: string;
|
|
134
|
+
}
|
|
135
|
+
/**
|
|
136
|
+
* Documentation file impact summary
|
|
137
|
+
*/
|
|
138
|
+
interface DocsImpact {
|
|
139
|
+
/** File path */
|
|
140
|
+
file: string;
|
|
141
|
+
/** All impacted references in this file */
|
|
142
|
+
references: DocsImpactReference[];
|
|
143
|
+
}
|
|
144
|
+
/**
|
|
145
|
+
* Complete docs impact analysis result
|
|
146
|
+
*/
|
|
147
|
+
interface DocsImpactResult {
|
|
148
|
+
/** Files with impacted references */
|
|
149
|
+
impactedFiles: DocsImpact[];
|
|
150
|
+
/** New exports that have no documentation */
|
|
151
|
+
missingDocs: string[];
|
|
152
|
+
/** Statistics */
|
|
153
|
+
stats: {
|
|
154
|
+
/** Total markdown files scanned */
|
|
155
|
+
filesScanned: number;
|
|
156
|
+
/** Total code blocks found */
|
|
157
|
+
codeBlocksFound: number;
|
|
158
|
+
/** Total references found */
|
|
159
|
+
referencesFound: number;
|
|
160
|
+
/** References impacted by changes */
|
|
161
|
+
impactedReferences: number;
|
|
162
|
+
};
|
|
163
|
+
}
|
|
164
|
+
/**
|
|
165
|
+
* Check if a language tag represents executable code
|
|
166
|
+
*/
|
|
167
|
+
declare function isExecutableLang(lang: string | null | undefined): boolean;
|
|
168
|
+
/**
|
|
169
|
+
* Parse a markdown file and extract code blocks
|
|
170
|
+
*/
|
|
171
|
+
declare function parseMarkdownFile(content: string, filePath: string): MarkdownDocFile;
|
|
172
|
+
/**
|
|
173
|
+
* Parse multiple markdown files
|
|
174
|
+
*/
|
|
175
|
+
declare function parseMarkdownFiles(files: Array<{
|
|
176
|
+
path: string;
|
|
177
|
+
content: string;
|
|
178
|
+
}>): MarkdownDocFile[];
|
|
179
|
+
/**
|
|
180
|
+
* Extract import statements from code
|
|
181
|
+
* Finds named imports: import { X, Y } from 'pkg'
|
|
182
|
+
*/
|
|
183
|
+
declare function extractImports(code: string): Array<{
|
|
184
|
+
name: string;
|
|
185
|
+
from: string;
|
|
186
|
+
}>;
|
|
187
|
+
/**
|
|
188
|
+
* Extract function calls from code
|
|
189
|
+
* Finds: functionName( or functionName<
|
|
190
|
+
*/
|
|
191
|
+
declare function extractFunctionCalls(code: string): string[];
|
|
192
|
+
/**
|
|
193
|
+
* Find all references to given names in markdown files
|
|
194
|
+
*/
|
|
195
|
+
declare function findExportReferences(files: MarkdownDocFile[], exportNames: string[]): ExportReference[];
|
|
196
|
+
/**
|
|
197
|
+
* Check if a code block references any of the given names
|
|
198
|
+
*/
|
|
199
|
+
declare function blockReferencesExport(block: MarkdownCodeBlock, exportName: string): boolean;
|
|
200
|
+
import { SpecDiff } from "@openpkg-ts/spec";
|
|
201
|
+
/**
|
|
202
|
+
* Analyze docs impact from a spec diff
|
|
203
|
+
*
|
|
204
|
+
* @param diff - The spec diff result
|
|
205
|
+
* @param markdownFiles - Parsed markdown files
|
|
206
|
+
* @param newExportNames - All names in the new spec (for missing docs detection)
|
|
207
|
+
*/
|
|
208
|
+
declare function analyzeDocsImpact(diff: SpecDiff, markdownFiles: MarkdownDocFile[], newExportNames?: string[]): DocsImpactResult;
|
|
209
|
+
/**
|
|
210
|
+
* Find references to deprecated exports
|
|
211
|
+
*/
|
|
212
|
+
declare function findDeprecatedReferences(markdownFiles: MarkdownDocFile[], deprecatedExports: string[]): ExportReference[];
|
|
213
|
+
/**
|
|
214
|
+
* Find references to removed exports
|
|
215
|
+
*/
|
|
216
|
+
declare function findRemovedReferences(markdownFiles: MarkdownDocFile[], removedExports: string[]): ExportReference[];
|
|
217
|
+
/**
|
|
218
|
+
* Check if any docs reference a specific */
|
|
219
|
+
declare function hasDocsForExport(markdownFiles: MarkdownDocFile[], exportName: string): boolean;
|
|
220
|
+
/**
|
|
221
|
+
* Get all exports that have documentation
|
|
222
|
+
*/
|
|
223
|
+
declare function getDocumentedExports(markdownFiles: MarkdownDocFile[], exportNames: string[]): string[];
|
|
224
|
+
/**
|
|
225
|
+
* Get all exports that lack documentation
|
|
226
|
+
*/
|
|
227
|
+
declare function getUndocumentedExports(markdownFiles: MarkdownDocFile[], exportNames: string[]): string[];
|
|
228
|
+
import { OpenPkg as OpenPkg2, SpecDiff as SpecDiff2 } from "@openpkg-ts/spec";
|
|
229
|
+
/**
|
|
230
|
+
* Extended spec diff result with docs impact
|
|
231
|
+
*/
|
|
232
|
+
interface SpecDiffWithDocs extends SpecDiff2 {
|
|
233
|
+
/** Docs impact analysis (only present if markdown files provided) */
|
|
234
|
+
docsImpact?: DocsImpactResult;
|
|
235
|
+
}
|
|
236
|
+
/**
|
|
237
|
+
* Options for diffSpecWithDocs
|
|
238
|
+
*/
|
|
239
|
+
interface DiffWithDocsOptions {
|
|
240
|
+
/** Parsed markdown documentation files */
|
|
241
|
+
markdownFiles?: MarkdownDocFile[];
|
|
242
|
+
}
|
|
243
|
+
/**
|
|
244
|
+
* Compute spec diff with optional docs impact analysis
|
|
245
|
+
*
|
|
246
|
+
* @param oldSpec - Previous version of the spec
|
|
247
|
+
* @param newSpec - Current version of the spec
|
|
248
|
+
* @param options - Options including markdown files to analyze
|
|
249
|
+
* @returns Extended diff result with docs impact
|
|
250
|
+
*
|
|
251
|
+
* @example
|
|
252
|
+
* ```ts
|
|
253
|
+
* import { diffSpecWithDocs, parseMarkdownFiles } from '@doccov/sdk';
|
|
254
|
+
*
|
|
255
|
+
* const markdownFiles = parseMarkdownFiles([
|
|
256
|
+
* { path: 'docs/guide.md', content: '...' },
|
|
257
|
+
* ]);
|
|
258
|
+
*
|
|
259
|
+
* const diff = diffSpecWithDocs(oldSpec, newSpec, { markdownFiles });
|
|
260
|
+
*
|
|
261
|
+
* if (diff.docsImpact?.impactedFiles.length) {
|
|
262
|
+
* console.log('Docs need updating!');
|
|
263
|
+
* }
|
|
264
|
+
* ```
|
|
265
|
+
*/
|
|
266
|
+
declare function diffSpecWithDocs(oldSpec: OpenPkg2, newSpec: OpenPkg2, options?: DiffWithDocsOptions): SpecDiffWithDocs;
|
|
267
|
+
/**
|
|
268
|
+
* Check if a diff has any docs impact
|
|
269
|
+
*/
|
|
270
|
+
declare function hasDocsImpact(diff: SpecDiffWithDocs): boolean;
|
|
271
|
+
/**
|
|
272
|
+
* Get summary of docs impact for display
|
|
273
|
+
*/
|
|
274
|
+
declare function getDocsImpactSummary(diff: SpecDiffWithDocs): {
|
|
275
|
+
impactedFileCount: number;
|
|
276
|
+
impactedReferenceCount: number;
|
|
277
|
+
missingDocsCount: number;
|
|
278
|
+
totalIssues: number;
|
|
279
|
+
};
|
|
72
280
|
interface DocCovOptions {
|
|
73
281
|
includePrivate?: boolean;
|
|
74
282
|
followImports?: boolean;
|
|
@@ -258,5 +466,261 @@ declare class DocCov {
|
|
|
258
466
|
declare function analyze(code: string, options?: AnalyzeOptions): Promise<OpenPkgSpec>;
|
|
259
467
|
declare function analyzeFile(filePath: string, options?: AnalyzeOptions): Promise<OpenPkgSpec>;
|
|
260
468
|
/** @deprecated Use DocCov instead */
|
|
261
|
-
declare const
|
|
262
|
-
|
|
469
|
+
declare const OpenPkg3: typeof DocCov;
|
|
470
|
+
/**
|
|
471
|
+
* Project detection types for I/O-agnostic project analysis.
|
|
472
|
+
* Used by both CLI (NodeFileSystem) and API (SandboxFileSystem).
|
|
473
|
+
*/
|
|
474
|
+
/**
|
|
475
|
+
* Minimal filesystem interface for I/O-agnostic detection.
|
|
476
|
+
* Implementations: NodeFileSystem (CLI), SandboxFileSystem (API)
|
|
477
|
+
*/
|
|
478
|
+
interface FileSystem {
|
|
479
|
+
/** Check if a file or directory exists */
|
|
480
|
+
exists(path: string): Promise<boolean>;
|
|
481
|
+
/** Read file contents as string */
|
|
482
|
+
readFile(path: string): Promise<string>;
|
|
483
|
+
/** List directory contents (file/folder names only) */
|
|
484
|
+
readDir(path: string): Promise<string[]>;
|
|
485
|
+
/** Check if path is a directory */
|
|
486
|
+
isDirectory(path: string): Promise<boolean>;
|
|
487
|
+
}
|
|
488
|
+
/** Supported package managers */
|
|
489
|
+
type PackageManager = "npm" | "yarn" | "pnpm" | "bun";
|
|
490
|
+
/** Package manager detection result with install/run commands */
|
|
491
|
+
interface PackageManagerInfo {
|
|
492
|
+
/** Package manager name */
|
|
493
|
+
name: PackageManager;
|
|
494
|
+
/** Lockfile that was detected (null if none found) */
|
|
495
|
+
lockfile: string | null;
|
|
496
|
+
/** Arguments for install command, e.g. ['install', '--frozen-lockfile'] */
|
|
497
|
+
installArgs: string[];
|
|
498
|
+
/** Prefix for running scripts, e.g. ['npm', 'run'] or ['pnpm'] */
|
|
499
|
+
runPrefix: string[];
|
|
500
|
+
}
|
|
501
|
+
/** Monorepo type based on configuration */
|
|
502
|
+
type MonorepoType = "npm-workspaces" | "pnpm-workspaces" | "lerna" | "none";
|
|
503
|
+
/** Monorepo detection result */
|
|
504
|
+
interface MonorepoInfo {
|
|
505
|
+
/** Whether this is a monorepo */
|
|
506
|
+
isMonorepo: boolean;
|
|
507
|
+
/** Type of monorepo configuration */
|
|
508
|
+
type: MonorepoType;
|
|
509
|
+
/** Workspace patterns from config (e.g. ['packages/*']) */
|
|
510
|
+
patterns: string[];
|
|
511
|
+
/** Resolved workspace packages */
|
|
512
|
+
packages: WorkspacePackage[];
|
|
513
|
+
}
|
|
514
|
+
/** A package within a monorepo workspace */
|
|
515
|
+
interface WorkspacePackage {
|
|
516
|
+
/** Package name from package.json */
|
|
517
|
+
name: string;
|
|
518
|
+
/** Relative path to package directory */
|
|
519
|
+
path: string;
|
|
520
|
+
/** Whether the package is marked as private */
|
|
521
|
+
private: boolean;
|
|
522
|
+
}
|
|
523
|
+
/** Entry point source - where the entry was detected from */
|
|
524
|
+
type EntryPointSource = "types" | "exports" | "main" | "module" | "fallback";
|
|
525
|
+
/** Entry point detection result */
|
|
526
|
+
interface EntryPointInfo {
|
|
527
|
+
/** Path to entry file (relative to package root) */
|
|
528
|
+
path: string;
|
|
529
|
+
/** Where the entry point was detected from */
|
|
530
|
+
source: EntryPointSource;
|
|
531
|
+
/** Whether this is a .d.ts file (no source available) */
|
|
532
|
+
isDeclarationOnly: boolean;
|
|
533
|
+
}
|
|
534
|
+
/** Build configuration detection result */
|
|
535
|
+
interface BuildInfo {
|
|
536
|
+
/** Build-related script names found (e.g. ['build', 'build:types']) */
|
|
537
|
+
scripts: string[];
|
|
538
|
+
/** Whether any build script was found */
|
|
539
|
+
hasBuildScript: boolean;
|
|
540
|
+
/** Whether TypeScript is configured/installed */
|
|
541
|
+
hasTypeScript: boolean;
|
|
542
|
+
/** Indicators for exotic project types */
|
|
543
|
+
exoticIndicators: {
|
|
544
|
+
/** WASM project (Cargo.toml or wasm-pack scripts) */
|
|
545
|
+
wasm: boolean;
|
|
546
|
+
/** napi-rs native addon project */
|
|
547
|
+
napi: boolean;
|
|
548
|
+
};
|
|
549
|
+
}
|
|
550
|
+
/** Complete project analysis result */
|
|
551
|
+
interface ProjectInfo {
|
|
552
|
+
/** Package manager info */
|
|
553
|
+
packageManager: PackageManagerInfo;
|
|
554
|
+
/** Monorepo info */
|
|
555
|
+
monorepo: MonorepoInfo;
|
|
556
|
+
/** Entry point info */
|
|
557
|
+
entryPoint: EntryPointInfo;
|
|
558
|
+
/** Build info */
|
|
559
|
+
build: BuildInfo;
|
|
560
|
+
}
|
|
561
|
+
/** Options for analyzeProject() */
|
|
562
|
+
interface AnalyzeProjectOptions {
|
|
563
|
+
/** Target package name for monorepos */
|
|
564
|
+
targetPackage?: string;
|
|
565
|
+
}
|
|
566
|
+
import { Sandbox } from "@vercel/sandbox";
|
|
567
|
+
/**
|
|
568
|
+
* Node.js filesystem implementation for CLI usage.
|
|
569
|
+
* Wraps Node.js fs module with a base path.
|
|
570
|
+
*/
|
|
571
|
+
declare class NodeFileSystem implements FileSystem {
|
|
572
|
+
private basePath;
|
|
573
|
+
constructor(basePath: string);
|
|
574
|
+
private resolve;
|
|
575
|
+
exists(relativePath: string): Promise<boolean>;
|
|
576
|
+
readFile(relativePath: string): Promise<string>;
|
|
577
|
+
readDir(relativePath: string): Promise<string[]>;
|
|
578
|
+
isDirectory(relativePath: string): Promise<boolean>;
|
|
579
|
+
}
|
|
580
|
+
/**
|
|
581
|
+
* Vercel Sandbox filesystem implementation for API usage.
|
|
582
|
+
* Uses sandbox.runCommand() with shell commands.
|
|
583
|
+
*/
|
|
584
|
+
declare class SandboxFileSystem implements FileSystem {
|
|
585
|
+
private sandbox;
|
|
586
|
+
constructor(sandbox: Sandbox);
|
|
587
|
+
exists(path: string): Promise<boolean>;
|
|
588
|
+
readFile(path: string): Promise<string>;
|
|
589
|
+
readDir(path: string): Promise<string[]>;
|
|
590
|
+
isDirectory(path: string): Promise<boolean>;
|
|
591
|
+
}
|
|
592
|
+
/**
|
|
593
|
+
* Detect package manager based on lockfile presence.
|
|
594
|
+
*
|
|
595
|
+
* Priority order:
|
|
596
|
+
* 1. pnpm-lock.yaml
|
|
597
|
+
* 2. bun.lock / bun.lockb
|
|
598
|
+
* 3. yarn.lock
|
|
599
|
+
* 4. package-lock.json
|
|
600
|
+
* 5. Default to npm
|
|
601
|
+
*/
|
|
602
|
+
declare function detectPackageManager(fs: FileSystem): Promise<PackageManagerInfo>;
|
|
603
|
+
/**
|
|
604
|
+
* Get install command for a package manager.
|
|
605
|
+
* Returns [command, ...args] array.
|
|
606
|
+
*/
|
|
607
|
+
declare function getInstallCommand(pm: PackageManagerInfo): string[];
|
|
608
|
+
/**
|
|
609
|
+
* Get run command for a package manager script.
|
|
610
|
+
* Returns [command, ...args, scriptName] array.
|
|
611
|
+
*/
|
|
612
|
+
declare function getRunCommand(pm: PackageManagerInfo, script: string): string[];
|
|
613
|
+
/**
|
|
614
|
+
* Detect if a project is a monorepo and list its packages.
|
|
615
|
+
*
|
|
616
|
+
* Detection triggers (in order):
|
|
617
|
+
* 1. package.json has workspaces field (npm/yarn)
|
|
618
|
+
* 2. pnpm-workspace.yaml exists
|
|
619
|
+
* 3. lerna.json exists
|
|
620
|
+
*/
|
|
621
|
+
declare function detectMonorepo(fs: FileSystem): Promise<MonorepoInfo>;
|
|
622
|
+
/**
|
|
623
|
+
* Find a package by name or path in a list of workspace packages.
|
|
624
|
+
*/
|
|
625
|
+
declare function findPackageByName(packages: WorkspacePackage[], nameOrPath: string): WorkspacePackage | undefined;
|
|
626
|
+
/**
|
|
627
|
+
* Format package list for display in error messages.
|
|
628
|
+
*/
|
|
629
|
+
declare function formatPackageList(packages: WorkspacePackage[], limit?: number): string;
|
|
630
|
+
/**
|
|
631
|
+
* Detect the TypeScript entry point for a package.
|
|
632
|
+
*
|
|
633
|
+
* Priority order:
|
|
634
|
+
* 1. package.json -> types or typings field
|
|
635
|
+
* 2. package.json -> exports["."].types
|
|
636
|
+
* 3. package.json -> main field (resolve to .ts)
|
|
637
|
+
* 4. package.json -> module field (resolve to .ts)
|
|
638
|
+
* 5. Common fallback paths
|
|
639
|
+
*
|
|
640
|
+
* @param fs - FileSystem implementation
|
|
641
|
+
* @param packagePath - Path to package directory (default: ".")
|
|
642
|
+
* @returns Entry point info
|
|
643
|
+
* @throws Error if no entry point can be found
|
|
644
|
+
*/
|
|
645
|
+
declare function detectEntryPoint(fs: FileSystem, packagePath?: string): Promise<EntryPointInfo>;
|
|
646
|
+
/**
|
|
647
|
+
* Detect build configuration and exotic project indicators.
|
|
648
|
+
*
|
|
649
|
+
* @param fs - FileSystem implementation
|
|
650
|
+
* @param packagePath - Path to package directory (default: ".")
|
|
651
|
+
* @returns Build info including scripts and exotic indicators
|
|
652
|
+
*/
|
|
653
|
+
declare function detectBuildInfo(fs: FileSystem, packagePath?: string): Promise<BuildInfo>;
|
|
654
|
+
/**
|
|
655
|
+
* Get the primary build script name to run.
|
|
656
|
+
* Prefers 'build' over 'compile' over 'tsc'.
|
|
657
|
+
*/
|
|
658
|
+
declare function getPrimaryBuildScript(buildInfo: BuildInfo): string | null;
|
|
659
|
+
/**
|
|
660
|
+
* Safely parse a JSON file, returning null on any error.
|
|
661
|
+
*/
|
|
662
|
+
declare function safeParseJson<T = Record<string, unknown>>(fs: FileSystem, path: string): Promise<T | null>;
|
|
663
|
+
/**
|
|
664
|
+
* Standard package.json structure for detection purposes.
|
|
665
|
+
*/
|
|
666
|
+
interface PackageJson {
|
|
667
|
+
name?: string;
|
|
668
|
+
version?: string;
|
|
669
|
+
private?: boolean;
|
|
670
|
+
main?: string;
|
|
671
|
+
module?: string;
|
|
672
|
+
types?: string;
|
|
673
|
+
typings?: string;
|
|
674
|
+
exports?: PackageExports;
|
|
675
|
+
workspaces?: string[] | {
|
|
676
|
+
packages: string[];
|
|
677
|
+
};
|
|
678
|
+
scripts?: Record<string, string>;
|
|
679
|
+
dependencies?: Record<string, string>;
|
|
680
|
+
devDependencies?: Record<string, string>;
|
|
681
|
+
}
|
|
682
|
+
/**
|
|
683
|
+
* Package.json exports field structure.
|
|
684
|
+
*/
|
|
685
|
+
type PackageExports = string | {
|
|
686
|
+
"."?: string | {
|
|
687
|
+
types?: string;
|
|
688
|
+
import?: string;
|
|
689
|
+
require?: string;
|
|
690
|
+
default?: string;
|
|
691
|
+
};
|
|
692
|
+
[key: string]: unknown;
|
|
693
|
+
};
|
|
694
|
+
/**
|
|
695
|
+
* Read and parse package.json from a directory.
|
|
696
|
+
*/
|
|
697
|
+
declare function readPackageJson(fs: FileSystem, dir: string): Promise<PackageJson | null>;
|
|
698
|
+
/**
|
|
699
|
+
* Analyze a project's structure for scanning.
|
|
700
|
+
*
|
|
701
|
+
* This is the main entry point for project detection. It combines all
|
|
702
|
+
* detection functions into a single call that returns complete project info.
|
|
703
|
+
*
|
|
704
|
+
* For monorepos, you must specify the target package via options.targetPackage.
|
|
705
|
+
* If not specified and a monorepo is detected, an error is thrown with the
|
|
706
|
+
* list of available packages.
|
|
707
|
+
*
|
|
708
|
+
* @param fs - FileSystem implementation (NodeFileSystem or SandboxFileSystem)
|
|
709
|
+
* @param options - Options including targetPackage for monorepos
|
|
710
|
+
* @returns Complete project info
|
|
711
|
+
* @throws Error if monorepo detected without targetPackage specified
|
|
712
|
+
* @throws Error if targetPackage not found in monorepo
|
|
713
|
+
*
|
|
714
|
+
* @example
|
|
715
|
+
* ```typescript
|
|
716
|
+
* // Single package
|
|
717
|
+
* const fs = new NodeFileSystem('/path/to/package');
|
|
718
|
+
* const project = await analyzeProject(fs);
|
|
719
|
+
*
|
|
720
|
+
* // Monorepo with target package
|
|
721
|
+
* const fs = new NodeFileSystem('/path/to/monorepo');
|
|
722
|
+
* const project = await analyzeProject(fs, { targetPackage: '@scope/core' });
|
|
723
|
+
* ```
|
|
724
|
+
*/
|
|
725
|
+
declare function analyzeProject2(fs: FileSystem, options?: AnalyzeProjectOptions): Promise<ProjectInfo>;
|
|
726
|
+
export { serializeJSDoc, safeParseJson, runExamplesWithPackage, runExamples, runExample, readPackageJson, parseMarkdownFiles, parseMarkdownFile, parseJSDocToPatch, parseAssertions, mergeFixes, isFixableDrift, isExecutableLang, hasNonAssertionComments, hasDocsImpact, hasDocsForExport, getUndocumentedExports, getRunCommand, getPrimaryBuildScript, getInstallCommand, getDocumentedExports, getDocsImpactSummary, generateFixesForExport, generateFix, formatPackageList, findRemovedReferences, findPackageByName, findJSDocLocation, findExportReferences, findDeprecatedReferences, extractPackageSpec, extractImports, extractFunctionCalls, diffSpecWithDocs, detectPackageManager, detectMonorepo, detectExampleRuntimeErrors, detectExampleAssertionFailures, detectEntryPoint, detectBuildInfo, createSourceFile, categorizeDrifts, blockReferencesExport, applyPatchToJSDoc, applyEdits, analyzeProject2 as analyzeProject, analyzeFile, analyzeDocsImpact, analyze, WorkspacePackage, SpecDiffWithDocs, SandboxFileSystem, RunExamplesWithPackageResult, RunExamplesWithPackageOptions, RunExampleOptions, ProjectInfo, PackageManagerInfo, PackageManager, PackageJson, PackageExports, OpenPkgSpec, OpenPkgOptions, OpenPkg3 as OpenPkg, NodeFileSystem, MonorepoType, MonorepoInfo, MarkdownDocFile, MarkdownCodeBlock, JSDocTag, JSDocReturn, JSDocPatch, JSDocParam, JSDocEdit, FixType, FixSuggestion, FilterOptions, FileSystem, ExportReference, ExampleRunResult, EntryPointSource, EntryPointInfo, DocsImpactResult, DocsImpactReference, DocsImpact, DocsChangeType, DocCovOptions, DocCov, DiffWithDocsOptions, Diagnostic, BuildInfo, ApplyEditsResult, AnalyzeProjectOptions, AnalyzeOptions, AnalysisResult };
|
package/dist/index.js
CHANGED
|
@@ -669,22 +669,31 @@ function levenshtein(a, b) {
|
|
|
669
669
|
return matrix[b.length][a.length];
|
|
670
670
|
}
|
|
671
671
|
function detectExampleDrift(entry, exportRegistry) {
|
|
672
|
-
if (!exportRegistry || !entry.examples
|
|
672
|
+
if (!exportRegistry || !entry.examples?.length)
|
|
673
673
|
return [];
|
|
674
|
-
}
|
|
675
674
|
const drifts = [];
|
|
676
|
-
const identifierPattern = /\b([A-Z][a-zA-Z0-9]*)\b/g;
|
|
677
675
|
for (const example of entry.examples) {
|
|
678
|
-
|
|
676
|
+
let visit = function(node) {
|
|
677
|
+
if (ts.isIdentifier(node) && isPascalCase(node.text)) {
|
|
678
|
+
if (isLocalDeclaration(node)) {
|
|
679
|
+
localDeclarations.add(node.text);
|
|
680
|
+
} else if (isIdentifierReference(node) && !isBuiltInIdentifier(node.text)) {
|
|
681
|
+
referencedIdentifiers.add(node.text);
|
|
682
|
+
}
|
|
683
|
+
}
|
|
684
|
+
ts.forEachChild(node, visit);
|
|
685
|
+
};
|
|
686
|
+
if (typeof example !== "string")
|
|
679
687
|
continue;
|
|
680
|
-
|
|
681
|
-
|
|
688
|
+
const codeContent = example.replace(/^```(?:ts|typescript|js|javascript)?\n?/i, "").replace(/\n?```$/i, "").trim();
|
|
689
|
+
if (!codeContent)
|
|
690
|
+
continue;
|
|
691
|
+
const sourceFile = ts.createSourceFile("example.ts", codeContent, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS);
|
|
692
|
+
const localDeclarations = new Set;
|
|
682
693
|
const referencedIdentifiers = new Set;
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
referencedIdentifiers.add(identifier);
|
|
687
|
-
}
|
|
694
|
+
visit(sourceFile);
|
|
695
|
+
for (const local of localDeclarations) {
|
|
696
|
+
referencedIdentifiers.delete(local);
|
|
688
697
|
}
|
|
689
698
|
for (const identifier of referencedIdentifiers) {
|
|
690
699
|
if (!exportRegistry.has(identifier)) {
|
|
@@ -757,6 +766,37 @@ function isBuiltInIdentifier(identifier) {
|
|
|
757
766
|
]);
|
|
758
767
|
return builtIns.has(identifier);
|
|
759
768
|
}
|
|
769
|
+
function isPascalCase(text) {
|
|
770
|
+
return /^[A-Z][a-zA-Z0-9]*$/.test(text);
|
|
771
|
+
}
|
|
772
|
+
function isLocalDeclaration(node) {
|
|
773
|
+
const parent = node.parent;
|
|
774
|
+
if (!parent)
|
|
775
|
+
return false;
|
|
776
|
+
if (ts.isClassDeclaration(parent) && parent.name === node)
|
|
777
|
+
return true;
|
|
778
|
+
if (ts.isFunctionDeclaration(parent) && parent.name === node)
|
|
779
|
+
return true;
|
|
780
|
+
if (ts.isVariableDeclaration(parent) && parent.name === node)
|
|
781
|
+
return true;
|
|
782
|
+
return false;
|
|
783
|
+
}
|
|
784
|
+
function isIdentifierReference(node) {
|
|
785
|
+
const parent = node.parent;
|
|
786
|
+
if (!parent)
|
|
787
|
+
return false;
|
|
788
|
+
if (ts.isClassDeclaration(parent) && parent.name === node)
|
|
789
|
+
return false;
|
|
790
|
+
if (ts.isFunctionDeclaration(parent) && parent.name === node)
|
|
791
|
+
return false;
|
|
792
|
+
if (ts.isVariableDeclaration(parent) && parent.name === node)
|
|
793
|
+
return false;
|
|
794
|
+
if (ts.isMethodDeclaration(parent) && parent.name === node)
|
|
795
|
+
return false;
|
|
796
|
+
if (ts.isPropertyDeclaration(parent) && parent.name === node)
|
|
797
|
+
return false;
|
|
798
|
+
return true;
|
|
799
|
+
}
|
|
760
800
|
function detectBrokenLinks(entry, exportRegistry) {
|
|
761
801
|
if (!exportRegistry) {
|
|
762
802
|
return [];
|
|
@@ -983,6 +1023,267 @@ function detectExampleAssertionFailures(entry, runtimeResults) {
|
|
|
983
1023
|
}
|
|
984
1024
|
return drifts;
|
|
985
1025
|
}
|
|
1026
|
+
// src/markdown/parser.ts
|
|
1027
|
+
import remarkMdx from "remark-mdx";
|
|
1028
|
+
import remarkParse from "remark-parse";
|
|
1029
|
+
import { unified } from "unified";
|
|
1030
|
+
import { visit } from "unist-util-visit";
|
|
1031
|
+
var EXECUTABLE_LANGS = new Set([
|
|
1032
|
+
"ts",
|
|
1033
|
+
"typescript",
|
|
1034
|
+
"js",
|
|
1035
|
+
"javascript",
|
|
1036
|
+
"tsx",
|
|
1037
|
+
"jsx"
|
|
1038
|
+
]);
|
|
1039
|
+
function isExecutableLang(lang) {
|
|
1040
|
+
if (!lang)
|
|
1041
|
+
return false;
|
|
1042
|
+
return EXECUTABLE_LANGS.has(lang.toLowerCase());
|
|
1043
|
+
}
|
|
1044
|
+
function parseMarkdownFile(content, filePath) {
|
|
1045
|
+
const processor = unified().use(remarkParse).use(remarkMdx);
|
|
1046
|
+
const tree = processor.parse(content);
|
|
1047
|
+
const codeBlocks = [];
|
|
1048
|
+
visit(tree, "code", (node) => {
|
|
1049
|
+
if (isExecutableLang(node.lang)) {
|
|
1050
|
+
codeBlocks.push({
|
|
1051
|
+
lang: node.lang ?? "ts",
|
|
1052
|
+
code: node.value,
|
|
1053
|
+
meta: node.meta ?? undefined,
|
|
1054
|
+
lineStart: node.position?.start.line ?? 0,
|
|
1055
|
+
lineEnd: node.position?.end.line ?? 0
|
|
1056
|
+
});
|
|
1057
|
+
}
|
|
1058
|
+
});
|
|
1059
|
+
return { path: filePath, codeBlocks };
|
|
1060
|
+
}
|
|
1061
|
+
function parseMarkdownFiles(files) {
|
|
1062
|
+
return files.map((f) => parseMarkdownFile(f.content, f.path));
|
|
1063
|
+
}
|
|
1064
|
+
function extractImports(code) {
|
|
1065
|
+
const imports = [];
|
|
1066
|
+
const importRegex = /import\s+\{([^}]+)\}\s+from\s+['"]([^'"]+)['"]/g;
|
|
1067
|
+
let match;
|
|
1068
|
+
while ((match = importRegex.exec(code)) !== null) {
|
|
1069
|
+
const names = match[1];
|
|
1070
|
+
const from = match[2];
|
|
1071
|
+
const namedImports = names.split(",").map((n) => n.trim().split(/\s+as\s+/)[0].trim());
|
|
1072
|
+
for (const name of namedImports) {
|
|
1073
|
+
if (name) {
|
|
1074
|
+
imports.push({ name, from });
|
|
1075
|
+
}
|
|
1076
|
+
}
|
|
1077
|
+
}
|
|
1078
|
+
return imports;
|
|
1079
|
+
}
|
|
1080
|
+
function extractFunctionCalls(code) {
|
|
1081
|
+
const calls = new Set;
|
|
1082
|
+
const callRegex = /\b([a-zA-Z_$][a-zA-Z0-9_$]*)\s*[(<]/g;
|
|
1083
|
+
const keywords = new Set([
|
|
1084
|
+
"if",
|
|
1085
|
+
"for",
|
|
1086
|
+
"while",
|
|
1087
|
+
"switch",
|
|
1088
|
+
"catch",
|
|
1089
|
+
"function",
|
|
1090
|
+
"class",
|
|
1091
|
+
"interface",
|
|
1092
|
+
"type",
|
|
1093
|
+
"import",
|
|
1094
|
+
"export",
|
|
1095
|
+
"return",
|
|
1096
|
+
"throw",
|
|
1097
|
+
"new",
|
|
1098
|
+
"typeof",
|
|
1099
|
+
"instanceof"
|
|
1100
|
+
]);
|
|
1101
|
+
let match;
|
|
1102
|
+
while ((match = callRegex.exec(code)) !== null) {
|
|
1103
|
+
const name = match[1];
|
|
1104
|
+
if (!keywords.has(name)) {
|
|
1105
|
+
calls.add(name);
|
|
1106
|
+
}
|
|
1107
|
+
}
|
|
1108
|
+
return Array.from(calls);
|
|
1109
|
+
}
|
|
1110
|
+
function findExportReferences(files, exportNames) {
|
|
1111
|
+
const references = [];
|
|
1112
|
+
const exportSet = new Set(exportNames);
|
|
1113
|
+
for (const file of files) {
|
|
1114
|
+
for (let blockIndex = 0;blockIndex < file.codeBlocks.length; blockIndex++) {
|
|
1115
|
+
const block = file.codeBlocks[blockIndex];
|
|
1116
|
+
const imports = extractImports(block.code);
|
|
1117
|
+
for (const imp of imports) {
|
|
1118
|
+
if (exportSet.has(imp.name)) {
|
|
1119
|
+
references.push({
|
|
1120
|
+
exportName: imp.name,
|
|
1121
|
+
file: file.path,
|
|
1122
|
+
line: block.lineStart,
|
|
1123
|
+
context: getContextFromCode(block.code, imp.name),
|
|
1124
|
+
inCodeBlock: true,
|
|
1125
|
+
blockIndex
|
|
1126
|
+
});
|
|
1127
|
+
}
|
|
1128
|
+
}
|
|
1129
|
+
const calls = extractFunctionCalls(block.code);
|
|
1130
|
+
for (const call of calls) {
|
|
1131
|
+
if (exportSet.has(call)) {
|
|
1132
|
+
const alreadyFound = references.some((r) => r.exportName === call && r.file === file.path && r.blockIndex === blockIndex);
|
|
1133
|
+
if (!alreadyFound) {
|
|
1134
|
+
references.push({
|
|
1135
|
+
exportName: call,
|
|
1136
|
+
file: file.path,
|
|
1137
|
+
line: block.lineStart,
|
|
1138
|
+
context: getContextFromCode(block.code, call),
|
|
1139
|
+
inCodeBlock: true,
|
|
1140
|
+
blockIndex
|
|
1141
|
+
});
|
|
1142
|
+
}
|
|
1143
|
+
}
|
|
1144
|
+
}
|
|
1145
|
+
}
|
|
1146
|
+
}
|
|
1147
|
+
return references;
|
|
1148
|
+
}
|
|
1149
|
+
function getContextFromCode(code, name) {
|
|
1150
|
+
const lines = code.split(`
|
|
1151
|
+
`);
|
|
1152
|
+
for (let i = 0;i < lines.length; i++) {
|
|
1153
|
+
if (lines[i].includes(name)) {
|
|
1154
|
+
const start = Math.max(0, i - 1);
|
|
1155
|
+
const end = Math.min(lines.length, i + 2);
|
|
1156
|
+
return lines.slice(start, end).join(`
|
|
1157
|
+
`);
|
|
1158
|
+
}
|
|
1159
|
+
}
|
|
1160
|
+
return code.slice(0, 100);
|
|
1161
|
+
}
|
|
1162
|
+
function blockReferencesExport(block, exportName) {
|
|
1163
|
+
const imports = extractImports(block.code);
|
|
1164
|
+
if (imports.some((i) => i.name === exportName)) {
|
|
1165
|
+
return true;
|
|
1166
|
+
}
|
|
1167
|
+
const calls = extractFunctionCalls(block.code);
|
|
1168
|
+
return calls.includes(exportName);
|
|
1169
|
+
}
|
|
1170
|
+
// src/markdown/analyzer.ts
|
|
1171
|
+
function getChangeType(exportName, diff) {
|
|
1172
|
+
if (diff.breaking.includes(exportName)) {
|
|
1173
|
+
return "signature-changed";
|
|
1174
|
+
}
|
|
1175
|
+
return null;
|
|
1176
|
+
}
|
|
1177
|
+
/**
 * Cross-reference an API diff against markdown documentation.
 *
 * Produces:
 *  - impactedFiles: docs files whose code blocks reference a changed
 *    export, with a change type and surrounding context per reference
 *  - missingDocs: non-breaking (new) exports mentioned in no code block
 *  - stats: scan counters for reporting
 */
function analyzeDocsImpact(diff, markdownFiles, newExportNames = []) {
  const changedExports = [...diff.breaking];
  const references = findExportReferences(markdownFiles, changedExports);
  const impactByFile = new Map();
  // Lazily create the per-file bucket the first time a file is impacted.
  const bucketFor = (file) => {
    let bucket = impactByFile.get(file);
    if (bucket === undefined) {
      bucket = { file, references: [] };
      impactByFile.set(file, bucket);
    }
    return bucket;
  };
  for (const ref of references) {
    const changeType = getChangeType(ref.exportName, diff);
    if (changeType === null) {
      continue;
    }
    bucketFor(ref.file).references.push({
      exportName: ref.exportName,
      line: ref.line,
      changeType,
      context: ref.context
    });
  }
  // A new export counts as documented when any code block's text mentions it.
  const documentedExports = new Set(
    newExportNames.filter((exportName) =>
      markdownFiles.some((file) =>
        file.codeBlocks.some((block) => block.code.includes(exportName))
      )
    )
  );
  const missingDocs = diff.nonBreaking.filter((name) => !documentedExports.has(name));
  const totalCodeBlocks = markdownFiles.reduce((sum, f) => sum + f.codeBlocks.length, 0);
  const allReferences = findExportReferences(markdownFiles, [
    ...changedExports,
    ...diff.nonBreaking
  ]);
  return {
    impactedFiles: Array.from(impactByFile.values()),
    missingDocs,
    stats: {
      filesScanned: markdownFiles.length,
      codeBlocksFound: totalCodeBlocks,
      referencesFound: allReferences.length,
      impactedReferences: references.length
    }
  };
}
|
|
1226
|
+
/**
 * Locate every markdown reference to a deprecated export. Thin alias over
 * findExportReferences, kept for call-site readability.
 */
function findDeprecatedReferences(markdownFiles, deprecatedExports) {
  const refs = findExportReferences(markdownFiles, deprecatedExports);
  return refs;
}
|
|
1229
|
+
/**
 * Locate every markdown reference to a removed export. Thin alias over
 * findExportReferences, kept for call-site readability.
 */
function findRemovedReferences(markdownFiles, removedExports) {
  const refs = findExportReferences(markdownFiles, removedExports);
  return refs;
}
|
|
1232
|
+
/**
 * Whether `exportName` is referenced anywhere in the given markdown files.
 */
function hasDocsForExport(markdownFiles, exportName) {
  return findExportReferences(markdownFiles, [exportName]).length > 0;
}
|
|
1236
|
+
/**
 * Subset of `exportNames` that are referenced somewhere in the docs,
 * preserving input order.
 */
function getDocumentedExports(markdownFiles, exportNames) {
  return exportNames.filter((name) => hasDocsForExport(markdownFiles, name));
}
|
|
1245
|
+
/**
 * Subset of `exportNames` with no reference anywhere in the docs,
 * preserving input order.
 */
function getUndocumentedExports(markdownFiles, exportNames) {
  return exportNames.filter((name) => !hasDocsForExport(markdownFiles, name));
}
|
|
1249
|
+
// src/markdown/diff-with-docs.ts
|
|
1250
|
+
import { diffSpec } from "@openpkg-ts/spec";
|
|
1251
|
+
/**
 * Run the standard spec diff and, when markdown files are supplied,
 * attach a docs-impact analysis keyed off the new spec's export names.
 * Without markdown files the plain diff is returned unchanged.
 */
function diffSpecWithDocs(oldSpec, newSpec, options = {}) {
  const baseDiff = diffSpec(oldSpec, newSpec);
  const markdownFiles = options.markdownFiles;
  if (!markdownFiles || markdownFiles.length === 0) {
    return baseDiff;
  }
  const newExportNames = (newSpec.exports ?? []).map((e) => e.name);
  return {
    ...baseDiff,
    docsImpact: analyzeDocsImpact(baseDiff, markdownFiles, newExportNames)
  };
}
|
|
1263
|
+
/**
 * True when a diff carries docs impact: at least one impacted docs file,
 * or at least one new export with no documentation.
 */
function hasDocsImpact(diff) {
  const impact = diff.docsImpact;
  if (!impact) {
    return false;
  }
  return impact.impactedFiles.length > 0 || impact.missingDocs.length > 0;
}
|
|
1268
|
+
/**
 * Flatten a diff's docs impact into simple counters. A diff without
 * docsImpact yields an all-zero summary; totalIssues is the sum of
 * impacted references and missing-doc exports.
 */
function getDocsImpactSummary(diff) {
  const impact = diff.docsImpact;
  if (!impact) {
    return {
      impactedFileCount: 0,
      impactedReferenceCount: 0,
      missingDocsCount: 0,
      totalIssues: 0
    };
  }
  let impactedReferenceCount = 0;
  for (const file of impact.impactedFiles) {
    impactedReferenceCount += file.references.length;
  }
  const impactedFileCount = impact.impactedFiles.length;
  const missingDocsCount = impact.missingDocs.length;
  return {
    impactedFileCount,
    impactedReferenceCount,
    missingDocsCount,
    totalIssues: impactedReferenceCount + missingDocsCount
  };
}
|
|
986
1287
|
// src/analysis/run-analysis.ts
|
|
987
1288
|
import * as fs2 from "node:fs";
|
|
988
1289
|
import * as path4 from "node:path";
|
|
@@ -4083,7 +4384,7 @@ function findJSDocLocation(sourceFile, symbolName, approximateLine) {
|
|
|
4083
4384
|
};
|
|
4084
4385
|
}
|
|
4085
4386
|
}
|
|
4086
|
-
function
|
|
4387
|
+
function visit2(node) {
|
|
4087
4388
|
if (ts2.isFunctionDeclaration(node) && node.name) {
|
|
4088
4389
|
processNode(node, node.name.getText(sourceFile));
|
|
4089
4390
|
}
|
|
@@ -4106,9 +4407,9 @@ function findJSDocLocation(sourceFile, symbolName, approximateLine) {
|
|
|
4106
4407
|
if (ts2.isMethodDeclaration(node) && node.name) {
|
|
4107
4408
|
processNode(node, node.name.getText(sourceFile));
|
|
4108
4409
|
}
|
|
4109
|
-
ts2.forEachChild(node,
|
|
4410
|
+
ts2.forEachChild(node, visit2);
|
|
4110
4411
|
}
|
|
4111
|
-
|
|
4412
|
+
visit2(sourceFile);
|
|
4112
4413
|
return result;
|
|
4113
4414
|
}
|
|
4114
4415
|
async function applyEdits(edits) {
|
|
@@ -4660,28 +4961,530 @@ async function runExamplesWithPackage(examples, options) {
|
|
|
4660
4961
|
} catch {}
|
|
4661
4962
|
}
|
|
4662
4963
|
}
|
|
4964
|
+
// src/detect/filesystem.ts
|
|
4965
|
+
import * as fs6 from "node:fs";
|
|
4966
|
+
import * as nodePath from "node:path";
|
|
4967
|
+
import { Writable } from "node:stream";
|
|
4968
|
+
|
|
4969
|
+
/**
 * FileSystem adapter backed by the local disk via node:fs.
 * All relative paths are resolved against `basePath`. Methods are async
 * to satisfy the shared FileSystem interface but delegate to synchronous
 * fs calls internally.
 */
class NodeFileSystem {
  basePath;
  constructor(basePath) {
    this.basePath = basePath;
  }
  /** Join a relative path onto the configured base directory. */
  resolve(relativePath) {
    return nodePath.join(this.basePath, relativePath);
  }
  async exists(relativePath) {
    return fs6.existsSync(this.resolve(relativePath));
  }
  async readFile(relativePath) {
    return fs6.readFileSync(this.resolve(relativePath), "utf-8");
  }
  async readDir(relativePath) {
    return fs6.readdirSync(this.resolve(relativePath));
  }
  async isDirectory(relativePath) {
    const target = this.resolve(relativePath);
    return fs6.existsSync(target) ? fs6.statSync(target).isDirectory() : false;
  }
}
|
|
4993
|
+
/**
 * Create a Writable stream that buffers everything written to it in
 * memory. Returns the stream plus a getter for the accumulated text.
 */
function createCaptureStream() {
  const chunks = [];
  const stream = new Writable({
    write(chunk, _encoding, callback) {
      chunks.push(chunk.toString());
      callback();
    }
  });
  return { stream, getOutput: () => chunks.join("") };
}
|
|
5003
|
+
|
|
5004
|
+
/**
 * FileSystem adapter that shells out to a remote sandbox. Existence and
 * directory checks use `test`; reads capture the stdout of `cat` / `ls`
 * through an in-memory stream.
 */
class SandboxFileSystem {
  sandbox;
  constructor(sandbox) {
    this.sandbox = sandbox;
  }
  async exists(path8) {
    const { exitCode } = await this.sandbox.runCommand({
      cmd: "test",
      args: ["-e", path8]
    });
    return exitCode === 0;
  }
  async readFile(path8) {
    const capture = createCaptureStream();
    await this.sandbox.runCommand({
      cmd: "cat",
      args: [path8],
      stdout: capture.stream
    });
    return capture.getOutput();
  }
  async readDir(path8) {
    const capture = createCaptureStream();
    await this.sandbox.runCommand({
      cmd: "ls",
      args: ["-1", path8],
      stdout: capture.stream
    });
    return capture.getOutput().split("\n").filter(Boolean);
  }
  async isDirectory(path8) {
    const { exitCode } = await this.sandbox.runCommand({
      cmd: "test",
      args: ["-d", path8]
    });
    return exitCode === 0;
  }
}
|
|
5043
|
+
// src/detect/package-manager.ts
|
|
5044
|
+
// Known package managers, probed in priority order by lockfile presence.
// Each entry carries the lockfile that identifies the manager plus the
// argv fragments needed to install dependencies and run package scripts.
var PM_CONFIGS = [
  ["pnpm", "pnpm-lock.yaml", ["install", "--frozen-lockfile"], ["pnpm"]],
  ["bun", "bun.lock", ["install", "--frozen-lockfile"], ["bun"]],
  ["bun", "bun.lockb", ["install", "--frozen-lockfile"], ["bun"]],
  ["yarn", "yarn.lock", ["install", "--frozen-lockfile"], ["yarn"]],
  ["npm", "package-lock.json", ["install", "--legacy-peer-deps"], ["npm", "run"]]
].map(([name, lockfile, installArgs, runPrefix]) => ({
  lockfile,
  info: { name, lockfile, installArgs, runPrefix }
}));
// Fallback when no lockfile is found: plain npm with no lockfile pinning.
var DEFAULT_PM = {
  name: "npm",
  lockfile: null,
  installArgs: ["install", "--legacy-peer-deps"],
  runPrefix: ["npm", "run"]
};
|
|
5097
|
+
/**
 * Identify the project's package manager by probing for known lockfiles
 * in PM_CONFIGS priority order. Falls back to plain npm (DEFAULT_PM)
 * when no lockfile is present.
 */
async function detectPackageManager2(fs7) {
  for (const config of PM_CONFIGS) {
    const found = await fs7.exists(config.lockfile);
    if (found) {
      return config.info;
    }
  }
  return DEFAULT_PM;
}
|
|
5105
|
+
/** Full argv for installing dependencies with the given package manager. */
function getInstallCommand2(pm) {
  return [pm.name].concat(pm.installArgs);
}
|
|
5108
|
+
/** Full argv for running a package script with the given package manager. */
function getRunCommand(pm, script) {
  return pm.runPrefix.concat([script]);
}
|
|
5111
|
+
// src/detect/utils.ts
|
|
5112
|
+
/**
 * Read and JSON-parse a file through the FileSystem abstraction.
 * Returns null for a missing file, a read failure, or invalid JSON —
 * never throws.
 */
async function safeParseJson(fs7, path8) {
  try {
    const present = await fs7.exists(path8);
    if (!present) {
      return null;
    }
    return JSON.parse(await fs7.readFile(path8));
  } catch {
    return null;
  }
}
|
|
5122
|
+
/**
 * Load `package.json` from `dir` (project-relative), returning null when
 * it is absent or unparseable.
 */
async function readPackageJson(fs7, dir) {
  const target = dir === "." ? "package.json" : `${dir}/package.json`;
  return safeParseJson(fs7, target);
}
|
|
5126
|
+
|
|
5127
|
+
// src/detect/monorepo.ts
|
|
5128
|
+
/**
 * Detect monorepo tooling at the repository root, checking in order:
 * npm/yarn workspaces (package.json "workspaces"), pnpm-workspace.yaml,
 * then lerna.json. Returns the workspace patterns and resolved packages;
 * a repo with no workspace config yields `{ isMonorepo: false, ... }`.
 */
async function detectMonorepo(fs7) {
  const pkgJson = await readPackageJson(fs7, ".");
  const buildResult = (type, patterns, packages) => ({
    isMonorepo: packages.length > 0,
    type,
    patterns,
    packages
  });
  if (pkgJson?.workspaces) {
    const patterns = extractWorkspacePatterns(pkgJson.workspaces);
    const packages = await resolveWorkspacePackages(fs7, patterns, pkgJson.name, pkgJson.private);
    return buildResult("npm-workspaces", patterns, packages);
  }
  if (await fs7.exists("pnpm-workspace.yaml")) {
    const patterns = parsePnpmWorkspace(await fs7.readFile("pnpm-workspace.yaml"));
    const packages = await resolveWorkspacePackages(fs7, patterns, pkgJson?.name, pkgJson?.private);
    return buildResult("pnpm-workspaces", patterns, packages);
  }
  if (await fs7.exists("lerna.json")) {
    const lerna = await safeParseJson(fs7, "lerna.json");
    const patterns = lerna?.packages ?? ["packages/*"];
    const packages = await resolveWorkspacePackages(fs7, patterns, pkgJson?.name, pkgJson?.private);
    return buildResult("lerna", patterns, packages);
  }
  return { isMonorepo: false, type: "none", patterns: [], packages: [] };
}
|
|
5164
|
+
/**
 * Normalize the package.json "workspaces" field (plain array or the
 * `{ packages: [...] }` object form) into a string-pattern array.
 * Any other shape yields an empty array.
 */
function extractWorkspacePatterns(workspaces) {
  const onlyStrings = (arr) => arr.filter((w) => typeof w === "string");
  if (Array.isArray(workspaces)) {
    return onlyStrings(workspaces);
  }
  if (workspaces !== null && typeof workspaces === "object" && Array.isArray(workspaces.packages)) {
    return onlyStrings(workspaces.packages);
  }
  return [];
}
|
|
5175
|
+
/**
 * Minimal pnpm-workspace.yaml parser: collects the list items under the
 * top-level `packages:` key. Falls back to ["packages/*"] when the file
 * declares none. Deliberately not a general YAML parser.
 */
function parsePnpmWorkspace(content) {
  const patterns = [];
  let inPackages = false;
  for (const line of content.split("\n")) {
    if (/^packages:/i.test(line)) {
      inPackages = true;
      continue;
    }
    if (!inPackages) {
      continue;
    }
    // Another top-level key ends the packages section.
    if (/^\w+:/.test(line) && !line.startsWith(" ") && !line.startsWith("\t")) {
      break;
    }
    const item = line.match(/^\s*-\s*['"]?([^'"]+)['"]?\s*$/);
    if (item !== null) {
      patterns.push(item[1].trim());
    }
  }
  return patterns.length === 0 ? ["packages/*"] : patterns;
}
|
|
5197
|
+
/**
 * Expand workspace glob patterns into concrete packages by scanning the
 * first-level directories they point at. Only simple `dir/*` patterns are
 * honored; negations and patterns with remaining wildcards are skipped,
 * and "packages" is always scanned. The root package is included when it
 * is named, public, and not the placeholder "root". Results are
 * deduplicated by name and sorted alphabetically.
 */
async function resolveWorkspacePackages(fs7, patterns, rootPackageName, rootIsPrivate) {
  const packages = [];
  const seen = new Set();
  if (rootPackageName && !rootIsPrivate && rootPackageName !== "root") {
    seen.add(rootPackageName);
    packages.push({ name: rootPackageName, path: ".", private: false });
  }
  // Reduce each pattern to its leading directory; anything still holding a
  // wildcard after stripping the trailing glob cannot be scanned directly.
  const dirsToScan = new Set();
  for (const pattern of patterns) {
    if (pattern.startsWith("!")) {
      continue;
    }
    const dir = pattern.replace(/\/?\*\*?$/, "");
    if (dir && !dir.includes("*")) {
      dirsToScan.add(dir);
    }
  }
  dirsToScan.add("packages");
  for (const dir of dirsToScan) {
    const scannable = await fs7.exists(dir) && await fs7.isDirectory(dir);
    if (!scannable) {
      continue;
    }
    for (const subdir of await fs7.readDir(dir)) {
      const pkgPath = `${dir}/${subdir}`;
      const pkgJsonPath = `${pkgPath}/package.json`;
      if (!await fs7.exists(pkgJsonPath)) {
        continue;
      }
      try {
        const content = await fs7.readFile(pkgJsonPath);
        // Sandbox `cat` reports missing files on stdout rather than failing.
        if (content.includes("No such file")) {
          continue;
        }
        const pkg = JSON.parse(content);
        if (pkg.name && !seen.has(pkg.name)) {
          seen.add(pkg.name);
          packages.push({
            name: pkg.name,
            path: pkgPath,
            private: pkg.private ?? false
          });
        }
      } catch {}
    }
  }
  return packages.sort((a, b) => a.name.localeCompare(b.name));
}
|
|
5247
|
+
/** Find a workspace package by its npm name or its repo-relative path. */
function findPackageByName(packages, nameOrPath) {
  for (const pkg of packages) {
    if (pkg.name === nameOrPath || pkg.path === nameOrPath) {
      return pkg;
    }
  }
  return undefined;
}
|
|
5250
|
+
/**
 * Render up to `limit` public (non-private) packages as `--package <name>`
 * suggestion lines, with a trailing "... and N more" line when truncated.
 */
function formatPackageList(packages, limit = 10) {
  const publicPackages = packages.filter((p) => !p.private);
  const lines = publicPackages.slice(0, limit).map((pkg) => ` --package ${pkg.name}`);
  const overflow = publicPackages.length - limit;
  if (overflow > 0) {
    lines.push(` ... and ${overflow} more`);
  }
  return lines.join("\n");
}
|
|
5259
|
+
// src/detect/entry-point.ts
|
|
5260
|
+
/**
 * Determine the TypeScript entry point for a package, preferring (in
 * order) package.json `types`/`typings`, the `.` entry of `exports`,
 * `main`, `module`, then a list of conventional source paths. Build
 * output paths are mapped back to likely sources via resolveToSource.
 * Throws when package.json is missing or no entry point can be found.
 */
async function detectEntryPoint(fs7, packagePath = ".") {
  const pkgJson = await readPackageJson(fs7, packagePath);
  if (!pkgJson) {
    throw new Error("No package.json found - not a valid npm package");
  }
  // Resolve one candidate field value to a source file, tagging its origin.
  const tryField = async (value, source) => {
    if (typeof value !== "string" || !value) {
      return null;
    }
    const resolved = await resolveToSource(fs7, packagePath, value);
    return resolved ? { ...resolved, source } : null;
  };
  const fromTypes = await tryField(pkgJson.types || pkgJson.typings, "types");
  if (fromTypes) {
    return fromTypes;
  }
  if (pkgJson.exports && typeof pkgJson.exports === "object") {
    const dotExport = pkgJson.exports["."];
    if (dotExport && typeof dotExport === "object" && "types" in dotExport) {
      const fromExports = await tryField(dotExport.types, "exports");
      if (fromExports) {
        return fromExports;
      }
    }
  }
  const fromMain = await tryField(pkgJson.main, "main");
  if (fromMain) {
    return fromMain;
  }
  const fromModule = await tryField(pkgJson.module, "module");
  if (fromModule) {
    return fromModule;
  }
  const fallbacks = [
    "src/index.ts",
    "src/index.tsx",
    "src/main.ts",
    "index.ts",
    "lib/index.ts",
    "source/index.ts"
  ];
  for (const fallback of fallbacks) {
    const fullPath = packagePath === "." ? fallback : `${packagePath}/${fallback}`;
    if (await fs7.exists(fullPath)) {
      return { path: fullPath, source: "fallback", isDeclarationOnly: false };
    }
  }
  throw new Error("Could not detect TypeScript entry point. No types field in package.json and no common entry paths found.");
}
|
|
5312
|
+
/**
 * Map a package.json entry path (often a built .js/.d.ts artifact) to the
 * TypeScript source file most likely to have produced it. Real sources
 * are preferred; a .d.ts path is only returned (flagged
 * isDeclarationOnly) when no source candidate exists on disk. Returns
 * null when nothing matches.
 */
async function resolveToSource(fs7, basePath, filePath) {
  const normalized = filePath.replace(/^\.\//, "");
  const fullPath = (p) => basePath === "." ? p : `${basePath}/${p}`;
  // Direct hit: the field already points at a .ts/.tsx source file.
  const isSourceTs = normalized.endsWith(".ts") && !normalized.endsWith(".d.ts") || normalized.endsWith(".tsx");
  if (isSourceTs && await fs7.exists(fullPath(normalized))) {
    return { path: fullPath(normalized), isDeclarationOnly: false };
  }
  const candidates = [];
  // Remap a known output directory prefix (dist/build/lib) onto src/.
  const addOutDirCandidates = (outPrefix, withTsx) => {
    if (!normalized.startsWith(outPrefix)) {
      return;
    }
    const srcPath = "src/" + normalized.slice(outPrefix.length);
    candidates.push(srcPath.replace(/\.js$/, ".ts"));
    candidates.push(srcPath.replace(/\.d\.ts$/, ".ts"));
    if (withTsx) {
      candidates.push(srcPath.replace(/\.js$/, ".tsx"));
    }
  };
  addOutDirCandidates("dist/", true);
  addOutDirCandidates("build/", false);
  addOutDirCandidates("lib/", false);
  candidates.push(normalized.replace(/\.js$/, ".ts"));
  candidates.push(normalized.replace(/\.d\.ts$/, ".ts"));
  candidates.push(normalized.replace(/\.js$/, ".tsx"));
  if (normalized.endsWith(".d.ts")) {
    const baseName = normalized.replace(/\.d\.ts$/, "").split("/").pop();
    if (baseName) {
      candidates.push(`src/${baseName}.ts`);
    }
  }
  for (const candidate of candidates) {
    if (candidate.endsWith(".d.ts")) {
      continue;
    }
    const resolved = fullPath(candidate);
    if (await fs7.exists(resolved)) {
      return { path: resolved, isDeclarationOnly: false };
    }
  }
  // Last resort: accept the declaration file itself.
  if (normalized.endsWith(".d.ts") && await fs7.exists(fullPath(normalized))) {
    return { path: fullPath(normalized), isDeclarationOnly: true };
  }
  return null;
}
|
|
5364
|
+
// src/detect/build.ts
|
|
5365
|
+
/**
 * Summarize how a package is built: which npm scripts look like build
 * steps, whether TypeScript is in play (tsconfig present or typescript
 * dependency), and whether the project shows signs of exotic native
 * builds (wasm / napi).
 */
async function detectBuildInfo(fs7, packagePath = ".") {
  const pkgJson = await readPackageJson(fs7, packagePath);
  const scripts = pkgJson?.scripts ?? {};
  const isBuildLike = (name) =>
    name === "build" || name === "compile" || name === "tsc" ||
    name.startsWith("build:") || name.startsWith("compile:");
  const buildScripts = Object.keys(scripts).filter(isBuildLike);
  const tsconfigPath = packagePath === "." ? "tsconfig.json" : `${packagePath}/tsconfig.json`;
  const hasTsConfig = await fs7.exists(tsconfigPath);
  const hasTsDep = pkgJson?.devDependencies?.typescript !== undefined || pkgJson?.dependencies?.typescript !== undefined;
  const wasm = await detectWasmProject(fs7, packagePath, scripts);
  const napi = detectNapiProject(pkgJson);
  return {
    scripts: buildScripts,
    hasBuildScript: buildScripts.length > 0,
    hasTypeScript: hasTsConfig || hasTsDep,
    exoticIndicators: { wasm, napi }
  };
}
|
|
5383
|
+
/**
 * Heuristic wasm detection: a Cargo.toml in the package directory (or at
 * the repo root for sub-packages), or any npm script mentioning wasm /
 * wasm-pack.
 */
async function detectWasmProject(fs7, packagePath, scripts) {
  const cargoCandidates = packagePath === "."
    ? ["Cargo.toml"]
    : [`${packagePath}/Cargo.toml`, "Cargo.toml"];
  for (const cargo of cargoCandidates) {
    if (await fs7.exists(cargo)) {
      return true;
    }
  }
  const allScripts = Object.values(scripts).join(" ");
  return allScripts.includes("wasm-pack") || allScripts.includes("wasm");
}
|
|
5392
|
+
/**
 * True when any dependency or devDependency name mentions "napi",
 * signaling a native-addon (N-API) build.
 */
function detectNapiProject(pkgJson) {
  if (!pkgJson) {
    return false;
  }
  const allDeps = Object.keys({
    ...(pkgJson.dependencies ?? {}),
    ...(pkgJson.devDependencies ?? {})
  });
  return allDeps.some((dep) => dep.includes("napi"));
}
|
|
5401
|
+
/**
 * Pick the script to run for a build: prefer the conventional names
 * "build", "compile", "tsc" (in that order), else the first detected
 * build-like script, else null.
 */
function getPrimaryBuildScript(buildInfo) {
  for (const preferred of ["build", "compile", "tsc"]) {
    if (buildInfo.scripts.includes(preferred)) {
      return preferred;
    }
  }
  return buildInfo.scripts[0] ?? null;
}
|
|
5410
|
+
// src/detect/index.ts
|
|
5411
|
+
/**
 * Top-level project analysis: detect the package manager and monorepo
 * layout, resolve the target package (options.targetPackage is required
 * when the repo is a monorepo), then detect its entry point and build
 * setup. Throws with actionable messages when the target is ambiguous
 * or not found.
 */
async function analyzeProject(fs7, options = {}) {
  // Decide which directory to analyze once the monorepo layout is known.
  const pickTargetPath = (monorepo) => {
    if (!monorepo.isMonorepo) {
      return ".";
    }
    const requested = options.targetPackage;
    if (!requested) {
      const publicPackages = monorepo.packages.filter((p) => !p.private);
      const packageNames = publicPackages.map((p) => p.name).join(", ");
      throw new Error(`Monorepo detected with ${publicPackages.length} packages. Specify target with --package. Available: ${packageNames}`);
    }
    const pkg = findPackageByName(monorepo.packages, requested);
    if (pkg) {
      return pkg.path;
    }
    const available = monorepo.packages.map((p) => p.name).join(", ");
    throw new Error(`Package not found: ${requested}. Available packages: ${available}`);
  };
  const [packageManager, monorepo] = await Promise.all([
    detectPackageManager2(fs7),
    detectMonorepo(fs7)
  ]);
  const targetPath = pickTargetPath(monorepo);
  const [entryPoint, build] = await Promise.all([
    detectEntryPoint(fs7, targetPath),
    detectBuildInfo(fs7, targetPath)
  ]);
  return { packageManager, monorepo, entryPoint, build };
}
|
|
4663
5436
|
export {
|
|
4664
5437
|
serializeJSDoc,
|
|
5438
|
+
safeParseJson,
|
|
4665
5439
|
runExamplesWithPackage,
|
|
4666
5440
|
runExamples,
|
|
4667
5441
|
runExample,
|
|
5442
|
+
readPackageJson,
|
|
5443
|
+
parseMarkdownFiles,
|
|
5444
|
+
parseMarkdownFile,
|
|
4668
5445
|
parseJSDocToPatch,
|
|
4669
5446
|
parseAssertions,
|
|
4670
5447
|
mergeFixes,
|
|
4671
5448
|
isFixableDrift,
|
|
5449
|
+
isExecutableLang,
|
|
4672
5450
|
hasNonAssertionComments,
|
|
5451
|
+
hasDocsImpact,
|
|
5452
|
+
hasDocsForExport,
|
|
5453
|
+
getUndocumentedExports,
|
|
5454
|
+
getRunCommand,
|
|
5455
|
+
getPrimaryBuildScript,
|
|
5456
|
+
getInstallCommand2 as getInstallCommand,
|
|
5457
|
+
getDocumentedExports,
|
|
5458
|
+
getDocsImpactSummary,
|
|
4673
5459
|
generateFixesForExport,
|
|
4674
5460
|
generateFix,
|
|
5461
|
+
formatPackageList,
|
|
5462
|
+
findRemovedReferences,
|
|
5463
|
+
findPackageByName,
|
|
4675
5464
|
findJSDocLocation,
|
|
5465
|
+
findExportReferences,
|
|
5466
|
+
findDeprecatedReferences,
|
|
4676
5467
|
extractPackageSpec,
|
|
5468
|
+
extractImports,
|
|
5469
|
+
extractFunctionCalls,
|
|
5470
|
+
diffSpecWithDocs,
|
|
5471
|
+
detectPackageManager2 as detectPackageManager,
|
|
5472
|
+
detectMonorepo,
|
|
4677
5473
|
detectExampleRuntimeErrors,
|
|
4678
5474
|
detectExampleAssertionFailures,
|
|
5475
|
+
detectEntryPoint,
|
|
5476
|
+
detectBuildInfo,
|
|
4679
5477
|
createSourceFile,
|
|
4680
5478
|
categorizeDrifts,
|
|
5479
|
+
blockReferencesExport,
|
|
4681
5480
|
applyPatchToJSDoc,
|
|
4682
5481
|
applyEdits,
|
|
5482
|
+
analyzeProject,
|
|
4683
5483
|
analyzeFile,
|
|
5484
|
+
analyzeDocsImpact,
|
|
4684
5485
|
analyze,
|
|
5486
|
+
SandboxFileSystem,
|
|
4685
5487
|
OpenPkg,
|
|
5488
|
+
NodeFileSystem,
|
|
4686
5489
|
DocCov
|
|
4687
5490
|
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@doccov/sdk",
|
|
3
|
-
"version": "0.3.
|
|
3
|
+
"version": "0.3.3",
|
|
4
4
|
"description": "DocCov SDK - Documentation coverage and drift detection for TypeScript",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"typescript",
|
|
@@ -40,7 +40,13 @@
|
|
|
40
40
|
],
|
|
41
41
|
"dependencies": {
|
|
42
42
|
"@openpkg-ts/spec": "^0.3.1",
|
|
43
|
-
"
|
|
43
|
+
"@vercel/sandbox": "^1.0.3",
|
|
44
|
+
"mdast": "^3.0.0",
|
|
45
|
+
"remark-mdx": "^3.1.0",
|
|
46
|
+
"remark-parse": "^11.0.0",
|
|
47
|
+
"typescript": "^5.0.0",
|
|
48
|
+
"unified": "^11.0.5",
|
|
49
|
+
"unist-util-visit": "^5.0.0"
|
|
44
50
|
},
|
|
45
51
|
"devDependencies": {
|
|
46
52
|
"@types/bun": "latest",
|